From 07af59b6bda82148350c26f4118591aa8b615bec Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 17 Aug 2025 06:28:04 +0000
Subject: [PATCH 1/3] Initial plan

From 4360d715d4f1071f944e55dc390a03a67e06d197 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 17 Aug 2025 06:31:57 +0000
Subject: [PATCH 2/3] Initial analysis: Identified potential bugs in design
 patterns code

Co-authored-by: makiolo <6398423+makiolo@users.noreply.github.com>
---
 node_modules/.bin/cmaki | 1 +
 node_modules/.bin/node-which | 1 +
 node_modules/.package-lock.json | 477 +++
 node_modules/@nodelib/fs.scandir/LICENSE | 21 +
 node_modules/@nodelib/fs.scandir/README.md | 171 ++
 .../@nodelib/fs.scandir/out/adapters/fs.d.ts | 20 +
 .../@nodelib/fs.scandir/out/adapters/fs.js | 19 +
 .../@nodelib/fs.scandir/out/constants.d.ts | 4 +
 .../@nodelib/fs.scandir/out/constants.js | 17 +
 .../@nodelib/fs.scandir/out/index.d.ts | 12 +
 node_modules/@nodelib/fs.scandir/out/index.js | 26 +
 .../fs.scandir/out/providers/async.d.ts | 7 +
 .../fs.scandir/out/providers/async.js | 104 +
 .../fs.scandir/out/providers/common.d.ts | 1 +
 .../fs.scandir/out/providers/common.js | 13 +
 .../fs.scandir/out/providers/sync.d.ts | 5 +
 .../@nodelib/fs.scandir/out/providers/sync.js | 54 +
 .../@nodelib/fs.scandir/out/settings.d.ts | 20 +
 .../@nodelib/fs.scandir/out/settings.js | 24 +
 .../@nodelib/fs.scandir/out/types/index.d.ts | 20 +
 .../@nodelib/fs.scandir/out/types/index.js | 2 +
 .../@nodelib/fs.scandir/out/utils/fs.d.ts | 2 +
 .../@nodelib/fs.scandir/out/utils/fs.js | 19 +
 .../@nodelib/fs.scandir/out/utils/index.d.ts | 2 +
 .../@nodelib/fs.scandir/out/utils/index.js | 5 +
 node_modules/@nodelib/fs.scandir/package.json | 44 +
 node_modules/@nodelib/fs.stat/LICENSE | 21 +
 node_modules/@nodelib/fs.stat/README.md | 126 +
 .../@nodelib/fs.stat/out/adapters/fs.d.ts | 13 +
 .../@nodelib/fs.stat/out/adapters/fs.js | 17 +
 node_modules/@nodelib/fs.stat/out/index.d.ts | 12 +
 node_modules/@nodelib/fs.stat/out/index.js | 26 +
 .../@nodelib/fs.stat/out/providers/async.d.ts | 4 +
 .../@nodelib/fs.stat/out/providers/async.js | 36 +
 .../@nodelib/fs.stat/out/providers/sync.d.ts | 3 +
 .../@nodelib/fs.stat/out/providers/sync.js | 23 +
 .../@nodelib/fs.stat/out/settings.d.ts | 16 +
 node_modules/@nodelib/fs.stat/out/settings.js | 16 +
 .../@nodelib/fs.stat/out/types/index.d.ts | 4 +
 .../@nodelib/fs.stat/out/types/index.js | 2 +
 node_modules/@nodelib/fs.stat/package.json | 37 +
 node_modules/@nodelib/fs.walk/LICENSE | 21 +
 node_modules/@nodelib/fs.walk/README.md | 215 ++
 node_modules/@nodelib/fs.walk/out/index.d.ts | 14 +
 node_modules/@nodelib/fs.walk/out/index.js | 34 +
 .../@nodelib/fs.walk/out/providers/async.d.ts | 12 +
 .../@nodelib/fs.walk/out/providers/async.js | 30 +
 .../@nodelib/fs.walk/out/providers/index.d.ts | 4 +
 .../@nodelib/fs.walk/out/providers/index.js | 9 +
 .../fs.walk/out/providers/stream.d.ts | 12 +
 .../@nodelib/fs.walk/out/providers/stream.js | 34 +
 .../@nodelib/fs.walk/out/providers/sync.d.ts | 10 +
 .../@nodelib/fs.walk/out/providers/sync.js | 14 +
 .../@nodelib/fs.walk/out/readers/async.d.ts | 30 +
 .../@nodelib/fs.walk/out/readers/async.js | 97 +
 .../@nodelib/fs.walk/out/readers/common.d.ts | 7 +
 .../@nodelib/fs.walk/out/readers/common.js | 31 +
 .../@nodelib/fs.walk/out/readers/reader.d.ts | 6 +
 .../@nodelib/fs.walk/out/readers/reader.js | 11 +
 .../@nodelib/fs.walk/out/readers/sync.d.ts | 15 +
 .../@nodelib/fs.walk/out/readers/sync.js | 59 +
.../@nodelib/fs.walk/out/settings.d.ts | 30 + node_modules/@nodelib/fs.walk/out/settings.js | 26 + .../@nodelib/fs.walk/out/types/index.d.ts | 8 + .../@nodelib/fs.walk/out/types/index.js | 2 + node_modules/@nodelib/fs.walk/package.json | 44 + node_modules/braces/LICENSE | 21 + node_modules/braces/README.md | 586 ++++ node_modules/braces/index.js | 170 ++ node_modules/braces/lib/compile.js | 60 + node_modules/braces/lib/constants.js | 57 + node_modules/braces/lib/expand.js | 113 + node_modules/braces/lib/parse.js | 331 +++ node_modules/braces/lib/stringify.js | 32 + node_modules/braces/lib/utils.js | 122 + node_modules/braces/package.json | 77 + node_modules/cross-spawn/LICENSE | 21 + node_modules/cross-spawn/README.md | 89 + node_modules/cross-spawn/index.js | 39 + node_modules/cross-spawn/lib/enoent.js | 59 + node_modules/cross-spawn/lib/parse.js | 91 + node_modules/cross-spawn/lib/util/escape.js | 47 + .../cross-spawn/lib/util/readShebang.js | 23 + .../cross-spawn/lib/util/resolveCommand.js | 52 + node_modules/cross-spawn/package.json | 73 + node_modules/execa/index.d.ts | 564 ++++ node_modules/execa/index.js | 268 ++ node_modules/execa/lib/command.js | 52 + node_modules/execa/lib/error.js | 88 + node_modules/execa/lib/kill.js | 115 + node_modules/execa/lib/promise.js | 46 + node_modules/execa/lib/stdio.js | 52 + node_modules/execa/lib/stream.js | 97 + node_modules/execa/license | 9 + node_modules/execa/package.json | 74 + node_modules/execa/readme.md | 663 +++++ node_modules/fast-glob/LICENSE | 21 + node_modules/fast-glob/README.md | 830 ++++++ node_modules/fast-glob/out/index.d.ts | 40 + node_modules/fast-glob/out/index.js | 102 + .../fast-glob/out/managers/tasks.d.ts | 22 + node_modules/fast-glob/out/managers/tasks.js | 110 + .../fast-glob/out/providers/async.d.ts | 9 + node_modules/fast-glob/out/providers/async.js | 23 + .../fast-glob/out/providers/filters/deep.d.ts | 16 + .../fast-glob/out/providers/filters/deep.js | 62 + .../out/providers/filters/entry.d.ts | 17 + .../fast-glob/out/providers/filters/entry.js | 85 + .../out/providers/filters/error.d.ts | 8 + .../fast-glob/out/providers/filters/error.js | 15 + .../out/providers/matchers/matcher.d.ts | 33 + .../out/providers/matchers/matcher.js | 45 + .../out/providers/matchers/partial.d.ts | 4 + .../out/providers/matchers/partial.js | 38 + .../fast-glob/out/providers/provider.d.ts | 19 + .../fast-glob/out/providers/provider.js | 48 + .../fast-glob/out/providers/stream.d.ts | 11 + .../fast-glob/out/providers/stream.js | 31 + .../fast-glob/out/providers/sync.d.ts | 9 + node_modules/fast-glob/out/providers/sync.js | 23 + .../out/providers/transformers/entry.d.ts | 8 + .../out/providers/transformers/entry.js | 26 + node_modules/fast-glob/out/readers/async.d.ts | 10 + node_modules/fast-glob/out/readers/async.js | 35 + .../fast-glob/out/readers/reader.d.ts | 15 + node_modules/fast-glob/out/readers/reader.js | 33 + .../fast-glob/out/readers/stream.d.ts | 14 + node_modules/fast-glob/out/readers/stream.js | 55 + node_modules/fast-glob/out/readers/sync.d.ts | 12 + node_modules/fast-glob/out/readers/sync.js | 43 + node_modules/fast-glob/out/settings.d.ts | 164 ++ node_modules/fast-glob/out/settings.js | 59 + node_modules/fast-glob/out/types/index.d.ts | 31 + node_modules/fast-glob/out/types/index.js | 2 + node_modules/fast-glob/out/utils/array.d.ts | 2 + node_modules/fast-glob/out/utils/array.js | 22 + node_modules/fast-glob/out/utils/errno.d.ts | 2 + node_modules/fast-glob/out/utils/errno.js | 7 + node_modules/fast-glob/out/utils/fs.d.ts | 4 + 
node_modules/fast-glob/out/utils/fs.js | 19 + node_modules/fast-glob/out/utils/index.d.ts | 8 + node_modules/fast-glob/out/utils/index.js | 17 + node_modules/fast-glob/out/utils/path.d.ts | 13 + node_modules/fast-glob/out/utils/path.js | 68 + node_modules/fast-glob/out/utils/pattern.d.ts | 49 + node_modules/fast-glob/out/utils/pattern.js | 206 ++ node_modules/fast-glob/out/utils/stream.d.ts | 4 + node_modules/fast-glob/out/utils/stream.js | 17 + node_modules/fast-glob/out/utils/string.d.ts | 2 + node_modules/fast-glob/out/utils/string.js | 11 + node_modules/fast-glob/package.json | 81 + node_modules/fastq/.github/dependabot.yml | 11 + node_modules/fastq/.github/workflows/ci.yml | 75 + node_modules/fastq/LICENSE | 13 + node_modules/fastq/README.md | 312 ++ node_modules/fastq/SECURITY.md | 15 + node_modules/fastq/bench.js | 66 + node_modules/fastq/example.js | 14 + node_modules/fastq/example.mjs | 11 + node_modules/fastq/index.d.ts | 57 + node_modules/fastq/package.json | 53 + node_modules/fastq/queue.js | 311 ++ node_modules/fastq/test/example.ts | 83 + node_modules/fastq/test/promise.js | 291 ++ node_modules/fastq/test/test.js | 653 +++++ node_modules/fastq/test/tsconfig.json | 11 + node_modules/fill-range/LICENSE | 21 + node_modules/fill-range/README.md | 237 ++ node_modules/fill-range/index.js | 248 ++ node_modules/fill-range/package.json | 74 + node_modules/get-stream/buffer-stream.js | 52 + node_modules/get-stream/index.d.ts | 105 + node_modules/get-stream/index.js | 61 + node_modules/get-stream/license | 9 + node_modules/get-stream/package.json | 47 + node_modules/get-stream/readme.md | 124 + node_modules/glob-parent/CHANGELOG.md | 110 + node_modules/glob-parent/LICENSE | 15 + node_modules/glob-parent/README.md | 137 + node_modules/glob-parent/index.js | 42 + node_modules/glob-parent/package.json | 48 + node_modules/human-signals/CHANGELOG.md | 11 + node_modules/human-signals/LICENSE | 201 ++ node_modules/human-signals/README.md | 165 ++ node_modules/human-signals/build/src/core.js | 273 ++ .../human-signals/build/src/core.js.map | 1 + .../human-signals/build/src/main.d.ts | 52 + node_modules/human-signals/build/src/main.js | 71 + .../human-signals/build/src/main.js.map | 1 + .../human-signals/build/src/realtime.js | 19 + .../human-signals/build/src/realtime.js.map | 1 + .../human-signals/build/src/signals.js | 35 + .../human-signals/build/src/signals.js.map | 1 + node_modules/human-signals/package.json | 64 + node_modules/is-extglob/LICENSE | 21 + node_modules/is-extglob/README.md | 107 + node_modules/is-extglob/index.js | 20 + node_modules/is-extglob/package.json | 69 + node_modules/is-glob/LICENSE | 21 + node_modules/is-glob/README.md | 206 ++ node_modules/is-glob/index.js | 150 + node_modules/is-glob/package.json | 81 + node_modules/is-number/LICENSE | 21 + node_modules/is-number/README.md | 187 ++ node_modules/is-number/index.js | 18 + node_modules/is-number/package.json | 82 + node_modules/is-stream/index.d.ts | 79 + node_modules/is-stream/index.js | 28 + node_modules/is-stream/license | 9 + node_modules/is-stream/package.json | 42 + node_modules/is-stream/readme.md | 60 + node_modules/isexe/.npmignore | 2 + node_modules/isexe/LICENSE | 15 + node_modules/isexe/README.md | 51 + node_modules/isexe/index.js | 57 + node_modules/isexe/mode.js | 41 + node_modules/isexe/package.json | 31 + node_modules/isexe/test/basic.js | 221 ++ node_modules/isexe/windows.js | 42 + node_modules/merge-stream/LICENSE | 21 + node_modules/merge-stream/README.md | 78 + node_modules/merge-stream/index.js | 
41 + node_modules/merge-stream/package.json | 19 + node_modules/merge2/LICENSE | 21 + node_modules/merge2/README.md | 144 + node_modules/merge2/index.js | 144 + node_modules/merge2/package.json | 43 + node_modules/metacommon/.travis.yml | 5 + node_modules/metacommon/README.md | 2 + node_modules/metacommon/cmaki.yml | 14 + node_modules/metacommon/common.h | 426 +++ node_modules/metacommon/compile.sh | 3 + node_modules/metacommon/package.json | 32 + node_modules/metacommon/setup.sh | 3 + node_modules/micromatch/LICENSE | 21 + node_modules/micromatch/README.md | 1024 +++++++ node_modules/micromatch/index.js | 474 +++ node_modules/micromatch/package.json | 119 + node_modules/mimic-fn/index.d.ts | 54 + node_modules/mimic-fn/index.js | 13 + node_modules/mimic-fn/license | 9 + node_modules/mimic-fn/package.json | 42 + node_modules/mimic-fn/readme.md | 69 + node_modules/npm-mas-mas/.travis.yml | 15 + node_modules/npm-mas-mas/LICENSE | 21 + node_modules/npm-mas-mas/Makefile | 21 + node_modules/npm-mas-mas/README | 57 + node_modules/npm-mas-mas/cmaki/.travis.yml | 5 + node_modules/npm-mas-mas/cmaki/GitUtils.cmake | 157 + node_modules/npm-mas-mas/cmaki/LICENSE | 22 + node_modules/npm-mas-mas/cmaki/README.md | 4 + node_modules/npm-mas-mas/cmaki/Utils.cmake | 32 + .../cmaki/ci/detect_operative_system.sh | 14 + node_modules/npm-mas-mas/cmaki/cmaki.cmake | 529 ++++ .../npm-mas-mas/cmaki/facts/facts.cmake | 735 +++++ .../npm-mas-mas/cmaki/init/.clang-format | 66 + .../npm-mas-mas/cmaki/junit/CTest2JUnit.xsl | 120 + .../npm-mas-mas/cmaki/junit/README.md | 3 + .../npm-mas-mas/cmaki_docker/.travis.yml | 4 + node_modules/npm-mas-mas/cmaki_docker/LICENSE | 21 + .../npm-mas-mas/cmaki_docker/README.md | 11 + .../npm-mas-mas/cmaki_docker/build.sh | 40 + .../cmaki_generator/CMakeLists.txt | 95 + .../npm-mas-mas/cmaki_generator/LICENSE | 22 + .../npm-mas-mas/cmaki_generator/README.md | 22 + .../npm-mas-mas/cmaki_generator/build | 10 + .../npm-mas-mas/cmaki_generator/build.cmd | 11 + .../npm-mas-mas/cmaki_generator/build.py | 757 +++++ .../cmaki_generator/check_remote_version.py | 233 ++ .../npm-mas-mas/cmaki_generator/common.yml | 498 ++++ .../cmaki_generator/compilation.py | 238 ++ .../cmaki_generator/download_package.py | 11 + .../cmaki_generator/get_package.py | 26 + .../cmaki_generator/get_return_code.py | 35 + .../cmaki_generator/gwen/CMakeLists.txt | 47 + .../cmaki_generator/hash_version.py | 172 ++ .../cmaki_generator/junit/CTest2JUnit.xsl | 120 + .../cmaki_generator/junit/README.md | 3 + .../librocket/Build/CMakeLists.txt | 687 +++++ .../cmaki_generator/librocket/CMakeLists.txt | 2 + .../cmaki_generator/noise/CMakeLists.txt | 26 + .../ois/demos/FFConsoleDemo.cpp | 1147 ++++++++ .../cmaki_generator/ois/demos/Makefile.am | 11 + .../cmaki_generator/ois/demos/OISConsole.cpp | 459 +++ .../ois/src/linux/LinuxForceFeedback.cpp | 563 ++++ .../ois/src/linux/LinuxJoyStickEvents.cpp | 308 ++ .../cmaki_generator/oxygine/CMakeLists.txt | 546 ++++ .../cmaki_generator/packages/assimp.yml | 13 + .../cmaki_generator/packages/box2d.yml | 23 + .../cmaki_generator/packages/bullet2.yml | 54 + .../cmaki_generator/packages/cryptopp.yml | 70 + .../packages/dune-freetype.yml | 28 + .../cmaki_generator/packages/dune-glew.yml | 29 + .../cmaki_generator/packages/dune-zlib.yml | 38 + .../cmaki_generator/packages/fmod.yml | 20 + .../cmaki_generator/packages/freeimage.yml | 36 + .../packages/freeimage_cmake.yml | 40 + .../cmaki_generator/packages/google-gmock.yml | 61 + .../cmaki_generator/packages/gwen.yml | 11 + 
.../cmaki_generator/packages/haxx-libcurl.yml | 71 + .../cmaki_generator/packages/json.yml | 26 + .../cmaki_generator/packages/librocket.yml | 24 + .../cmaki_generator/packages/msgpack.yml | 10 + .../cmaki_generator/packages/noise.yml | 11 + .../cmaki_generator/packages/ois.yml | 19 + .../cmaki_generator/packages/openssl.yml | 24 + .../cmaki_generator/packages/oxygine.yml | 25 + .../cmaki_generator/packages/paho-mqtt3.yml | 22 + .../cmaki_generator/packages/paho-mqttpp3.yml | 21 + .../cmaki_generator/packages/pugixml.yml | 11 + .../cmaki_generator/packages/python.yml | 21 + .../cmaki_generator/packages/raknet.yml | 11 + .../packages/restclient-cpp.yml | 17 + .../cmaki_generator/packages/sdl2.yml | 38 + .../cmaki_generator/packages/spdlog.yml | 14 + .../cmaki_generator/packages/tbb.yml | 49 + .../cmaki_generator/packages/yamlcpp.yml | 16 + .../npm-mas-mas/cmaki_generator/packing.py | 139 + .../paho-mqttpp3/CMakeLists.txt | 75 + .../paho-mqttpp3/src/CMakeLists.txt | 161 + .../npm-mas-mas/cmaki_generator/pipeline.py | 287 ++ .../npm-mas-mas/cmaki_generator/prepare.py | 72 + .../npm-mas-mas/cmaki_generator/purge.py | 36 + .../raknet/Lib/LibStatic/CMakeLists.txt | 34 + .../raknet/Source/CCRakNetSlidingWindow.cpp | 372 +++ .../raknet/Source/ReplicaManager3.cpp | 2593 +++++++++++++++++ .../npm-mas-mas/cmaki_generator/run.sh | 10 + .../npm-mas-mas/cmaki_generator/run_test.sh | 27 + .../npm-mas-mas/cmaki_generator/run_tests.py | 175 ++ .../cmaki_generator/save_package.py | 31 + .../sdl2-emscripten/CMakeLists.txt | 1366 +++++++++ .../cmaki_generator/sdl2/CMakeLists.txt | 1849 ++++++++++++ .../npm-mas-mas/cmaki_generator/sync.sh | 12 + .../cmaki_generator/third_party.py | 1508 ++++++++++ .../cmaki_generator/unittest/CMakeLists.txt | 30 + .../npm-mas-mas/cmaki_generator/upload.py | 35 + .../cmaki_generator/upload_package.py | 48 + .../npm-mas-mas/cmaki_generator/utils.py | 531 ++++ .../npm-mas-mas/cmaki_identifier/.travis.yml | 12 + .../cmaki_identifier/CMakeLists.txt | 6 + .../npm-mas-mas/cmaki_identifier/README.md | 19 + .../cmaki_identifier/boostorg_predef | 1 + .../cmaki_identifier/cmaki_emulator.sh | 36 + .../cmaki_identifier/cmaki_identifier.cmake | 12 + .../cmaki_identifier/cmaki_identifier.sh | 14 + .../cmaki_identifier/gcc/Debug/CMakeCache.txt | 113 + .../gcc/Debug/CMakeFiles/cmake.check_cache | 1 + .../cmaki_identifier/node_modules/.bin/cmaki | 1 + .../node_modules/.bin/node-which | 1 + .../node_modules/.package-lock.json | 471 +++ .../node_modules/@nodelib/fs.scandir/LICENSE | 21 + .../@nodelib/fs.scandir/README.md | 171 ++ .../@nodelib/fs.scandir/out/adapters/fs.d.ts | 20 + .../@nodelib/fs.scandir/out/adapters/fs.js | 19 + .../@nodelib/fs.scandir/out/constants.d.ts | 4 + .../@nodelib/fs.scandir/out/constants.js | 17 + .../@nodelib/fs.scandir/out/index.d.ts | 12 + .../@nodelib/fs.scandir/out/index.js | 26 + .../fs.scandir/out/providers/async.d.ts | 7 + .../fs.scandir/out/providers/async.js | 104 + .../fs.scandir/out/providers/common.d.ts | 1 + .../fs.scandir/out/providers/common.js | 13 + .../fs.scandir/out/providers/sync.d.ts | 5 + .../@nodelib/fs.scandir/out/providers/sync.js | 54 + .../@nodelib/fs.scandir/out/settings.d.ts | 20 + .../@nodelib/fs.scandir/out/settings.js | 24 + .../@nodelib/fs.scandir/out/types/index.d.ts | 20 + .../@nodelib/fs.scandir/out/types/index.js | 2 + .../@nodelib/fs.scandir/out/utils/fs.d.ts | 2 + .../@nodelib/fs.scandir/out/utils/fs.js | 19 + .../@nodelib/fs.scandir/out/utils/index.d.ts | 2 + .../@nodelib/fs.scandir/out/utils/index.js | 5 + 
.../@nodelib/fs.scandir/package.json | 44 + .../node_modules/@nodelib/fs.stat/LICENSE | 21 + .../node_modules/@nodelib/fs.stat/README.md | 126 + .../@nodelib/fs.stat/out/adapters/fs.d.ts | 13 + .../@nodelib/fs.stat/out/adapters/fs.js | 17 + .../@nodelib/fs.stat/out/index.d.ts | 12 + .../@nodelib/fs.stat/out/index.js | 26 + .../@nodelib/fs.stat/out/providers/async.d.ts | 4 + .../@nodelib/fs.stat/out/providers/async.js | 36 + .../@nodelib/fs.stat/out/providers/sync.d.ts | 3 + .../@nodelib/fs.stat/out/providers/sync.js | 23 + .../@nodelib/fs.stat/out/settings.d.ts | 16 + .../@nodelib/fs.stat/out/settings.js | 16 + .../@nodelib/fs.stat/out/types/index.d.ts | 4 + .../@nodelib/fs.stat/out/types/index.js | 2 + .../@nodelib/fs.stat/package.json | 37 + .../node_modules/@nodelib/fs.walk/LICENSE | 21 + .../node_modules/@nodelib/fs.walk/README.md | 215 ++ .../@nodelib/fs.walk/out/index.d.ts | 14 + .../@nodelib/fs.walk/out/index.js | 34 + .../@nodelib/fs.walk/out/providers/async.d.ts | 12 + .../@nodelib/fs.walk/out/providers/async.js | 30 + .../@nodelib/fs.walk/out/providers/index.d.ts | 4 + .../@nodelib/fs.walk/out/providers/index.js | 9 + .../fs.walk/out/providers/stream.d.ts | 12 + .../@nodelib/fs.walk/out/providers/stream.js | 34 + .../@nodelib/fs.walk/out/providers/sync.d.ts | 10 + .../@nodelib/fs.walk/out/providers/sync.js | 14 + .../@nodelib/fs.walk/out/readers/async.d.ts | 30 + .../@nodelib/fs.walk/out/readers/async.js | 97 + .../@nodelib/fs.walk/out/readers/common.d.ts | 7 + .../@nodelib/fs.walk/out/readers/common.js | 31 + .../@nodelib/fs.walk/out/readers/reader.d.ts | 6 + .../@nodelib/fs.walk/out/readers/reader.js | 11 + .../@nodelib/fs.walk/out/readers/sync.d.ts | 15 + .../@nodelib/fs.walk/out/readers/sync.js | 59 + .../@nodelib/fs.walk/out/settings.d.ts | 30 + .../@nodelib/fs.walk/out/settings.js | 26 + .../@nodelib/fs.walk/out/types/index.d.ts | 8 + .../@nodelib/fs.walk/out/types/index.js | 2 + .../@nodelib/fs.walk/package.json | 44 + .../node_modules/braces/LICENSE | 21 + .../node_modules/braces/README.md | 586 ++++ .../node_modules/braces/index.js | 170 ++ .../node_modules/braces/lib/compile.js | 60 + .../node_modules/braces/lib/constants.js | 57 + .../node_modules/braces/lib/expand.js | 113 + .../node_modules/braces/lib/parse.js | 331 +++ .../node_modules/braces/lib/stringify.js | 32 + .../node_modules/braces/lib/utils.js | 122 + .../node_modules/braces/package.json | 77 + .../node_modules/cross-spawn/LICENSE | 21 + .../node_modules/cross-spawn/README.md | 89 + .../node_modules/cross-spawn/index.js | 39 + .../node_modules/cross-spawn/lib/enoent.js | 59 + .../node_modules/cross-spawn/lib/parse.js | 91 + .../cross-spawn/lib/util/escape.js | 47 + .../cross-spawn/lib/util/readShebang.js | 23 + .../cross-spawn/lib/util/resolveCommand.js | 52 + .../node_modules/cross-spawn/package.json | 73 + .../node_modules/execa/index.d.ts | 564 ++++ .../node_modules/execa/index.js | 268 ++ .../node_modules/execa/lib/command.js | 52 + .../node_modules/execa/lib/error.js | 88 + .../node_modules/execa/lib/kill.js | 115 + .../node_modules/execa/lib/promise.js | 46 + .../node_modules/execa/lib/stdio.js | 52 + .../node_modules/execa/lib/stream.js | 97 + .../node_modules/execa/license | 9 + .../node_modules/execa/package.json | 74 + .../node_modules/execa/readme.md | 663 +++++ .../node_modules/fast-glob/LICENSE | 21 + .../node_modules/fast-glob/README.md | 830 ++++++ .../node_modules/fast-glob/out/index.d.ts | 40 + .../node_modules/fast-glob/out/index.js | 102 + .../fast-glob/out/managers/tasks.d.ts | 22 + 
.../fast-glob/out/managers/tasks.js | 110 + .../fast-glob/out/providers/async.d.ts | 9 + .../fast-glob/out/providers/async.js | 23 + .../fast-glob/out/providers/filters/deep.d.ts | 16 + .../fast-glob/out/providers/filters/deep.js | 62 + .../out/providers/filters/entry.d.ts | 17 + .../fast-glob/out/providers/filters/entry.js | 85 + .../out/providers/filters/error.d.ts | 8 + .../fast-glob/out/providers/filters/error.js | 15 + .../out/providers/matchers/matcher.d.ts | 33 + .../out/providers/matchers/matcher.js | 45 + .../out/providers/matchers/partial.d.ts | 4 + .../out/providers/matchers/partial.js | 38 + .../fast-glob/out/providers/provider.d.ts | 19 + .../fast-glob/out/providers/provider.js | 48 + .../fast-glob/out/providers/stream.d.ts | 11 + .../fast-glob/out/providers/stream.js | 31 + .../fast-glob/out/providers/sync.d.ts | 9 + .../fast-glob/out/providers/sync.js | 23 + .../out/providers/transformers/entry.d.ts | 8 + .../out/providers/transformers/entry.js | 26 + .../fast-glob/out/readers/async.d.ts | 10 + .../fast-glob/out/readers/async.js | 35 + .../fast-glob/out/readers/reader.d.ts | 15 + .../fast-glob/out/readers/reader.js | 33 + .../fast-glob/out/readers/stream.d.ts | 14 + .../fast-glob/out/readers/stream.js | 55 + .../fast-glob/out/readers/sync.d.ts | 12 + .../fast-glob/out/readers/sync.js | 43 + .../node_modules/fast-glob/out/settings.d.ts | 164 ++ .../node_modules/fast-glob/out/settings.js | 59 + .../fast-glob/out/types/index.d.ts | 31 + .../node_modules/fast-glob/out/types/index.js | 2 + .../fast-glob/out/utils/array.d.ts | 2 + .../node_modules/fast-glob/out/utils/array.js | 22 + .../fast-glob/out/utils/errno.d.ts | 2 + .../node_modules/fast-glob/out/utils/errno.js | 7 + .../node_modules/fast-glob/out/utils/fs.d.ts | 4 + .../node_modules/fast-glob/out/utils/fs.js | 19 + .../fast-glob/out/utils/index.d.ts | 8 + .../node_modules/fast-glob/out/utils/index.js | 17 + .../fast-glob/out/utils/path.d.ts | 13 + .../node_modules/fast-glob/out/utils/path.js | 68 + .../fast-glob/out/utils/pattern.d.ts | 49 + .../fast-glob/out/utils/pattern.js | 206 ++ .../fast-glob/out/utils/stream.d.ts | 4 + .../fast-glob/out/utils/stream.js | 17 + .../fast-glob/out/utils/string.d.ts | 2 + .../fast-glob/out/utils/string.js | 11 + .../node_modules/fast-glob/package.json | 81 + .../node_modules/fastq/.github/dependabot.yml | 11 + .../fastq/.github/workflows/ci.yml | 75 + .../node_modules/fastq/LICENSE | 13 + .../node_modules/fastq/README.md | 312 ++ .../node_modules/fastq/SECURITY.md | 15 + .../node_modules/fastq/bench.js | 66 + .../node_modules/fastq/example.js | 14 + .../node_modules/fastq/example.mjs | 11 + .../node_modules/fastq/index.d.ts | 57 + .../node_modules/fastq/package.json | 53 + .../node_modules/fastq/queue.js | 311 ++ .../node_modules/fastq/test/example.ts | 83 + .../node_modules/fastq/test/promise.js | 291 ++ .../node_modules/fastq/test/test.js | 653 +++++ .../node_modules/fastq/test/tsconfig.json | 11 + .../node_modules/fill-range/LICENSE | 21 + .../node_modules/fill-range/README.md | 237 ++ .../node_modules/fill-range/index.js | 248 ++ .../node_modules/fill-range/package.json | 74 + .../node_modules/get-stream/buffer-stream.js | 52 + .../node_modules/get-stream/index.d.ts | 105 + .../node_modules/get-stream/index.js | 61 + .../node_modules/get-stream/license | 9 + .../node_modules/get-stream/package.json | 47 + .../node_modules/get-stream/readme.md | 124 + .../node_modules/glob-parent/CHANGELOG.md | 110 + .../node_modules/glob-parent/LICENSE | 15 + .../node_modules/glob-parent/README.md | 
137 + .../node_modules/glob-parent/index.js | 42 + .../node_modules/glob-parent/package.json | 48 + .../node_modules/human-signals/CHANGELOG.md | 11 + .../node_modules/human-signals/LICENSE | 201 ++ .../node_modules/human-signals/README.md | 165 ++ .../human-signals/build/src/core.js | 273 ++ .../human-signals/build/src/core.js.map | 1 + .../human-signals/build/src/main.d.ts | 52 + .../human-signals/build/src/main.js | 71 + .../human-signals/build/src/main.js.map | 1 + .../human-signals/build/src/realtime.js | 19 + .../human-signals/build/src/realtime.js.map | 1 + .../human-signals/build/src/signals.js | 35 + .../human-signals/build/src/signals.js.map | 1 + .../node_modules/human-signals/package.json | 64 + .../node_modules/is-extglob/LICENSE | 21 + .../node_modules/is-extglob/README.md | 107 + .../node_modules/is-extglob/index.js | 20 + .../node_modules/is-extglob/package.json | 69 + .../node_modules/is-glob/LICENSE | 21 + .../node_modules/is-glob/README.md | 206 ++ .../node_modules/is-glob/index.js | 150 + .../node_modules/is-glob/package.json | 81 + .../node_modules/is-number/LICENSE | 21 + .../node_modules/is-number/README.md | 187 ++ .../node_modules/is-number/index.js | 18 + .../node_modules/is-number/package.json | 82 + .../node_modules/is-stream/index.d.ts | 79 + .../node_modules/is-stream/index.js | 28 + .../node_modules/is-stream/license | 9 + .../node_modules/is-stream/package.json | 42 + .../node_modules/is-stream/readme.md | 60 + .../node_modules/isexe/.npmignore | 2 + .../node_modules/isexe/LICENSE | 15 + .../node_modules/isexe/README.md | 51 + .../node_modules/isexe/index.js | 57 + .../node_modules/isexe/mode.js | 41 + .../node_modules/isexe/package.json | 31 + .../node_modules/isexe/test/basic.js | 221 ++ .../node_modules/isexe/windows.js | 42 + .../node_modules/merge-stream/LICENSE | 21 + .../node_modules/merge-stream/README.md | 78 + .../node_modules/merge-stream/index.js | 41 + .../node_modules/merge-stream/package.json | 19 + .../node_modules/merge2/LICENSE | 21 + .../node_modules/merge2/README.md | 144 + .../node_modules/merge2/index.js | 144 + .../node_modules/merge2/package.json | 43 + .../node_modules/micromatch/LICENSE | 21 + .../node_modules/micromatch/README.md | 1024 +++++++ .../node_modules/micromatch/index.js | 474 +++ .../node_modules/micromatch/package.json | 119 + .../node_modules/mimic-fn/index.d.ts | 54 + .../node_modules/mimic-fn/index.js | 13 + .../node_modules/mimic-fn/license | 9 + .../node_modules/mimic-fn/package.json | 42 + .../node_modules/mimic-fn/readme.md | 69 + .../node_modules/npm-mas-mas/.travis.yml | 15 + .../node_modules/npm-mas-mas/LICENSE | 21 + .../node_modules/npm-mas-mas/Makefile | 21 + .../node_modules/npm-mas-mas/README | 57 + .../npm-mas-mas/cmaki/.travis.yml | 5 + .../npm-mas-mas/cmaki/GitUtils.cmake | 157 + .../node_modules/npm-mas-mas/cmaki/LICENSE | 22 + .../node_modules/npm-mas-mas/cmaki/README.md | 4 + .../npm-mas-mas/cmaki/Utils.cmake | 32 + .../cmaki/ci/detect_operative_system.sh | 14 + .../npm-mas-mas/cmaki/cmaki.cmake | 529 ++++ .../npm-mas-mas/cmaki/facts/facts.cmake | 735 +++++ .../npm-mas-mas/cmaki/init/.clang-format | 66 + .../npm-mas-mas/cmaki/junit/CTest2JUnit.xsl | 120 + .../npm-mas-mas/cmaki/junit/README.md | 3 + .../npm-mas-mas/cmaki_docker/.travis.yml | 4 + .../npm-mas-mas/cmaki_docker/LICENSE | 21 + .../npm-mas-mas/cmaki_docker/README.md | 11 + .../npm-mas-mas/cmaki_docker/build.sh | 40 + .../cmaki_generator/CMakeLists.txt | 95 + .../npm-mas-mas/cmaki_generator/LICENSE | 22 + 
.../npm-mas-mas/cmaki_generator/README.md | 22 + .../npm-mas-mas/cmaki_generator/build | 10 + .../npm-mas-mas/cmaki_generator/build.cmd | 11 + .../npm-mas-mas/cmaki_generator/build.py | 757 +++++ .../cmaki_generator/check_remote_version.py | 233 ++ .../npm-mas-mas/cmaki_generator/common.yml | 498 ++++ .../cmaki_generator/compilation.py | 238 ++ .../cmaki_generator/download_package.py | 11 + .../cmaki_generator/get_package.py | 26 + .../cmaki_generator/get_return_code.py | 35 + .../cmaki_generator/gwen/CMakeLists.txt | 47 + .../cmaki_generator/hash_version.py | 172 ++ .../cmaki_generator/junit/CTest2JUnit.xsl | 120 + .../cmaki_generator/junit/README.md | 3 + .../librocket/Build/CMakeLists.txt | 687 +++++ .../cmaki_generator/librocket/CMakeLists.txt | 2 + .../cmaki_generator/noise/CMakeLists.txt | 26 + .../ois/demos/FFConsoleDemo.cpp | 1147 ++++++++ .../cmaki_generator/ois/demos/Makefile.am | 11 + .../cmaki_generator/ois/demos/OISConsole.cpp | 459 +++ .../ois/src/linux/LinuxForceFeedback.cpp | 563 ++++ .../ois/src/linux/LinuxJoyStickEvents.cpp | 308 ++ .../cmaki_generator/oxygine/CMakeLists.txt | 546 ++++ .../cmaki_generator/packages/assimp.yml | 13 + .../cmaki_generator/packages/box2d.yml | 23 + .../cmaki_generator/packages/bullet2.yml | 54 + .../cmaki_generator/packages/cryptopp.yml | 70 + .../packages/dune-freetype.yml | 28 + .../cmaki_generator/packages/dune-glew.yml | 29 + .../cmaki_generator/packages/dune-zlib.yml | 38 + .../cmaki_generator/packages/fmod.yml | 20 + .../cmaki_generator/packages/freeimage.yml | 36 + .../packages/freeimage_cmake.yml | 40 + .../cmaki_generator/packages/google-gmock.yml | 61 + .../cmaki_generator/packages/gwen.yml | 11 + .../cmaki_generator/packages/haxx-libcurl.yml | 71 + .../cmaki_generator/packages/json.yml | 26 + .../cmaki_generator/packages/librocket.yml | 24 + .../cmaki_generator/packages/msgpack.yml | 10 + .../cmaki_generator/packages/noise.yml | 11 + .../cmaki_generator/packages/ois.yml | 19 + .../cmaki_generator/packages/openssl.yml | 24 + .../cmaki_generator/packages/oxygine.yml | 25 + .../cmaki_generator/packages/paho-mqtt3.yml | 22 + .../cmaki_generator/packages/paho-mqttpp3.yml | 21 + .../cmaki_generator/packages/pugixml.yml | 11 + .../cmaki_generator/packages/python.yml | 21 + .../cmaki_generator/packages/raknet.yml | 11 + .../packages/restclient-cpp.yml | 17 + .../cmaki_generator/packages/sdl2.yml | 38 + .../cmaki_generator/packages/spdlog.yml | 14 + .../cmaki_generator/packages/tbb.yml | 49 + .../cmaki_generator/packages/yamlcpp.yml | 16 + .../npm-mas-mas/cmaki_generator/packing.py | 139 + .../paho-mqttpp3/CMakeLists.txt | 75 + .../paho-mqttpp3/src/CMakeLists.txt | 161 + .../npm-mas-mas/cmaki_generator/pipeline.py | 287 ++ .../npm-mas-mas/cmaki_generator/prepare.py | 72 + .../npm-mas-mas/cmaki_generator/purge.py | 36 + .../raknet/Lib/LibStatic/CMakeLists.txt | 34 + .../raknet/Source/CCRakNetSlidingWindow.cpp | 372 +++ .../raknet/Source/ReplicaManager3.cpp | 2593 +++++++++++++++++ .../npm-mas-mas/cmaki_generator/run.sh | 10 + .../npm-mas-mas/cmaki_generator/run_test.sh | 27 + .../npm-mas-mas/cmaki_generator/run_tests.py | 175 ++ .../cmaki_generator/save_package.py | 31 + .../sdl2-emscripten/CMakeLists.txt | 1366 +++++++++ .../cmaki_generator/sdl2/CMakeLists.txt | 1849 ++++++++++++ .../npm-mas-mas/cmaki_generator/sync.sh | 12 + .../cmaki_generator/third_party.py | 1508 ++++++++++ .../cmaki_generator/unittest/CMakeLists.txt | 30 + .../npm-mas-mas/cmaki_generator/upload.py | 35 + .../cmaki_generator/upload_package.py | 48 + 
.../npm-mas-mas/cmaki_generator/utils.py | 531 ++++ .../npm-mas-mas/cmaki_identifier/.travis.yml | 12 + .../cmaki_identifier/CMakeLists.txt | 6 + .../npm-mas-mas/cmaki_identifier/README.md | 19 + .../cmaki_identifier/cmaki_emulator.sh | 36 + .../cmaki_identifier/cmaki_identifier.cmake | 12 + .../cmaki_identifier/cmaki_identifier.sh | 14 + .../npm-mas-mas/cmaki_identifier/npm-do | 3 + .../npm-mas-mas/cmaki_identifier/package.json | 30 + .../npm-mas-mas/cmaki_identifier/setup.cmd | 7 + .../npm-mas-mas/cmaki_identifier/setup.sh | 8 + .../cmaki_identifier/tests/CMakeLists.txt | 33 + .../tests/cmaki_identifier.cpp | 345 +++ .../npm-mas-mas/cmaki_scripts/.travis.yml | 5 + .../npm-mas-mas/cmaki_scripts/LICENSE | 21 + .../npm-mas-mas/cmaki_scripts/README.md | 9 + .../npm-mas-mas/cmaki_scripts/bootstrap.cmd | 15 + .../npm-mas-mas/cmaki_scripts/ci.cmd | 40 + .../npm-mas-mas/cmaki_scripts/ci.sh | 46 + .../npm-mas-mas/cmaki_scripts/clean.cmd | 3 + .../npm-mas-mas/cmaki_scripts/clean.sh | 16 + .../npm-mas-mas/cmaki_scripts/cmaki.cmd | 22 + .../npm-mas-mas/cmaki_scripts/cmaki.js | 193 ++ .../cmaki_scripts/cmaki_depends.cmd | 7 + .../cmaki_scripts/cmaki_depends.sh | 50 + .../npm-mas-mas/cmaki_scripts/compile.cmd | 14 + .../npm-mas-mas/cmaki_scripts/compile.sh | 16 + .../cmaki_scripts/create_package.cmd | 28 + .../cmaki_scripts/create_package.sh | 15 + .../npm-mas-mas/cmaki_scripts/docker.sh | 22 + .../cmaki_scripts/head_detached.cmd | 6 + .../cmaki_scripts/head_detached.sh | 7 + .../npm-mas-mas/cmaki_scripts/init.sh | 18 + .../cmaki_scripts/make_artifact.cmd | 30 + .../cmaki_scripts/make_artifact.sh | 18 + .../npm-mas-mas/cmaki_scripts/publish.cmd | 3 + .../npm-mas-mas/cmaki_scripts/publish.sh | 3 + .../npm-mas-mas/cmaki_scripts/replace.sh | 44 + .../npm-mas-mas/cmaki_scripts/run.cmd | 5 + .../npm-mas-mas/cmaki_scripts/search.sh | 4 + .../npm-mas-mas/cmaki_scripts/setup.cmd | 64 + .../npm-mas-mas/cmaki_scripts/setup.sh | 54 + .../npm-mas-mas/cmaki_scripts/test.cmd | 15 + .../npm-mas-mas/cmaki_scripts/test.sh | 52 + .../npm-mas-mas/cmaki_scripts/upload.cmd | 29 + .../npm-mas-mas/cmaki_scripts/upload.sh | 12 + .../cmaki_scripts/upload_package.cmd | 5 + .../cmaki_scripts/upload_package.sh | 7 + .../npm-mas-mas/docker-compose.yml | 32 + .../docker/Dockerfile.android-arm64 | 9 + .../npm-mas-mas/docker/Dockerfile.linux-x64 | 16 + .../npm-mas-mas/docker/Dockerfile.windows-x64 | 9 + .../npm-mas-mas/docker/entrypoint.sh | 21 + .../node_modules/npm-mas-mas/package.json | 29 + .../npm-mas-mas/servfactor/Dockerfile | 15 + .../npm-mas-mas/servfactor/Makefile | 3 + .../npm-mas-mas/servfactor/NOTES.md | 4 + .../npm-mas-mas/servfactor/README.md | 10 + .../npm-mas-mas/servfactor/docker-compose.yml | 11 + .../npm-mas-mas/servfactor/download.php | 58 + .../npm-mas-mas/servfactor/index.php | 227 ++ .../npm-mas-mas/servfactor/packages/README.md | 2 + .../npm-mas-mas/servfactor/stats.php | 68 + .../npm-mas-mas/servfactor/upload.php | 76 + .../npm-mas-mas/servfactor/util.php | 2584 ++++++++++++++++ .../node_modules/npm-run-path/index.d.ts | 89 + .../node_modules/npm-run-path/index.js | 47 + .../node_modules/npm-run-path/license | 9 + .../node_modules/npm-run-path/package.json | 44 + .../node_modules/npm-run-path/readme.md | 115 + .../node_modules/onetime/index.d.ts | 64 + .../node_modules/onetime/index.js | 44 + .../node_modules/onetime/license | 9 + .../node_modules/onetime/package.json | 43 + .../node_modules/onetime/readme.md | 94 + .../node_modules/path-key/index.d.ts | 40 + .../node_modules/path-key/index.js | 16 + 
.../node_modules/path-key/license | 9 + .../node_modules/path-key/package.json | 39 + .../node_modules/path-key/readme.md | 61 + .../node_modules/picomatch/CHANGELOG.md | 136 + .../node_modules/picomatch/LICENSE | 21 + .../node_modules/picomatch/README.md | 708 +++++ .../node_modules/picomatch/index.js | 3 + .../node_modules/picomatch/lib/constants.js | 179 ++ .../node_modules/picomatch/lib/parse.js | 1091 +++++++ .../node_modules/picomatch/lib/picomatch.js | 342 +++ .../node_modules/picomatch/lib/scan.js | 391 +++ .../node_modules/picomatch/lib/utils.js | 64 + .../node_modules/picomatch/package.json | 81 + .../node_modules/queue-microtask/LICENSE | 20 + .../node_modules/queue-microtask/README.md | 90 + .../node_modules/queue-microtask/index.d.ts | 2 + .../node_modules/queue-microtask/index.js | 9 + .../node_modules/queue-microtask/package.json | 55 + .../reusify/.github/dependabot.yml | 7 + .../reusify/.github/workflows/ci.yml | 96 + .../node_modules/reusify/LICENSE | 22 + .../node_modules/reusify/README.md | 139 + .../node_modules/reusify/SECURITY.md | 15 + .../benchmarks/createNoCodeFunction.js | 30 + .../node_modules/reusify/benchmarks/fib.js | 13 + .../reusify/benchmarks/reuseNoCodeFunction.js | 38 + .../node_modules/reusify/eslint.config.js | 14 + .../node_modules/reusify/package.json | 50 + .../node_modules/reusify/reusify.d.ts | 14 + .../node_modules/reusify/reusify.js | 33 + .../node_modules/reusify/test.js | 66 + .../node_modules/reusify/tsconfig.json | 11 + .../node_modules/run-parallel/LICENSE | 20 + .../node_modules/run-parallel/README.md | 85 + .../node_modules/run-parallel/index.js | 51 + .../node_modules/run-parallel/package.json | 58 + .../node_modules/shebang-command/index.js | 19 + .../node_modules/shebang-command/license | 9 + .../node_modules/shebang-command/package.json | 34 + .../node_modules/shebang-command/readme.md | 34 + .../node_modules/shebang-regex/index.d.ts | 22 + .../node_modules/shebang-regex/index.js | 2 + .../node_modules/shebang-regex/license | 9 + .../node_modules/shebang-regex/package.json | 35 + .../node_modules/shebang-regex/readme.md | 33 + .../node_modules/shelljs/LICENSE | 29 + .../node_modules/shelljs/README.md | 949 ++++++ .../node_modules/shelljs/global.js | 15 + .../node_modules/shelljs/make.js | 57 + .../node_modules/shelljs/package.json | 90 + .../node_modules/shelljs/plugin.js | 16 + .../node_modules/shelljs/shell.js | 216 ++ .../node_modules/shelljs/src/cat.js | 76 + .../node_modules/shelljs/src/cd.js | 40 + .../node_modules/shelljs/src/chmod.js | 222 ++ .../node_modules/shelljs/src/cmd.js | 138 + .../node_modules/shelljs/src/common.js | 545 ++++ .../node_modules/shelljs/src/cp.js | 314 ++ .../node_modules/shelljs/src/dirs.js | 210 ++ .../node_modules/shelljs/src/echo.js | 62 + .../node_modules/shelljs/src/error.js | 15 + .../node_modules/shelljs/src/errorCode.js | 10 + .../node_modules/shelljs/src/exec-child.js | 71 + .../node_modules/shelljs/src/exec.js | 255 ++ .../node_modules/shelljs/src/find.js | 66 + .../node_modules/shelljs/src/grep.js | 198 ++ .../node_modules/shelljs/src/head.js | 107 + .../node_modules/shelljs/src/ln.js | 75 + .../node_modules/shelljs/src/ls.js | 155 + .../node_modules/shelljs/src/mkdir.js | 102 + .../node_modules/shelljs/src/mv.js | 119 + .../node_modules/shelljs/src/popd.js | 1 + .../node_modules/shelljs/src/pushd.js | 1 + .../node_modules/shelljs/src/pwd.js | 16 + .../node_modules/shelljs/src/rm.js | 201 ++ .../node_modules/shelljs/src/sed.js | 95 + .../node_modules/shelljs/src/set.js | 55 + 
.../node_modules/shelljs/src/sort.js | 98 + .../node_modules/shelljs/src/tail.js | 90 + .../node_modules/shelljs/src/tempdir.js | 75 + .../node_modules/shelljs/src/test.js | 86 + .../node_modules/shelljs/src/to.js | 38 + .../node_modules/shelljs/src/toEnd.js | 37 + .../node_modules/shelljs/src/touch.js | 117 + .../node_modules/shelljs/src/uniq.js | 93 + .../node_modules/shelljs/src/which.js | 119 + .../node_modules/signal-exit/LICENSE.txt | 16 + .../node_modules/signal-exit/README.md | 39 + .../node_modules/signal-exit/index.js | 202 ++ .../node_modules/signal-exit/package.json | 38 + .../node_modules/signal-exit/signals.js | 53 + .../node_modules/strip-final-newline/index.js | 16 + .../node_modules/strip-final-newline/license | 9 + .../strip-final-newline/package.json | 40 + .../strip-final-newline/readme.md | 30 + .../node_modules/to-regex-range/LICENSE | 21 + .../node_modules/to-regex-range/README.md | 305 ++ .../node_modules/to-regex-range/index.js | 288 ++ .../node_modules/to-regex-range/package.json | 88 + .../node_modules/which/CHANGELOG.md | 166 ++ .../node_modules/which/LICENSE | 15 + .../node_modules/which/README.md | 54 + .../node_modules/which/bin/node-which | 52 + .../node_modules/which/package.json | 43 + .../node_modules/which/which.js | 125 + .../npm-mas-mas/cmaki_identifier/npm-do | 3 + .../cmaki_identifier/package-lock.json | 480 +++ .../npm-mas-mas/cmaki_identifier/package.json | 30 + .../npm-mas-mas/cmaki_identifier/setup.cmd | 7 + .../npm-mas-mas/cmaki_identifier/setup.sh | 8 + .../cmaki_identifier/tests/CMakeLists.txt | 33 + .../tests/cmaki_identifier.cpp | 345 +++ .../npm-mas-mas/cmaki_scripts/.travis.yml | 5 + .../npm-mas-mas/cmaki_scripts/LICENSE | 21 + .../npm-mas-mas/cmaki_scripts/README.md | 9 + .../npm-mas-mas/cmaki_scripts/bootstrap.cmd | 15 + node_modules/npm-mas-mas/cmaki_scripts/ci.cmd | 40 + node_modules/npm-mas-mas/cmaki_scripts/ci.sh | 46 + .../npm-mas-mas/cmaki_scripts/clean.cmd | 3 + .../npm-mas-mas/cmaki_scripts/clean.sh | 16 + .../npm-mas-mas/cmaki_scripts/cmaki.cmd | 22 + .../npm-mas-mas/cmaki_scripts/cmaki.js | 193 ++ .../cmaki_scripts/cmaki_depends.cmd | 7 + .../cmaki_scripts/cmaki_depends.sh | 50 + .../npm-mas-mas/cmaki_scripts/compile.cmd | 14 + .../npm-mas-mas/cmaki_scripts/compile.sh | 16 + .../cmaki_scripts/create_package.cmd | 28 + .../cmaki_scripts/create_package.sh | 15 + .../npm-mas-mas/cmaki_scripts/docker.sh | 22 + .../cmaki_scripts/head_detached.cmd | 6 + .../cmaki_scripts/head_detached.sh | 7 + .../npm-mas-mas/cmaki_scripts/init.sh | 18 + .../cmaki_scripts/make_artifact.cmd | 30 + .../cmaki_scripts/make_artifact.sh | 18 + .../npm-mas-mas/cmaki_scripts/publish.cmd | 3 + .../npm-mas-mas/cmaki_scripts/publish.sh | 3 + .../npm-mas-mas/cmaki_scripts/replace.sh | 44 + .../npm-mas-mas/cmaki_scripts/run.cmd | 5 + .../npm-mas-mas/cmaki_scripts/search.sh | 4 + .../npm-mas-mas/cmaki_scripts/setup.cmd | 64 + .../npm-mas-mas/cmaki_scripts/setup.sh | 54 + .../npm-mas-mas/cmaki_scripts/test.cmd | 15 + .../npm-mas-mas/cmaki_scripts/test.sh | 52 + .../npm-mas-mas/cmaki_scripts/upload.cmd | 29 + .../npm-mas-mas/cmaki_scripts/upload.sh | 12 + .../cmaki_scripts/upload_package.cmd | 5 + .../cmaki_scripts/upload_package.sh | 7 + node_modules/npm-mas-mas/docker-compose.yml | 32 + .../docker/Dockerfile.android-arm64 | 9 + .../npm-mas-mas/docker/Dockerfile.linux-x64 | 16 + .../npm-mas-mas/docker/Dockerfile.windows-x64 | 9 + node_modules/npm-mas-mas/docker/entrypoint.sh | 21 + node_modules/npm-mas-mas/package.json | 29 + .../npm-mas-mas/servfactor/Dockerfile | 
15 + node_modules/npm-mas-mas/servfactor/Makefile | 3 + node_modules/npm-mas-mas/servfactor/NOTES.md | 4 + node_modules/npm-mas-mas/servfactor/README.md | 10 + .../npm-mas-mas/servfactor/docker-compose.yml | 11 + .../npm-mas-mas/servfactor/download.php | 58 + node_modules/npm-mas-mas/servfactor/index.php | 227 ++ .../npm-mas-mas/servfactor/packages/README.md | 2 + node_modules/npm-mas-mas/servfactor/stats.php | 68 + .../npm-mas-mas/servfactor/upload.php | 76 + node_modules/npm-mas-mas/servfactor/util.php | 2584 ++++++++++++++++ node_modules/npm-run-path/index.d.ts | 89 + node_modules/npm-run-path/index.js | 47 + node_modules/npm-run-path/license | 9 + node_modules/npm-run-path/package.json | 44 + node_modules/npm-run-path/readme.md | 115 + node_modules/onetime/index.d.ts | 64 + node_modules/onetime/index.js | 44 + node_modules/onetime/license | 9 + node_modules/onetime/package.json | 43 + node_modules/onetime/readme.md | 94 + node_modules/path-key/index.d.ts | 40 + node_modules/path-key/index.js | 16 + node_modules/path-key/license | 9 + node_modules/path-key/package.json | 39 + node_modules/path-key/readme.md | 61 + node_modules/picomatch/CHANGELOG.md | 136 + node_modules/picomatch/LICENSE | 21 + node_modules/picomatch/README.md | 708 +++++ node_modules/picomatch/index.js | 3 + node_modules/picomatch/lib/constants.js | 179 ++ node_modules/picomatch/lib/parse.js | 1091 +++++++ node_modules/picomatch/lib/picomatch.js | 342 +++ node_modules/picomatch/lib/scan.js | 391 +++ node_modules/picomatch/lib/utils.js | 64 + node_modules/picomatch/package.json | 81 + node_modules/queue-microtask/LICENSE | 20 + node_modules/queue-microtask/README.md | 90 + node_modules/queue-microtask/index.d.ts | 2 + node_modules/queue-microtask/index.js | 9 + node_modules/queue-microtask/package.json | 55 + node_modules/reusify/.github/dependabot.yml | 7 + node_modules/reusify/.github/workflows/ci.yml | 96 + node_modules/reusify/LICENSE | 22 + node_modules/reusify/README.md | 139 + node_modules/reusify/SECURITY.md | 15 + .../benchmarks/createNoCodeFunction.js | 30 + node_modules/reusify/benchmarks/fib.js | 13 + .../reusify/benchmarks/reuseNoCodeFunction.js | 38 + node_modules/reusify/eslint.config.js | 14 + node_modules/reusify/package.json | 50 + node_modules/reusify/reusify.d.ts | 14 + node_modules/reusify/reusify.js | 33 + node_modules/reusify/test.js | 66 + node_modules/reusify/tsconfig.json | 11 + node_modules/run-parallel/LICENSE | 20 + node_modules/run-parallel/README.md | 85 + node_modules/run-parallel/index.js | 51 + node_modules/run-parallel/package.json | 58 + node_modules/shebang-command/index.js | 19 + node_modules/shebang-command/license | 9 + node_modules/shebang-command/package.json | 34 + node_modules/shebang-command/readme.md | 34 + node_modules/shebang-regex/index.d.ts | 22 + node_modules/shebang-regex/index.js | 2 + node_modules/shebang-regex/license | 9 + node_modules/shebang-regex/package.json | 35 + node_modules/shebang-regex/readme.md | 33 + node_modules/shelljs/LICENSE | 29 + node_modules/shelljs/README.md | 949 ++++++ node_modules/shelljs/global.js | 15 + node_modules/shelljs/make.js | 57 + node_modules/shelljs/package.json | 90 + node_modules/shelljs/plugin.js | 16 + node_modules/shelljs/shell.js | 216 ++ node_modules/shelljs/src/cat.js | 76 + node_modules/shelljs/src/cd.js | 40 + node_modules/shelljs/src/chmod.js | 222 ++ node_modules/shelljs/src/cmd.js | 138 + node_modules/shelljs/src/common.js | 545 ++++ node_modules/shelljs/src/cp.js | 314 ++ node_modules/shelljs/src/dirs.js | 210 ++ 
node_modules/shelljs/src/echo.js | 62 + node_modules/shelljs/src/error.js | 15 + node_modules/shelljs/src/errorCode.js | 10 + node_modules/shelljs/src/exec-child.js | 71 + node_modules/shelljs/src/exec.js | 255 ++ node_modules/shelljs/src/find.js | 66 + node_modules/shelljs/src/grep.js | 198 ++ node_modules/shelljs/src/head.js | 107 + node_modules/shelljs/src/ln.js | 75 + node_modules/shelljs/src/ls.js | 155 + node_modules/shelljs/src/mkdir.js | 102 + node_modules/shelljs/src/mv.js | 119 + node_modules/shelljs/src/popd.js | 1 + node_modules/shelljs/src/pushd.js | 1 + node_modules/shelljs/src/pwd.js | 16 + node_modules/shelljs/src/rm.js | 201 ++ node_modules/shelljs/src/sed.js | 95 + node_modules/shelljs/src/set.js | 55 + node_modules/shelljs/src/sort.js | 98 + node_modules/shelljs/src/tail.js | 90 + node_modules/shelljs/src/tempdir.js | 75 + node_modules/shelljs/src/test.js | 86 + node_modules/shelljs/src/to.js | 38 + node_modules/shelljs/src/toEnd.js | 37 + node_modules/shelljs/src/touch.js | 117 + node_modules/shelljs/src/uniq.js | 93 + node_modules/shelljs/src/which.js | 119 + node_modules/signal-exit/LICENSE.txt | 16 + node_modules/signal-exit/README.md | 39 + node_modules/signal-exit/index.js | 202 ++ node_modules/signal-exit/package.json | 38 + node_modules/signal-exit/signals.js | 53 + node_modules/strip-final-newline/index.js | 16 + node_modules/strip-final-newline/license | 9 + node_modules/strip-final-newline/package.json | 40 + node_modules/strip-final-newline/readme.md | 30 + node_modules/to-regex-range/LICENSE | 21 + node_modules/to-regex-range/README.md | 305 ++ node_modules/to-regex-range/index.js | 288 ++ node_modules/to-regex-range/package.json | 88 + node_modules/which/CHANGELOG.md | 166 ++ node_modules/which/LICENSE | 15 + node_modules/which/README.md | 54 + node_modules/which/bin/node-which | 52 + node_modules/which/package.json | 43 + node_modules/which/which.js | 125 + package-lock.json | 489 ++++ 1036 files changed, 104935 insertions(+) create mode 120000 node_modules/.bin/cmaki create mode 120000 node_modules/.bin/node-which create mode 100644 node_modules/.package-lock.json create mode 100644 node_modules/@nodelib/fs.scandir/LICENSE create mode 100644 node_modules/@nodelib/fs.scandir/README.md create mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.js create mode 100644 node_modules/@nodelib/fs.scandir/out/constants.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/constants.js create mode 100644 node_modules/@nodelib/fs.scandir/out/index.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/index.js create mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.js create mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.js create mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.js create mode 100644 node_modules/@nodelib/fs.scandir/out/settings.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/settings.js create mode 100644 node_modules/@nodelib/fs.scandir/out/types/index.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/types/index.js create mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.js 
create mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.d.ts create mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.js create mode 100644 node_modules/@nodelib/fs.scandir/package.json create mode 100644 node_modules/@nodelib/fs.stat/LICENSE create mode 100644 node_modules/@nodelib/fs.stat/README.md create mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts create mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.js create mode 100644 node_modules/@nodelib/fs.stat/out/index.d.ts create mode 100644 node_modules/@nodelib/fs.stat/out/index.js create mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.d.ts create mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.js create mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.d.ts create mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.js create mode 100644 node_modules/@nodelib/fs.stat/out/settings.d.ts create mode 100644 node_modules/@nodelib/fs.stat/out/settings.js create mode 100644 node_modules/@nodelib/fs.stat/out/types/index.d.ts create mode 100644 node_modules/@nodelib/fs.stat/out/types/index.js create mode 100644 node_modules/@nodelib/fs.stat/package.json create mode 100644 node_modules/@nodelib/fs.walk/LICENSE create mode 100644 node_modules/@nodelib/fs.walk/README.md create mode 100644 node_modules/@nodelib/fs.walk/out/index.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/index.js create mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.js create mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.js create mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.js create mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.js create mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.js create mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.js create mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.js create mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.js create mode 100644 node_modules/@nodelib/fs.walk/out/settings.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/settings.js create mode 100644 node_modules/@nodelib/fs.walk/out/types/index.d.ts create mode 100644 node_modules/@nodelib/fs.walk/out/types/index.js create mode 100644 node_modules/@nodelib/fs.walk/package.json create mode 100644 node_modules/braces/LICENSE create mode 100644 node_modules/braces/README.md create mode 100644 node_modules/braces/index.js create mode 100644 node_modules/braces/lib/compile.js create mode 100644 node_modules/braces/lib/constants.js create mode 100644 node_modules/braces/lib/expand.js create mode 100644 node_modules/braces/lib/parse.js create mode 100644 node_modules/braces/lib/stringify.js create mode 100644 node_modules/braces/lib/utils.js create mode 100644 node_modules/braces/package.json create mode 100644 node_modules/cross-spawn/LICENSE create mode 100644 
node_modules/cross-spawn/README.md create mode 100644 node_modules/cross-spawn/index.js create mode 100644 node_modules/cross-spawn/lib/enoent.js create mode 100644 node_modules/cross-spawn/lib/parse.js create mode 100644 node_modules/cross-spawn/lib/util/escape.js create mode 100644 node_modules/cross-spawn/lib/util/readShebang.js create mode 100644 node_modules/cross-spawn/lib/util/resolveCommand.js create mode 100644 node_modules/cross-spawn/package.json create mode 100644 node_modules/execa/index.d.ts create mode 100644 node_modules/execa/index.js create mode 100644 node_modules/execa/lib/command.js create mode 100644 node_modules/execa/lib/error.js create mode 100644 node_modules/execa/lib/kill.js create mode 100644 node_modules/execa/lib/promise.js create mode 100644 node_modules/execa/lib/stdio.js create mode 100644 node_modules/execa/lib/stream.js create mode 100644 node_modules/execa/license create mode 100644 node_modules/execa/package.json create mode 100644 node_modules/execa/readme.md create mode 100644 node_modules/fast-glob/LICENSE create mode 100644 node_modules/fast-glob/README.md create mode 100644 node_modules/fast-glob/out/index.d.ts create mode 100644 node_modules/fast-glob/out/index.js create mode 100644 node_modules/fast-glob/out/managers/tasks.d.ts create mode 100644 node_modules/fast-glob/out/managers/tasks.js create mode 100644 node_modules/fast-glob/out/providers/async.d.ts create mode 100644 node_modules/fast-glob/out/providers/async.js create mode 100644 node_modules/fast-glob/out/providers/filters/deep.d.ts create mode 100644 node_modules/fast-glob/out/providers/filters/deep.js create mode 100644 node_modules/fast-glob/out/providers/filters/entry.d.ts create mode 100644 node_modules/fast-glob/out/providers/filters/entry.js create mode 100644 node_modules/fast-glob/out/providers/filters/error.d.ts create mode 100644 node_modules/fast-glob/out/providers/filters/error.js create mode 100644 node_modules/fast-glob/out/providers/matchers/matcher.d.ts create mode 100644 node_modules/fast-glob/out/providers/matchers/matcher.js create mode 100644 node_modules/fast-glob/out/providers/matchers/partial.d.ts create mode 100644 node_modules/fast-glob/out/providers/matchers/partial.js create mode 100644 node_modules/fast-glob/out/providers/provider.d.ts create mode 100644 node_modules/fast-glob/out/providers/provider.js create mode 100644 node_modules/fast-glob/out/providers/stream.d.ts create mode 100644 node_modules/fast-glob/out/providers/stream.js create mode 100644 node_modules/fast-glob/out/providers/sync.d.ts create mode 100644 node_modules/fast-glob/out/providers/sync.js create mode 100644 node_modules/fast-glob/out/providers/transformers/entry.d.ts create mode 100644 node_modules/fast-glob/out/providers/transformers/entry.js create mode 100644 node_modules/fast-glob/out/readers/async.d.ts create mode 100644 node_modules/fast-glob/out/readers/async.js create mode 100644 node_modules/fast-glob/out/readers/reader.d.ts create mode 100644 node_modules/fast-glob/out/readers/reader.js create mode 100644 node_modules/fast-glob/out/readers/stream.d.ts create mode 100644 node_modules/fast-glob/out/readers/stream.js create mode 100644 node_modules/fast-glob/out/readers/sync.d.ts create mode 100644 node_modules/fast-glob/out/readers/sync.js create mode 100644 node_modules/fast-glob/out/settings.d.ts create mode 100644 node_modules/fast-glob/out/settings.js create mode 100644 node_modules/fast-glob/out/types/index.d.ts create mode 100644 node_modules/fast-glob/out/types/index.js 
create mode 100644 node_modules/fast-glob/out/utils/array.d.ts create mode 100644 node_modules/fast-glob/out/utils/array.js create mode 100644 node_modules/fast-glob/out/utils/errno.d.ts create mode 100644 node_modules/fast-glob/out/utils/errno.js create mode 100644 node_modules/fast-glob/out/utils/fs.d.ts create mode 100644 node_modules/fast-glob/out/utils/fs.js create mode 100644 node_modules/fast-glob/out/utils/index.d.ts create mode 100644 node_modules/fast-glob/out/utils/index.js create mode 100644 node_modules/fast-glob/out/utils/path.d.ts create mode 100644 node_modules/fast-glob/out/utils/path.js create mode 100644 node_modules/fast-glob/out/utils/pattern.d.ts create mode 100644 node_modules/fast-glob/out/utils/pattern.js create mode 100644 node_modules/fast-glob/out/utils/stream.d.ts create mode 100644 node_modules/fast-glob/out/utils/stream.js create mode 100644 node_modules/fast-glob/out/utils/string.d.ts create mode 100644 node_modules/fast-glob/out/utils/string.js create mode 100644 node_modules/fast-glob/package.json create mode 100644 node_modules/fastq/.github/dependabot.yml create mode 100644 node_modules/fastq/.github/workflows/ci.yml create mode 100644 node_modules/fastq/LICENSE create mode 100644 node_modules/fastq/README.md create mode 100644 node_modules/fastq/SECURITY.md create mode 100644 node_modules/fastq/bench.js create mode 100644 node_modules/fastq/example.js create mode 100644 node_modules/fastq/example.mjs create mode 100644 node_modules/fastq/index.d.ts create mode 100644 node_modules/fastq/package.json create mode 100644 node_modules/fastq/queue.js create mode 100644 node_modules/fastq/test/example.ts create mode 100644 node_modules/fastq/test/promise.js create mode 100644 node_modules/fastq/test/test.js create mode 100644 node_modules/fastq/test/tsconfig.json create mode 100644 node_modules/fill-range/LICENSE create mode 100644 node_modules/fill-range/README.md create mode 100644 node_modules/fill-range/index.js create mode 100644 node_modules/fill-range/package.json create mode 100644 node_modules/get-stream/buffer-stream.js create mode 100644 node_modules/get-stream/index.d.ts create mode 100644 node_modules/get-stream/index.js create mode 100644 node_modules/get-stream/license create mode 100644 node_modules/get-stream/package.json create mode 100644 node_modules/get-stream/readme.md create mode 100644 node_modules/glob-parent/CHANGELOG.md create mode 100644 node_modules/glob-parent/LICENSE create mode 100644 node_modules/glob-parent/README.md create mode 100644 node_modules/glob-parent/index.js create mode 100644 node_modules/glob-parent/package.json create mode 100644 node_modules/human-signals/CHANGELOG.md create mode 100644 node_modules/human-signals/LICENSE create mode 100644 node_modules/human-signals/README.md create mode 100644 node_modules/human-signals/build/src/core.js create mode 100644 node_modules/human-signals/build/src/core.js.map create mode 100644 node_modules/human-signals/build/src/main.d.ts create mode 100644 node_modules/human-signals/build/src/main.js create mode 100644 node_modules/human-signals/build/src/main.js.map create mode 100644 node_modules/human-signals/build/src/realtime.js create mode 100644 node_modules/human-signals/build/src/realtime.js.map create mode 100644 node_modules/human-signals/build/src/signals.js create mode 100644 node_modules/human-signals/build/src/signals.js.map create mode 100644 node_modules/human-signals/package.json create mode 100644 node_modules/is-extglob/LICENSE create mode 100644 
node_modules/is-extglob/README.md create mode 100644 node_modules/is-extglob/index.js create mode 100644 node_modules/is-extglob/package.json create mode 100644 node_modules/is-glob/LICENSE create mode 100644 node_modules/is-glob/README.md create mode 100644 node_modules/is-glob/index.js create mode 100644 node_modules/is-glob/package.json create mode 100644 node_modules/is-number/LICENSE create mode 100644 node_modules/is-number/README.md create mode 100644 node_modules/is-number/index.js create mode 100644 node_modules/is-number/package.json create mode 100644 node_modules/is-stream/index.d.ts create mode 100644 node_modules/is-stream/index.js create mode 100644 node_modules/is-stream/license create mode 100644 node_modules/is-stream/package.json create mode 100644 node_modules/is-stream/readme.md create mode 100644 node_modules/isexe/.npmignore create mode 100644 node_modules/isexe/LICENSE create mode 100644 node_modules/isexe/README.md create mode 100644 node_modules/isexe/index.js create mode 100644 node_modules/isexe/mode.js create mode 100644 node_modules/isexe/package.json create mode 100644 node_modules/isexe/test/basic.js create mode 100644 node_modules/isexe/windows.js create mode 100644 node_modules/merge-stream/LICENSE create mode 100644 node_modules/merge-stream/README.md create mode 100644 node_modules/merge-stream/index.js create mode 100644 node_modules/merge-stream/package.json create mode 100644 node_modules/merge2/LICENSE create mode 100644 node_modules/merge2/README.md create mode 100644 node_modules/merge2/index.js create mode 100644 node_modules/merge2/package.json create mode 100644 node_modules/metacommon/.travis.yml create mode 100644 node_modules/metacommon/README.md create mode 100644 node_modules/metacommon/cmaki.yml create mode 100644 node_modules/metacommon/common.h create mode 100644 node_modules/metacommon/compile.sh create mode 100644 node_modules/metacommon/package.json create mode 100644 node_modules/metacommon/setup.sh create mode 100755 node_modules/micromatch/LICENSE create mode 100644 node_modules/micromatch/README.md create mode 100644 node_modules/micromatch/index.js create mode 100644 node_modules/micromatch/package.json create mode 100644 node_modules/mimic-fn/index.d.ts create mode 100644 node_modules/mimic-fn/index.js create mode 100644 node_modules/mimic-fn/license create mode 100644 node_modules/mimic-fn/package.json create mode 100644 node_modules/mimic-fn/readme.md create mode 100644 node_modules/npm-mas-mas/.travis.yml create mode 100644 node_modules/npm-mas-mas/LICENSE create mode 100644 node_modules/npm-mas-mas/Makefile create mode 100644 node_modules/npm-mas-mas/README create mode 100644 node_modules/npm-mas-mas/cmaki/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki/GitUtils.cmake create mode 100644 node_modules/npm-mas-mas/cmaki/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki/README.md create mode 100644 node_modules/npm-mas-mas/cmaki/Utils.cmake create mode 100755 node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh create mode 100644 node_modules/npm-mas-mas/cmaki/cmaki.cmake create mode 100644 node_modules/npm-mas-mas/cmaki/facts/facts.cmake create mode 100644 node_modules/npm-mas-mas/cmaki/init/.clang-format create mode 100644 node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl create mode 100644 node_modules/npm-mas-mas/cmaki/junit/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_docker/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_docker/LICENSE create mode 100644 
node_modules/npm-mas-mas/cmaki_docker/README.md create mode 100755 node_modules/npm-mas-mas/cmaki_docker/build.sh create mode 100644 node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_generator/README.md create mode 100755 node_modules/npm-mas-mas/cmaki_generator/build create mode 100644 node_modules/npm-mas-mas/cmaki_generator/build.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_generator/build.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/common.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/compilation.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/download_package.py create mode 100755 node_modules/npm-mas-mas/cmaki_generator/get_package.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/get_return_code.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/hash_version.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl create mode 100644 node_modules/npm-mas-mas/cmaki_generator/junit/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am create mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/json.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml create mode 100644 
node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/python.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_generator/packing.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/pipeline.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/prepare.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/purge.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_generator/raknet/Source/ReplicaManager3.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_generator/run.sh create mode 100644 node_modules/npm-mas-mas/cmaki_generator/run_test.sh create mode 100644 node_modules/npm-mas-mas/cmaki_generator/run_tests.py create mode 100755 node_modules/npm-mas-mas/cmaki_generator/save_package.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/sync.sh create mode 100644 node_modules/npm-mas-mas/cmaki_generator/third_party.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_generator/upload.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/upload_package.py create mode 100644 node_modules/npm-mas-mas/cmaki_generator/utils.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/README.md create mode 160000 node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache create mode 120000 node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki create mode 120000 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts create mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js create mode 
100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js 
create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js create mode 
100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json create mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json create 
mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE create mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt create mode 
100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml create mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/ReplicaManager3.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py create mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh create mode 
100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php create mode 100755 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md create mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js 
create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md create 
mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md create mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/npm-do create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/package-lock.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/package.json create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/setup.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/setup.sh create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt create mode 100644 node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/.travis.yml create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/LICENSE create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/README.md create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/ci.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/ci.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/clean.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/clean.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/cmaki.js create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/compile.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/compile.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/create_package.sh create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/docker.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/init.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/publish.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/publish.sh create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/replace.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/run.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/search.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/setup.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/setup.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/test.cmd create mode 100755 node_modules/npm-mas-mas/cmaki_scripts/test.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/upload.cmd create mode 100755 
node_modules/npm-mas-mas/cmaki_scripts/upload.sh create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd create mode 100644 node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh create mode 100644 node_modules/npm-mas-mas/docker-compose.yml create mode 100644 node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 create mode 100644 node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 create mode 100644 node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 create mode 100755 node_modules/npm-mas-mas/docker/entrypoint.sh create mode 100644 node_modules/npm-mas-mas/package.json create mode 100644 node_modules/npm-mas-mas/servfactor/Dockerfile create mode 100644 node_modules/npm-mas-mas/servfactor/Makefile create mode 100644 node_modules/npm-mas-mas/servfactor/NOTES.md create mode 100644 node_modules/npm-mas-mas/servfactor/README.md create mode 100644 node_modules/npm-mas-mas/servfactor/docker-compose.yml create mode 100755 node_modules/npm-mas-mas/servfactor/download.php create mode 100755 node_modules/npm-mas-mas/servfactor/index.php create mode 100644 node_modules/npm-mas-mas/servfactor/packages/README.md create mode 100644 node_modules/npm-mas-mas/servfactor/stats.php create mode 100644 node_modules/npm-mas-mas/servfactor/upload.php create mode 100755 node_modules/npm-mas-mas/servfactor/util.php create mode 100644 node_modules/npm-run-path/index.d.ts create mode 100644 node_modules/npm-run-path/index.js create mode 100644 node_modules/npm-run-path/license create mode 100644 node_modules/npm-run-path/package.json create mode 100644 node_modules/npm-run-path/readme.md create mode 100644 node_modules/onetime/index.d.ts create mode 100644 node_modules/onetime/index.js create mode 100644 node_modules/onetime/license create mode 100644 node_modules/onetime/package.json create mode 100644 node_modules/onetime/readme.md create mode 100644 node_modules/path-key/index.d.ts create mode 100644 node_modules/path-key/index.js create mode 100644 node_modules/path-key/license create mode 100644 node_modules/path-key/package.json create mode 100644 node_modules/path-key/readme.md create mode 100644 node_modules/picomatch/CHANGELOG.md create mode 100644 node_modules/picomatch/LICENSE create mode 100644 node_modules/picomatch/README.md create mode 100644 node_modules/picomatch/index.js create mode 100644 node_modules/picomatch/lib/constants.js create mode 100644 node_modules/picomatch/lib/parse.js create mode 100644 node_modules/picomatch/lib/picomatch.js create mode 100644 node_modules/picomatch/lib/scan.js create mode 100644 node_modules/picomatch/lib/utils.js create mode 100644 node_modules/picomatch/package.json create mode 100755 node_modules/queue-microtask/LICENSE create mode 100644 node_modules/queue-microtask/README.md create mode 100644 node_modules/queue-microtask/index.d.ts create mode 100644 node_modules/queue-microtask/index.js create mode 100644 node_modules/queue-microtask/package.json create mode 100644 node_modules/reusify/.github/dependabot.yml create mode 100644 node_modules/reusify/.github/workflows/ci.yml create mode 100644 node_modules/reusify/LICENSE create mode 100644 node_modules/reusify/README.md create mode 100644 node_modules/reusify/SECURITY.md create mode 100644 node_modules/reusify/benchmarks/createNoCodeFunction.js create mode 100644 node_modules/reusify/benchmarks/fib.js create mode 100644 node_modules/reusify/benchmarks/reuseNoCodeFunction.js create mode 100644 node_modules/reusify/eslint.config.js create mode 100644 
node_modules/reusify/package.json create mode 100644 node_modules/reusify/reusify.d.ts create mode 100644 node_modules/reusify/reusify.js create mode 100644 node_modules/reusify/test.js create mode 100644 node_modules/reusify/tsconfig.json create mode 100644 node_modules/run-parallel/LICENSE create mode 100644 node_modules/run-parallel/README.md create mode 100644 node_modules/run-parallel/index.js create mode 100644 node_modules/run-parallel/package.json create mode 100644 node_modules/shebang-command/index.js create mode 100644 node_modules/shebang-command/license create mode 100644 node_modules/shebang-command/package.json create mode 100644 node_modules/shebang-command/readme.md create mode 100644 node_modules/shebang-regex/index.d.ts create mode 100644 node_modules/shebang-regex/index.js create mode 100644 node_modules/shebang-regex/license create mode 100644 node_modules/shebang-regex/package.json create mode 100644 node_modules/shebang-regex/readme.md create mode 100644 node_modules/shelljs/LICENSE create mode 100644 node_modules/shelljs/README.md create mode 100644 node_modules/shelljs/global.js create mode 100644 node_modules/shelljs/make.js create mode 100644 node_modules/shelljs/package.json create mode 100644 node_modules/shelljs/plugin.js create mode 100644 node_modules/shelljs/shell.js create mode 100644 node_modules/shelljs/src/cat.js create mode 100644 node_modules/shelljs/src/cd.js create mode 100644 node_modules/shelljs/src/chmod.js create mode 100644 node_modules/shelljs/src/cmd.js create mode 100644 node_modules/shelljs/src/common.js create mode 100644 node_modules/shelljs/src/cp.js create mode 100644 node_modules/shelljs/src/dirs.js create mode 100644 node_modules/shelljs/src/echo.js create mode 100644 node_modules/shelljs/src/error.js create mode 100644 node_modules/shelljs/src/errorCode.js create mode 100644 node_modules/shelljs/src/exec-child.js create mode 100644 node_modules/shelljs/src/exec.js create mode 100644 node_modules/shelljs/src/find.js create mode 100644 node_modules/shelljs/src/grep.js create mode 100644 node_modules/shelljs/src/head.js create mode 100644 node_modules/shelljs/src/ln.js create mode 100644 node_modules/shelljs/src/ls.js create mode 100644 node_modules/shelljs/src/mkdir.js create mode 100644 node_modules/shelljs/src/mv.js create mode 100644 node_modules/shelljs/src/popd.js create mode 100644 node_modules/shelljs/src/pushd.js create mode 100644 node_modules/shelljs/src/pwd.js create mode 100644 node_modules/shelljs/src/rm.js create mode 100644 node_modules/shelljs/src/sed.js create mode 100644 node_modules/shelljs/src/set.js create mode 100644 node_modules/shelljs/src/sort.js create mode 100644 node_modules/shelljs/src/tail.js create mode 100644 node_modules/shelljs/src/tempdir.js create mode 100644 node_modules/shelljs/src/test.js create mode 100644 node_modules/shelljs/src/to.js create mode 100644 node_modules/shelljs/src/toEnd.js create mode 100644 node_modules/shelljs/src/touch.js create mode 100644 node_modules/shelljs/src/uniq.js create mode 100644 node_modules/shelljs/src/which.js create mode 100644 node_modules/signal-exit/LICENSE.txt create mode 100644 node_modules/signal-exit/README.md create mode 100644 node_modules/signal-exit/index.js create mode 100644 node_modules/signal-exit/package.json create mode 100644 node_modules/signal-exit/signals.js create mode 100644 node_modules/strip-final-newline/index.js create mode 100644 node_modules/strip-final-newline/license create mode 100644 node_modules/strip-final-newline/package.json 
create mode 100644 node_modules/strip-final-newline/readme.md create mode 100644 node_modules/to-regex-range/LICENSE create mode 100644 node_modules/to-regex-range/README.md create mode 100644 node_modules/to-regex-range/index.js create mode 100644 node_modules/to-regex-range/package.json create mode 100644 node_modules/which/CHANGELOG.md create mode 100644 node_modules/which/LICENSE create mode 100644 node_modules/which/README.md create mode 100755 node_modules/which/bin/node-which create mode 100644 node_modules/which/package.json create mode 100644 node_modules/which/which.js create mode 100644 package-lock.json diff --git a/node_modules/.bin/cmaki b/node_modules/.bin/cmaki new file mode 120000 index 0000000..1e97214 --- /dev/null +++ b/node_modules/.bin/cmaki @@ -0,0 +1 @@ +../npm-mas-mas/cmaki_scripts/cmaki.js \ No newline at end of file diff --git a/node_modules/.bin/node-which b/node_modules/.bin/node-which new file mode 120000 index 0000000..6f8415e --- /dev/null +++ b/node_modules/.bin/node-which @@ -0,0 +1 @@ +../which/bin/node-which \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..9b65d05 --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,477 @@ +{ + "name": "design-patterns-cpp14", + "version": "1.0.23", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": 
"sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + 
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/metacommon": { + "version": "1.0.1", + "resolved": "git+ssh://git@github.com/makiolo/metacommon.git#0eeff0ebfdbe322e8c01ee9c5cfae58dbf8b1f0c", + "hasInstallScript": true, + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm-mas-mas": { + "version": "0.0.1", + "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", + "dev": true, + "license": "MIT", + "dependencies": { + "shelljs": ">=0.8.5" + }, + "bin": { + "cmaki": "cmaki_scripts/cmaki.js" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", + "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "execa": "^5.1.1", + "fast-glob": "^3.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + 
}, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + } + } +} diff --git a/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/@nodelib/fs.scandir/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.scandir/README.md b/node_modules/@nodelib/fs.scandir/README.md new file mode 100644 index 0000000..e0b218b --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/README.md @@ -0,0 +1,171 @@ +# @nodelib/fs.scandir + +> List files and directories inside the specified directory. + +## :bulb: Highlights + +The package is aimed at obtaining information about entries in the directory. + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). +* :link: Can safely work with broken symbolic links. 
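As a quick illustration of the highlights above, the sketch below (illustrative only, not taken from the vendored package) combines the callback-style `scandir` call with the `stats` and `throwErrorOnBrokenSymbolicLink` options documented further down in this README; the `./some-dir` path is just a placeholder.

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Pre-create a Settings instance if the method is called frequently
// (see the `optionsOrSettings` note later in this README).
const settings = new fsScandir.Settings({
    followSymbolicLinks: true,             // resolve symlinks with fs.stat
    throwErrorOnBrokenSymbolicLink: false, // keep broken links instead of failing
    stats: true                            // attach an fs.Stats instance to each entry
});

fsScandir.scandir('./some-dir', settings, (error, entries) => {
    if (error !== null) {
        console.error(error);
        return;
    }
    for (const entry of entries) {
        // Every entry carries `name`, `path`, `dirent` and (with `stats: true`) `stats`.
        console.log(entry.path, entry.dirent.isDirectory(), entry.stats?.size);
    }
});
```

The synchronous variant, `scandirSync`, accepts the same `settings` object.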
+ +## Install + +```console +npm install @nodelib/fs.scandir +``` + +## Usage + +```ts +import * as fsScandir from '@nodelib/fs.scandir'; + +fsScandir.scandir('path', (error, stats) => { /* … */ }); +``` + +## API + +### .scandir(path, [optionsOrSettings], callback) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. + +```ts +fsScandir.scandir('path', (error, entries) => { /* … */ }); +fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); +fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); +``` + +### .scandirSync(path, [optionsOrSettings]) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. + +```ts +const entries = fsScandir.scandirSync('path'); +const entries = fsScandir.scandirSync('path', {}); +const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsScandir.Settings({ followSymbolicLinks: false }); + +const entries = fsScandir.scandirSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. +* `stats` (optional) — An instance of `fs.Stats` class. + +For example, the `scandir` call for `tools` directory with one directory inside: + +```ts +{ + dirent: Dirent { name: 'typedoc', /* … */ }, + name: 'typedoc', + path: 'tools/typedoc' +} +``` + +## Options + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. + +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. 
+ +```ts +interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; + readdir?: typeof fs.readdir; + readdirSync?: typeof fs.readdirSync; +} + +const settings = new fsScandir.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## `old` and `modern` mode + +This package has two modes that are used depending on the environment and parameters of use. + +### old + +* Node.js below `10.10` or when the `stats` option is enabled + +When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). + +### modern + +* Node.js 10.10+ and the `stats` option is disabled + +In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. + +This mode makes fewer calls to the file system. It's faster. + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts new file mode 100644 index 0000000..827f1db --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts @@ -0,0 +1,20 @@ +import type * as fsStat from '@nodelib/fs.stat'; +import type { Dirent, ErrnoException } from '../types'; +export interface ReaddirAsynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; + (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; +} +export interface ReaddirSynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }): Dirent[]; + (filepath: string): string[]; +} +export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { + readdir: ReaddirAsynchronousMethod; + readdirSync: ReaddirSynchronousMethod; +}; +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js new file mode 100644 index 0000000..f0fe022 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/@nodelib/fs.scandir/out/constants.d.ts new file mode 100644 index 0000000..33f1749 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/constants.d.ts @@ -0,0 +1,4 @@ +/** + * IS `true` for Node.js 10.10 and greater. 
+ */ +export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/@nodelib/fs.scandir/out/constants.js new file mode 100644 index 0000000..7e3d441 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/constants.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; +const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +const SUPPORTED_MAJOR_VERSION = 10; +const SUPPORTED_MINOR_VERSION = 10; +const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; +const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; +/** + * IS `true` for Node.js 10.10 and greater. + */ +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/@nodelib/fs.scandir/out/index.d.ts new file mode 100644 index 0000000..b9da83e --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Dirent, Entry } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function scandir(path: string, callback: AsyncCallback): void; +declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace scandir { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; +export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/@nodelib/fs.scandir/out/index.js new file mode 100644 index 0000000..99c70d3 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.scandirSync = exports.scandir = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function scandir(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.scandir = scandir; +function scandirSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.scandirSync = scandirSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { 
+ return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts new file mode 100644 index 0000000..5829676 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts @@ -0,0 +1,7 @@ +/// +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; +export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/@nodelib/fs.scandir/out/providers/async.js new file mode 100644 index 0000000..e8e2f0a --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/async.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const rpl = require("run-parallel"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings, callback) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + readdirWithFileTypes(directory, settings, callback); + return; + } + readdir(directory, settings, callback); +} +exports.read = read; +function readdirWithFileTypes(directory, settings, callback) { + settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const entries = dirents.map((dirent) => ({ + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + })); + if (!settings.followSymbolicLinks) { + callSuccessCallback(callback, entries); + return; + } + const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); + rpl(tasks, (rplError, rplEntries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, rplEntries); + }); + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function makeRplTaskEntry(entry, settings) { + return (done) => { + if (!entry.dirent.isSymbolicLink()) { + done(null, entry); + return; + } + settings.fs.stat(entry.path, (statError, stats) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + done(statError); + return; + } + done(null, entry); + return; + } + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + done(null, entry); + }); + }; +} +function readdir(directory, settings, callback) { + settings.fs.readdir(directory, (readdirError, names) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if (error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, 
stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; + }); + rpl(tasks, (rplError, entries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, entries); + }); + }); +} +exports.readdir = readdir; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts new file mode 100644 index 0000000..2b4d08b --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts @@ -0,0 +1 @@ +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/@nodelib/fs.scandir/out/providers/common.js new file mode 100644 index 0000000..8724cb5 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/common.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = void 0; +function joinPathSegments(a, b, separator) { + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). + */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts new file mode 100644 index 0000000..e05c8f0 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts @@ -0,0 +1,5 @@ +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare function read(directory: string, settings: Settings): Entry[]; +export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; +export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.js new file mode 100644 index 0000000..146db34 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/providers/sync.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + return readdirWithFileTypes(directory, settings); + } + return readdir(directory, settings); +} +exports.read = read; +function readdirWithFileTypes(directory, settings) { + const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); + return dirents.map((dirent) => { + const entry = { + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + }; + if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { + try { + const stats = settings.fs.statSync(entry.path); + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + } + catch (error) { + if (settings.throwErrorOnBrokenSymbolicLink) { + throw error; + } + } + } + return entry; + }); +} 
+exports.readdirWithFileTypes = readdirWithFileTypes; +function readdir(directory, settings) { + const names = settings.fs.readdirSync(directory); + return names.map((name) => { + const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const stats = fsStat.statSync(entryPath, settings.fsStatSettings); + const entry = { + name, + path: entryPath, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + return entry; + }); +} +exports.readdir = readdir; diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.d.ts new file mode 100644 index 0000000..a0db115 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/settings.d.ts @@ -0,0 +1,20 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLinks: boolean; + readonly fs: fs.FileSystemAdapter; + readonly pathSegmentSeparator: string; + readonly stats: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly fsStatSettings: fsStat.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/@nodelib/fs.scandir/out/settings.js new file mode 100644 index 0000000..15a3e8c --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/settings.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.stats = this._getValue(this._options.stats, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + this.fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this.followSymbolicLinks, + fs: this.fs, + throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? 
option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts new file mode 100644 index 0000000..f326c5e --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts @@ -0,0 +1,20 @@ +/// +import type * as fs from 'fs'; +export interface Entry { + dirent: Dirent; + name: string; + path: string; + stats?: Stats; +} +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; +export interface Dirent { + isBlockDevice: () => boolean; + isCharacterDevice: () => boolean; + isDirectory: () => boolean; + isFIFO: () => boolean; + isFile: () => boolean; + isSocket: () => boolean; + isSymbolicLink: () => boolean; + name: string; +} diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/@nodelib/fs.scandir/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts new file mode 100644 index 0000000..bb863f1 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts @@ -0,0 +1,2 @@ +import type { Dirent, Stats } from '../types'; +export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.js new file mode 100644 index 0000000..ace7c74 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts new file mode 100644 index 0000000..1b41954 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts @@ -0,0 +1,2 @@ +import * as fs from './fs'; +export { fs }; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.js b/node_modules/@nodelib/fs.scandir/out/utils/index.js new file mode 100644 index 0000000..f5de129 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/out/utils/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fs = void 0; +const fs = require("./fs"); +exports.fs = fs; diff --git a/node_modules/@nodelib/fs.scandir/package.json b/node_modules/@nodelib/fs.scandir/package.json new file mode 100644 index 0000000..d3a8924 --- /dev/null +++ b/node_modules/@nodelib/fs.scandir/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.scandir", + "version": "2.1.5", + "description": "List files and directories inside the specified directory", + "license": "MIT", + 
"repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "scandir", + "readdir", + "dirent" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4", + "@types/run-parallel": "^1.1.0" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/@nodelib/fs.stat/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.stat/README.md b/node_modules/@nodelib/fs.stat/README.md new file mode 100644 index 0000000..686f047 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/README.md @@ -0,0 +1,126 @@ +# @nodelib/fs.stat + +> Get the status of a file with some features. + +## :bulb: Highlights + +Wrapper around standard method `fs.lstat` and `fs.stat` with some features. + +* :beginner: Normally follows symbolic link. +* :gear: Can safely work with broken symbolic link. + +## Install + +```console +npm install @nodelib/fs.stat +``` + +## Usage + +```ts +import * as fsStat from '@nodelib/fs.stat'; + +fsStat.stat('path', (error, stats) => { /* … */ }); +``` + +## API + +### .stat(path, [optionsOrSettings], callback) + +Returns an instance of `fs.Stats` class for provided path with standard callback-style. + +```ts +fsStat.stat('path', (error, stats) => { /* … */ }); +fsStat.stat('path', {}, (error, stats) => { /* … */ }); +fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ }); +``` + +### .statSync(path, [optionsOrSettings]) + +Returns an instance of `fs.Stats` class for provided path. 
+
+```ts
+const stats = fsStat.statSync('path');
+const stats = fsStat.statSync('path', {});
+const stats = fsStat.statSync('path', new fsStat.Settings());
+```
+
+#### path
+
+* Required: `true`
+* Type: `string | Buffer | URL`
+
+A path to a file. If a URL is provided, it must use the `file:` protocol.
+
+#### optionsOrSettings
+
+* Required: `false`
+* Type: `Options | Settings`
+* Default: An instance of `Settings` class
+
+An [`Options`](#options) object or an instance of [`Settings`](#settings) class.
+
+> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
+
+### Settings([options])
+
+A class of full settings of the package.
+
+```ts
+const settings = new fsStat.Settings({ followSymbolicLink: false });
+
+const stats = fsStat.stat('path', settings);
+```
+
+## Options
+
+### `followSymbolicLink`
+
+* Type: `boolean`
+* Default: `true`
+
+Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`.
+
+### `markSymbolicLink`
+
+* Type: `boolean`
+* Default: `false`
+
+Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`).
+
+> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link.
+
+### `throwErrorOnBrokenSymbolicLink`
+
+* Type: `boolean`
+* Default: `true`
+
+Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`.
+
+### `fs`
+
+* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
+* Default: A default FS methods
+
+By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
+
+```ts
+interface FileSystemAdapter {
+  lstat?: typeof fs.lstat;
+  stat?: typeof fs.stat;
+  lstatSync?: typeof fs.lstatSync;
+  statSync?: typeof fs.statSync;
+}
+
+const settings = new fsStat.Settings({
+  fs: { lstat: fakeLstat }
+});
+```
+
+## Changelog
+
+See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.
+
+## License
+
+This software is released under the terms of the MIT license.
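By way of illustration (not part of the vendored README), a minimal sketch that combines the pieces documented above: a pre-created `Settings` instance reused across calls, plus `util.promisify`, which applies here because `stat` declares a `__promisify__` overload in `out/index.d.ts`. The path is a placeholder.

```ts
import { promisify } from 'util';
import * as fsStat from '@nodelib/fs.stat';

// Reuse one Settings instance across frequent calls, as the README recommends.
const settings = new fsStat.Settings({
  followSymbolicLink: true,
  throwErrorOnBrokenSymbolicLink: false // fall back to the lstat result for broken links
});

// Callback style ('some/path' is a placeholder).
fsStat.stat('some/path', settings, (error, stats) => {
  if (error) {
    console.error(error);
    return;
  }
  console.log(stats.isDirectory());
});

// Promise style via util.promisify.
const statAsync = promisify(fsStat.stat);
statAsync('some/path', settings).then((stats) => console.log(stats.size));
```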
diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts new file mode 100644 index 0000000..3af759c --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts @@ -0,0 +1,13 @@ +/// +import * as fs from 'fs'; +import type { ErrnoException } from '../types'; +export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; +export declare type StatSynchronousMethod = (path: string) => fs.Stats; +export interface FileSystemAdapter { + lstat: StatAsynchronousMethod; + stat: StatAsynchronousMethod; + lstatSync: StatSynchronousMethod; + statSync: StatSynchronousMethod; +} +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.js new file mode 100644 index 0000000..8dc08c8 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/adapters/fs.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/@nodelib/fs.stat/out/index.d.ts new file mode 100644 index 0000000..f95db99 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Stats } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function stat(path: string, callback: AsyncCallback): void; +declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace stat { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; +export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/@nodelib/fs.stat/out/index.js new file mode 100644 index 0000000..b23f751 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.statSync = exports.stat = exports.Settings = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function stat(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.stat = stat; 
+function statSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.statSync = statSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts new file mode 100644 index 0000000..85423ce --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts @@ -0,0 +1,4 @@ +import type Settings from '../settings'; +import type { ErrnoException, Stats } from '../types'; +export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; +export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/@nodelib/fs.stat/out/providers/async.js new file mode 100644 index 0000000..983ff0e --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/async.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings, callback) { + settings.fs.lstat(path, (lstatError, lstat) => { + if (lstatError !== null) { + callFailureCallback(callback, lstatError); + return; + } + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + callSuccessCallback(callback, lstat); + return; + } + settings.fs.stat(path, (statError, stat) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + callFailureCallback(callback, statError); + return; + } + callSuccessCallback(callback, lstat); + return; + } + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + callSuccessCallback(callback, stat); + }); + }); +} +exports.read = read; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts new file mode 100644 index 0000000..428c3d7 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts @@ -0,0 +1,3 @@ +import type Settings from '../settings'; +import type { Stats } from '../types'; +export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.js b/node_modules/@nodelib/fs.stat/out/providers/sync.js new file mode 100644 index 0000000..1521c36 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings) { + const lstat = settings.fs.lstatSync(path); + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + return lstat; + } + try { + const stat = settings.fs.statSync(path); + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + return stat; + } + catch (error) { + if (!settings.throwErrorOnBrokenSymbolicLink) { + return lstat; + } + throw error; + } +} +exports.read = read; diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/@nodelib/fs.stat/out/settings.d.ts new file mode 100644 index 0000000..f4b3d44 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/settings.d.ts @@ -0,0 +1,16 @@ +import * as fs 
from './adapters/fs'; +export interface Options { + followSymbolicLink?: boolean; + fs?: Partial; + markSymbolicLink?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLink: boolean; + readonly fs: fs.FileSystemAdapter; + readonly markSymbolicLink: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/@nodelib/fs.stat/out/settings.js new file mode 100644 index 0000000..111ec09 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/settings.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/@nodelib/fs.stat/out/types/index.d.ts new file mode 100644 index 0000000..74c08ed --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/types/index.d.ts @@ -0,0 +1,4 @@ +/// +import type * as fs from 'fs'; +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/@nodelib/fs.stat/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.stat/package.json b/node_modules/@nodelib/fs.stat/package.json new file mode 100644 index 0000000..f2540c2 --- /dev/null +++ b/node_modules/@nodelib/fs.stat/package.json @@ -0,0 +1,37 @@ +{ + "name": "@nodelib/fs.stat", + "version": "2.0.5", + "description": "Get the status of a file with some features", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "stat" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . 
--watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/@nodelib/fs.walk/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@nodelib/fs.walk/README.md b/node_modules/@nodelib/fs.walk/README.md new file mode 100644 index 0000000..6ccc08d --- /dev/null +++ b/node_modules/@nodelib/fs.walk/README.md @@ -0,0 +1,215 @@ +# @nodelib/fs.walk + +> A library for efficiently walking a directory recursively. + +## :bulb: Highlights + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). +* :gear: Built-in directories/files and error filtering system. +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.walk +``` + +## Usage + +```ts +import * as fsWalk from '@nodelib/fs.walk'; + +fsWalk.walk('path', (error, entries) => { /* … */ }); +``` + +## API + +### .walk(path, [optionsOrSettings], callback) + +Reads the directory recursively and asynchronously. Requires a callback function. + +> :book: If you want to use the Promise API, use `util.promisify`. + +```ts +fsWalk.walk('path', (error, entries) => { /* … */ }); +fsWalk.walk('path', {}, (error, entries) => { /* … */ }); +fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); +``` + +### .walkStream(path, [optionsOrSettings]) + +Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. + +```ts +const stream = fsWalk.walkStream('path'); +const stream = fsWalk.walkStream('path', {}); +const stream = fsWalk.walkStream('path', new fsWalk.Settings()); +``` + +### .walkSync(path, [optionsOrSettings]) + +Reads the directory recursively and synchronously. 
Returns an array of entries. + +```ts +const entries = fsWalk.walkSync('path'); +const entries = fsWalk.walkSync('path', {}); +const entries = fsWalk.walkSync('path', new fsWalk.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settings) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsWalk.Settings({ followSymbolicLinks: true }); + +const entries = fsWalk.walkSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. +* [`stats`] — An instance of `fs.Stats` class. + +## Options + +### basePath + +* Type: `string` +* Default: `undefined` + +By default, all paths are built relative to the root path. You can use this option to set custom root path. + +In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. + +```ts +fsWalk.walkSync('root'); // → ['root/file.txt'] +fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] +``` + +### concurrency + +* Type: `number` +* Default: `Infinity` + +The maximum number of concurrent calls to `fs.readdir`. + +> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). + +### deepFilter + +* Type: [`DeepFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the directory will be read deep or not. + +```ts +// Skip all directories that starts with `node_modules` +const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); +``` + +### entryFilter + +* Type: [`EntryFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the entry will be included to results or not. + +```ts +// Exclude all `.js` files from results +const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); +``` + +### errorFilter + +* Type: [`ErrorFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that allows you to skip errors that occur when reading directories. + +For example, you can skip `ENOENT` errors if required: + +```ts +// Skip all ENOENT errors +const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; +``` + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
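As a quick illustration of how the options above compose (this is a sketch, not part of the upstream README; the `src` directory and the `.ts` suffix are arbitrary choices):

```ts
import * as fsWalk from '@nodelib/fs.walk';

// Skip node_modules, keep only .ts files, tolerate ENOENT errors,
// and attach fs.Stats to each entry.
const entries = fsWalk.walkSync('src', {
  deepFilter: (entry) => !entry.path.includes('node_modules'),
  entryFilter: (entry) => entry.name.endsWith('.ts'),
  errorFilter: (error) => error.code === 'ENOENT',
  stats: true
});

for (const entry of entries) {
  console.log(entry.path, entry.stats?.size);
}
```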
+ +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: `FileSystemAdapter` +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat: typeof fs.lstat; + stat: typeof fs.stat; + lstatSync: typeof fs.lstatSync; + statSync: typeof fs.statSync; + readdir: typeof fs.readdir; + readdirSync: typeof fs.readdirSync; +} + +const settings = new fsWalk.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/@nodelib/fs.walk/out/index.d.ts new file mode 100644 index 0000000..8864c7b --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/index.d.ts @@ -0,0 +1,14 @@ +/// +import type { Readable } from 'stream'; +import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; +import { AsyncCallback } from './providers/async'; +import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; +import type { Entry } from './types'; +declare function walk(directory: string, callback: AsyncCallback): void; +declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace walk { + function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; +declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; +export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/@nodelib/fs.walk/out/index.js new file mode 100644 index 0000000..1520787 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function walk(directory, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; + } + new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); +} +exports.walk = walk; +function walkSync(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new sync_1.default(directory, settings); + return 
provider.read(); +} +exports.walkSync = walkSync; +function walkStream(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new stream_1.default(directory, settings); + return provider.read(); +} +exports.walkStream = walkStream; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts new file mode 100644 index 0000000..0f6717d --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts @@ -0,0 +1,12 @@ +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; +export default class AsyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + private readonly _storage; + constructor(_root: string, _settings: Settings); + read(callback: AsyncCallback): void; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/@nodelib/fs.walk/out/providers/async.js new file mode 100644 index 0000000..51d3be5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/async.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +class AsyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._storage = []; + } + read(callback) { + this._reader.onError((error) => { + callFailureCallback(callback, error); + }); + this._reader.onEntry((entry) => { + this._storage.push(entry); + }); + this._reader.onEnd(() => { + callSuccessCallback(callback, this._storage); + }); + this._reader.read(); + } +} +exports.default = AsyncProvider; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, entries) { + callback(null, entries); +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts new file mode 100644 index 0000000..874f60c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts @@ -0,0 +1,4 @@ +import AsyncProvider from './async'; +import StreamProvider from './stream'; +import SyncProvider from './sync'; +export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/@nodelib/fs.walk/out/providers/index.js new file mode 100644 index 0000000..4c2529c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; +const async_1 = require("./async"); +exports.AsyncProvider = async_1.default; +const stream_1 = require("./stream"); +exports.StreamProvider = stream_1.default; +const sync_1 = require("./sync"); +exports.SyncProvider = sync_1.default; diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts new file mode 100644 index 0000000..294185f --- /dev/null +++ 
b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from 'stream'; +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +export default class StreamProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + protected readonly _stream: Readable; + constructor(_root: string, _settings: Settings); + read(): Readable; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/@nodelib/fs.walk/out/providers/stream.js new file mode 100644 index 0000000..51298b0 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/stream.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const async_1 = require("../readers/async"); +class StreamProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._stream = new stream_1.Readable({ + objectMode: true, + read: () => { }, + destroy: () => { + if (!this._reader.isDestroyed) { + this._reader.destroy(); + } + } + }); + } + read() { + this._reader.onError((error) => { + this._stream.emit('error', error); + }); + this._reader.onEntry((entry) => { + this._stream.push(entry); + }); + this._reader.onEnd(() => { + this._stream.push(null); + }); + this._reader.read(); + return this._stream; + } +} +exports.default = StreamProvider; diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts new file mode 100644 index 0000000..551c42e --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts @@ -0,0 +1,10 @@ +import SyncReader from '../readers/sync'; +import type Settings from '../settings'; +import type { Entry } from '../types'; +export default class SyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: SyncReader; + constructor(_root: string, _settings: Settings); + read(): Entry[]; +} diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/@nodelib/fs.walk/out/providers/sync.js new file mode 100644 index 0000000..faab6ca --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/providers/sync.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +class SyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new sync_1.default(this._root, this._settings); + } + read() { + return this._reader.read(); + } +} +exports.default = SyncProvider; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts new file mode 100644 index 0000000..9acf4e6 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts @@ -0,0 +1,30 @@ +/// +import { EventEmitter } from 'events'; +import * as fsScandir from '@nodelib/fs.scandir'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +import Reader from './reader'; +declare type EntryEventCallback = (entry: Entry) => void; +declare type ErrorEventCallback = (error: Errno) => void; +declare type EndEventCallback = () => void; +export default class AsyncReader extends Reader { + protected readonly _settings: Settings; + protected readonly _scandir: typeof fsScandir.scandir; + 
protected readonly _emitter: EventEmitter; + private readonly _queue; + private _isFatalError; + private _isDestroyed; + constructor(_root: string, _settings: Settings); + read(): EventEmitter; + get isDestroyed(): boolean; + destroy(): void; + onEntry(callback: EntryEventCallback): void; + onError(callback: ErrorEventCallback): void; + onEnd(callback: EndEventCallback): void; + private _pushToQueue; + private _worker; + private _handleError; + private _handleEntry; + private _emitEntry; +} +export {}; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/@nodelib/fs.walk/out/readers/async.js new file mode 100644 index 0000000..ebe8dd5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/async.js @@ -0,0 +1,97 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const events_1 = require("events"); +const fsScandir = require("@nodelib/fs.scandir"); +const fastq = require("fastq"); +const common = require("./common"); +const reader_1 = require("./reader"); +class AsyncReader extends reader_1.default { + constructor(_root, _settings) { + super(_root, _settings); + this._settings = _settings; + this._scandir = fsScandir.scandir; + this._emitter = new events_1.EventEmitter(); + this._queue = fastq(this._worker.bind(this), this._settings.concurrency); + this._isFatalError = false; + this._isDestroyed = false; + this._queue.drain = () => { + if (!this._isFatalError) { + this._emitter.emit('end'); + } + }; + } + read() { + this._isFatalError = false; + this._isDestroyed = false; + setImmediate(() => { + this._pushToQueue(this._root, this._settings.basePath); + }); + return this._emitter; + } + get isDestroyed() { + return this._isDestroyed; + } + destroy() { + if (this._isDestroyed) { + throw new Error('The reader is already destroyed'); + } + this._isDestroyed = true; + this._queue.killAndDrain(); + } + onEntry(callback) { + this._emitter.on('entry', callback); + } + onError(callback) { + this._emitter.once('error', callback); + } + onEnd(callback) { + this._emitter.once('end', callback); + } + _pushToQueue(directory, base) { + const queueItem = { directory, base }; + this._queue.push(queueItem, (error) => { + if (error !== null) { + this._handleError(error); + } + }); + } + _worker(item, done) { + this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { + if (error !== null) { + done(error, undefined); + return; + } + for (const entry of entries) { + this._handleEntry(entry, item.base); + } + done(null, undefined); + }); + } + _handleError(error) { + if (this._isDestroyed || !common.isFatalError(this._settings, error)) { + return; + } + this._isFatalError = true; + this._isDestroyed = true; + this._emitter.emit('error', error); + } + _handleEntry(entry, base) { + if (this._isDestroyed || this._isFatalError) { + return; + } + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._emitEntry(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); + } + } + _emitEntry(entry) { + this._emitter.emit('entry', entry); + } +} +exports.default = AsyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts new file mode 100644 index 0000000..5985f97 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts @@ -0,0 +1,7 @@ +import type { FilterFunction } from '../settings'; +import type Settings from '../settings'; +import type { Errno } from '../types'; +export declare function isFatalError(settings: Settings, error: Errno): boolean; +export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; +export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/@nodelib/fs.walk/out/readers/common.js new file mode 100644 index 0000000..a93572f --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/common.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; +function isFatalError(settings, error) { + if (settings.errorFilter === null) { + return true; + } + return !settings.errorFilter(error); +} +exports.isFatalError = isFatalError; +function isAppliedFilter(filter, value) { + return filter === null || filter(value); +} +exports.isAppliedFilter = isAppliedFilter; +function replacePathSegmentSeparator(filepath, separator) { + return filepath.split(/[/\\]/).join(separator); +} +exports.replacePathSegmentSeparator = replacePathSegmentSeparator; +function joinPathSegments(a, b, separator) { + if (a === '') { + return b; + } + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
+ */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts new file mode 100644 index 0000000..e1f383b --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts @@ -0,0 +1,6 @@ +import type Settings from '../settings'; +export default class Reader { + protected readonly _root: string; + protected readonly _settings: Settings; + constructor(_root: string, _settings: Settings); +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/@nodelib/fs.walk/out/readers/reader.js new file mode 100644 index 0000000..782f07c --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/reader.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const common = require("./common"); +class Reader { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); + } +} +exports.default = Reader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts new file mode 100644 index 0000000..af41033 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts @@ -0,0 +1,15 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry } from '../types'; +import Reader from './reader'; +export default class SyncReader extends Reader { + protected readonly _scandir: typeof fsScandir.scandirSync; + private readonly _storage; + private readonly _queue; + read(): Entry[]; + private _pushToQueue; + private _handleQueue; + private _handleDirectory; + private _handleError; + private _handleEntry; + private _pushToStorage; +} diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/@nodelib/fs.walk/out/readers/sync.js new file mode 100644 index 0000000..9a8d5a6 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/readers/sync.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsScandir = require("@nodelib/fs.scandir"); +const common = require("./common"); +const reader_1 = require("./reader"); +class SyncReader extends reader_1.default { + constructor() { + super(...arguments); + this._scandir = fsScandir.scandirSync; + this._storage = []; + this._queue = new Set(); + } + read() { + this._pushToQueue(this._root, this._settings.basePath); + this._handleQueue(); + return this._storage; + } + _pushToQueue(directory, base) { + this._queue.add({ directory, base }); + } + _handleQueue() { + for (const item of this._queue.values()) { + this._handleDirectory(item.directory, item.base); + } + } + _handleDirectory(directory, base) { + try { + const entries = this._scandir(directory, this._settings.fsScandirSettings); + for (const entry of entries) { + this._handleEntry(entry, base); + } + } + catch (error) { + this._handleError(error); + } + } + _handleError(error) { + if (!common.isFatalError(this._settings, error)) { + return; + } + throw error; + } + _handleEntry(entry, base) { + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._pushToStorage(entry); + } + if (entry.dirent.isDirectory() && 
common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); + } + } + _pushToStorage(entry) { + this._storage.push(entry); + } +} +exports.default = SyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/@nodelib/fs.walk/out/settings.d.ts new file mode 100644 index 0000000..d1c4b45 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/settings.d.ts @@ -0,0 +1,30 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry, Errno } from './types'; +export declare type FilterFunction = (value: T) => boolean; +export declare type DeepFilterFunction = FilterFunction; +export declare type EntryFilterFunction = FilterFunction; +export declare type ErrorFilterFunction = FilterFunction; +export interface Options { + basePath?: string; + concurrency?: number; + deepFilter?: DeepFilterFunction; + entryFilter?: EntryFilterFunction; + errorFilter?: ErrorFilterFunction; + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly basePath?: string; + readonly concurrency: number; + readonly deepFilter: DeepFilterFunction | null; + readonly entryFilter: EntryFilterFunction | null; + readonly errorFilter: ErrorFilterFunction | null; + readonly pathSegmentSeparator: string; + readonly fsScandirSettings: fsScandir.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/@nodelib/fs.walk/out/settings.js new file mode 100644 index 0000000..d7a85c8 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/settings.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsScandir = require("@nodelib/fs.scandir"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.basePath = this._getValue(this._options.basePath, undefined); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); + this.deepFilter = this._getValue(this._options.deepFilter, null); + this.entryFilter = this._getValue(this._options.entryFilter, null); + this.errorFilter = this._getValue(this._options.errorFilter, null); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.fsScandirSettings = new fsScandir.Settings({ + followSymbolicLinks: this._options.followSymbolicLinks, + fs: this._options.fs, + pathSegmentSeparator: this._options.pathSegmentSeparator, + stats: this._options.stats, + throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? 
option : value; + } +} +exports.default = Settings; diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/@nodelib/fs.walk/out/types/index.d.ts new file mode 100644 index 0000000..6ee9bd3 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/types/index.d.ts @@ -0,0 +1,8 @@ +/// +import type * as scandir from '@nodelib/fs.scandir'; +export declare type Entry = scandir.Entry; +export declare type Errno = NodeJS.ErrnoException; +export interface QueueItem { + directory: string; + base?: string; +} diff --git a/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/@nodelib/fs.walk/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.walk/package.json b/node_modules/@nodelib/fs.walk/package.json new file mode 100644 index 0000000..86bfce4 --- /dev/null +++ b/node_modules/@nodelib/fs.walk/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.walk", + "version": "1.2.8", + "description": "A library for efficiently walking a directory recursively", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "walk", + "scanner", + "crawler" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*", + "!out/**/tests/**" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" +} diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/braces/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
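The `Settings` class defined in `fs.walk/out/settings.js` above is intended to be constructed once and shared; a hedged sketch of reusing a single instance between the callback and stream providers (the `fixtures` directory name is a placeholder):

```ts
import * as fsWalk from '@nodelib/fs.walk';

// One Settings instance feeds both providers and avoids rebuilding
// the underlying fs.scandir settings on every call.
const settings = new fsWalk.Settings({
  followSymbolicLinks: false,
  concurrency: 32,
  pathSegmentSeparator: '/'
});

fsWalk.walk('fixtures', settings, (error, entries) => {
  if (error) {
    console.error(error);
    return;
  }
  console.log(entries.length);
});

const stream = fsWalk.walkStream('fixtures', settings);
stream.on('data', (entry: fsWalk.Entry) => console.log(entry.path));
```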
diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md new file mode 100644 index 0000000..f59dd60 --- /dev/null +++ b/node_modules/braces/README.md @@ -0,0 +1,586 @@ +# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) + +> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save braces +``` + +## v3.0.0 Released!! + +See the [changelog](CHANGELOG.md) for details. + +## Why use braces? + +Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. + +- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) +- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. +- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. +- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). +- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). +- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` +- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` +- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` +- [Supports escaping](#escaping) - To prevent evaluation of special characters. + +## Usage + +The main export is a function that takes one or more brace `patterns` and `options`. + +```js +const braces = require('braces'); +// braces(patterns[, options]); + +console.log(braces(['{01..05}', '{a..e}'])); +//=> ['(0[1-5])', '([a-e])'] + +console.log(braces(['{01..05}', '{a..e}'], { expand: true })); +//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] +``` + +### Brace Expansion vs. Compilation + +By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. 
+ +**Compiled** + +```js +console.log(braces('a/{x,y,z}/b')); +//=> ['a/(x|y|z)/b'] +console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); +//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] +``` + +**Expanded** + +Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): + +```js +console.log(braces('a/{x,y,z}/b', { expand: true })); +//=> ['a/x/b', 'a/y/b', 'a/z/b'] + +console.log(braces.expand('{01..10}')); +//=> ['01','02','03','04','05','06','07','08','09','10'] +``` + +### Lists + +Expand lists (like Bash "sets"): + +```js +console.log(braces('a/{foo,bar,baz}/*.js')); +//=> ['a/(foo|bar|baz)/*.js'] + +console.log(braces.expand('a/{foo,bar,baz}/*.js')); +//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] +``` + +### Sequences + +Expand ranges of characters (like Bash "sequences"): + +```js +console.log(braces.expand('{1..3}')); // ['1', '2', '3'] +console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] +console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] +console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] + +// supports zero-padded ranges +console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] +console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] +``` + +See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. + +### Steppped ranges + +Steps, or increments, may be used with ranges: + +```js +console.log(braces.expand('{2..10..2}')); +//=> ['2', '4', '6', '8', '10'] + +console.log(braces('{2..10..2}')); +//=> ['(2|4|6|8|10)'] +``` + +When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. + +### Nesting + +Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. + +**"Expanded" braces** + +```js +console.log(braces.expand('a{b,c,/{x,y}}/e')); +//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] + +console.log(braces.expand('a/{x,{1..5},y}/c')); +//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] +``` + +**"Optimized" braces** + +```js +console.log(braces('a{b,c,/{x,y}}/e')); +//=> ['a(b|c|/(x|y))/e'] + +console.log(braces('a/{x,{1..5},y}/c')); +//=> ['a/(x|([1-5])|y)/c'] +``` + +### Escaping + +**Escaping braces** + +A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: + +```js +console.log(braces.expand('a\\{d,c,b}e')); +//=> ['a{d,c,b}e'] + +console.log(braces.expand('a{d,c,b\\}e')); +//=> ['a{d,c,b}e'] +``` + +**Escaping commas** + +Commas inside braces may also be escaped: + +```js +console.log(braces.expand('a{b\\,c}d')); +//=> ['a{b,c}d'] + +console.log(braces.expand('a{d\\,c,b}e')); +//=> ['ad,ce', 'abe'] +``` + +**Single items** + +Following bash conventions, a brace pattern is also not expanded when it contains a single character: + +```js +console.log(braces.expand('a{b}c')); +//=> ['a{b}c'] +``` + +## Options + +### options.maxLength + +**Type**: `Number` + +**Default**: `10,000` + +**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. 
+ +```js +console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error +``` + +### options.expand + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). + +```js +console.log(braces('a/{b,c}/d', { expand: true })); +//=> [ 'a/b/d', 'a/c/d' ] +``` + +### options.nodupes + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Remove duplicates from the returned array. + +### options.rangeLimit + +**Type**: `Number` + +**Default**: `1000` + +**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. + +You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. + +**Examples** + +```js +// pattern exceeds the "rangeLimit", so it's optimized automatically +console.log(braces.expand('{1..1000}')); +//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] + +// pattern does not exceed "rangeLimit", so it's NOT optimized +console.log(braces.expand('{1..100}')); +//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] +``` + +### options.transform + +**Type**: `Function` + +**Default**: `undefined` + +**Description**: Customize range expansion. + +**Example: Transforming non-numeric values** + +```js +const alpha = braces.expand('x/{a..e}/y', { + transform(value, index) { + // When non-numeric values are passed, "value" is a character code. + return 'foo/' + String.fromCharCode(value) + '-' + index; + }, +}); +console.log(alpha); +//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] +``` + +**Example: Transforming numeric values** + +```js +const numeric = braces.expand('{1..5}', { + transform(value) { + // when numeric values are passed, "value" is a number + return 'foo/' + value * 2; + }, +}); +console.log(numeric); +//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] +``` + +### options.quantifiers + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. + +Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) + +The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. 
+ +**Examples** + +```js +const braces = require('braces'); +console.log(braces('a/b{1,3}/{x,y,z}')); +//=> [ 'a/b(1|3)/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); +//=> [ 'a/b{1,3}/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); +//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] +``` + +### options.keepEscaping + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Do not strip backslashes that were used for escaping from the result. + +## What is "brace expansion"? + +Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). + +In addition to "expansion", braces are also used for matching. In other words: + +- [brace expansion](#brace-expansion) is for generating new lists +- [brace matching](#brace-matching) is for filtering existing lists + +
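A small sketch of that distinction in code, assuming the sibling `micromatch` package for the matching side (it is mentioned later in this README but is not a dependency of braces):

```ts
import braces from 'braces';
import micromatch from 'micromatch';

// Expansion: generate a new list of strings from one pattern.
console.log(braces.expand('foo/{1..3}/bar'));
//=> ['foo/1/bar', 'foo/2/bar', 'foo/3/bar']

// Matching: filter an existing list against the same pattern.
console.log(micromatch(['foo/1/bar', 'foo/9/bar'], 'foo/{1..3}/bar'));
//=> ['foo/1/bar']
```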
+More about brace expansion (click to expand) + +There are two main types of brace expansion: + +1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` +2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". + +Here are some example brace patterns to illustrate how they work: + +**Sets** + +``` +{a,b,c} => a b c +{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 +``` + +**Sequences** + +``` +{1..9} => 1 2 3 4 5 6 7 8 9 +{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 +{1..20..3} => 1 4 7 10 13 16 19 +{a..j} => a b c d e f g h i j +{j..a} => j i h g f e d c b a +{a..z..3} => a d g j m p s v y +``` + +**Combination** + +Sets and sequences can be mixed together or used along with any other strings. + +``` +{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 +foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar +``` + +The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. + +## Brace matching + +In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. + +For example, the pattern `foo/{1..3}/bar` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +``` + +But not: + +``` +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +## Brace matching pitfalls + +Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. + +### tldr + +**"brace bombs"** + +- brace expansion can eat up a huge amount of processing resources +- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially +- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) + +For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. + +### The solution + +Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. + +### Geometric complexity + +At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
+
+For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
+
+```
+{1,2}{3,4}      => (2X2)   => 13 14 23 24
+{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
+```
+
+But add an element to a set, and we get an n-fold Cartesian product with `O(n^c)` complexity:
+
+```
+{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
+                                    249 257 258 259 267 268 269 347 348 349 357
+                                    358 359 367 368 369
+```
+
+Now, imagine how this complexity grows given that each element is an n-tuple:
+
+```
+{1..100}{1..100}         => (100X100)     => 10,000 elements (38.4 kB)
+{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
+```
+
+Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control.
+
+**More information**
+
+Interested in learning more about brace expansion?
+
+- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
+- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
+- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
+
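+Before looking at the solution, here is a small sketch of how braces itself guards against accidental brace bombs (illustrative; it relies on the default behaviour of compiling rather than expanding, and on the default `rangeLimit` of 1000 enforced in `lib/expand.js`):
+
+```js
+const braces = require('braces');
+
+// Compiling (the default) keeps the output tiny, even for enormous ranges:
+console.log(braces('{1..10000000}'));
+//=> an array containing a single short, regex-compatible string
+
+// Expanding the same pattern would create ten million strings, so braces
+// refuses to do it under the default range limit and throws instead:
+try {
+  braces.expand('{1..10000000}');
+} catch (err) {
+  console.log(err instanceof RangeError); //=> true
+}
+
+// The limit can be raised or disabled explicitly when the expansion is intended:
+// braces.expand('{1..10000000}', { rangeLimit: false });
+```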
+
+## Performance
+
+Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
+
+### Better algorithms
+
+Fortunately, there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
+
+Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is currently the only library that does this.
+
+**The proof is in the numbers**
+
+Minimatch gets exponentially slower as patterns increase in complexity; braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
+
+| **Pattern**                 | **braces**          | **[minimatch][]**            |
+| --------------------------- | ------------------- | ---------------------------- |
+| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes)                |
+| `{1..1000000000000000}`     | `41 B` (1ms 15μs)   | N/A (freezes)                |
+| `{1..100000000000000}`      | `40 B` (890μs)      | N/A (freezes)                |
+| `{1..10000000000000}`       | `39 B` (2ms 49μs)   | N/A (freezes)                |
+| `{1..1000000000000}`        | `38 B` (608μs)      | N/A (freezes)                |
+| `{1..100000000000}`         | `37 B` (397μs)      | N/A (freezes)                |
+| `{1..10000000000}`          | `35 B` (983μs)      | N/A (freezes)                |
+| `{1..1000000000}`           | `34 B` (798μs)      | N/A (freezes)                |
+| `{1..100000000}`            | `33 B` (733μs)      | N/A (freezes)                |
+| `{1..10000000}`             | `32 B` (5ms 632μs)  | `78.89 MB` (16s 388ms 569μs) |
+| `{1..1000000}`              | `31 B` (1ms 381μs)  | `6.89 MB` (1s 496ms 887μs)   |
+| `{1..100000}`               | `30 B` (950μs)      | `588.89 kB` (146ms 921μs)    |
+| `{1..10000}`                | `29 B` (1ms 114μs)  | `48.89 kB` (14ms 187μs)      |
+| `{1..1000}`                 | `28 B` (760μs)      | `3.89 kB` (1ms 453μs)        |
+| `{1..100}`                  | `22 B` (345μs)      | `291 B` (196μs)              |
+| `{1..10}`                   | `10 B` (533μs)      | `20 B` (37μs)                |
+| `{1..3}`                    | `7 B` (190μs)       | `5 B` (27μs)                 |
+
+### Faster algorithms
+
+When you need expansion, braces is still much faster.
+
+_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
+
+| **Pattern**     | **braces**                  | **[minimatch][]**            |
+| --------------- | --------------------------- | ---------------------------- |
+| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
+| `{1..1000000}`  | `6.89 MB` (458ms 576μs)     | `6.89 MB` (1s 491ms 621μs)   |
+| `{1..100000}`   | `588.89 kB` (20ms 728μs)    | `588.89 kB` (156ms 919μs)    |
+| `{1..10000}`    | `48.89 kB` (2ms 202μs)      | `48.89 kB` (13ms 641μs)      |
+| `{1..1000}`     | `3.89 kB` (1ms 796μs)       | `3.89 kB` (1ms 958μs)        |
+| `{1..100}`      | `291 B` (424μs)             | `291 B` (211μs)              |
+| `{1..10}`       | `20 B` (487μs)              | `20 B` (72μs)                |
+| `{1..3}`        | `5 B` (166μs)               | `5 B` (27μs)                 |
+
+If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
+
+## Benchmarks
+
+### Running benchmarks
+
+Install dev dependencies, then run the `benchmark` script:
+
+```bash
+npm i -d && npm run benchmark
+```
+
+### Latest results
+
+Braces is more accurate, without sacrificing performance.
+ +```bash +● expand - range (expanded) + braces x 53,167 ops/sec ±0.12% (102 runs sampled) + minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) +● expand - range (optimized for regex) + braces x 373,442 ops/sec ±0.04% (100 runs sampled) + minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) +● expand - nested ranges (expanded) + braces x 33,921 ops/sec ±0.09% (99 runs sampled) + minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) +● expand - nested ranges (optimized for regex) + braces x 287,479 ops/sec ±0.52% (98 runs sampled) + minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) +● expand - set (expanded) + braces x 238,243 ops/sec ±0.19% (97 runs sampled) + minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) +● expand - set (optimized for regex) + braces x 321,844 ops/sec ±0.10% (97 runs sampled) + minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) +● expand - nested sets (expanded) + braces x 165,371 ops/sec ±0.42% (96 runs sampled) + minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) +● expand - nested sets (optimized for regex) + braces x 242,948 ops/sec ±0.12% (99 runs sampled) + minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| ----------- | ------------------------------------------------------------- | +| 197 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [es128](https://github.com/es128) | +| 1 | [eush77](https://github.com/eush77) | +| 1 | [hemanth](https://github.com/hemanth) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +- [GitHub Profile](https://github.com/jonschlinkert) +- [Twitter Profile](https://twitter.com/jonschlinkert) +- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +--- + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js new file mode 100644 index 0000000..d222c13 --- /dev/null +++ b/node_modules/braces/index.js @@ -0,0 +1,170 @@ +'use strict'; + +const stringify = require('./lib/stringify'); +const compile = require('./lib/compile'); +const expand = require('./lib/expand'); +const parse = require('./lib/parse'); + +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ + +const braces = (input, options = {}) => { + let output = []; + + if (Array.isArray(input)) { + for (const pattern of input) { + const result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } + + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ + +braces.parse = (input, options = {}) => parse(input, options); + +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); +}; + +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. + * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. 
+ * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); +}; + +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + + let result = expand(input, options); + + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } + + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } + + return result; +}; + +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } + + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; + +/** + * Expose "braces" + */ + +module.exports = braces; diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js new file mode 100644 index 0000000..dce69be --- /dev/null +++ b/node_modules/braces/lib/compile.js @@ -0,0 +1,60 @@ +'use strict'; + +const fill = require('fill-range'); +const utils = require('./utils'); + +const compile = (ast, options = {}) => { + const walk = (node, parent = {}) => { + const invalidBlock = utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + const invalid = invalidBlock === true || invalidNode === true; + const prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; + + if (node.isOpen === true) { + return prefix + node.value; + } + + if (node.isClose === true) { + console.log('node.isClose', prefix, node.value); + return prefix + node.value; + } + + if (node.type === 'open') { + return invalid ? prefix + node.value : '('; + } + + if (node.type === 'close') { + return invalid ? prefix + node.value : ')'; + } + + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : invalid ? node.value : '|'; + } + + if (node.value) { + return node.value; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); + + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? 
`(${range})` : range; + } + } + + if (node.nodes) { + for (const child of node.nodes) { + output += walk(child, node); + } + } + + return output; + }; + + return walk(ast); +}; + +module.exports = compile; diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js new file mode 100644 index 0000000..2bb3b88 --- /dev/null +++ b/node_modules/braces/lib/constants.js @@ -0,0 +1,57 @@ +'use strict'; + +module.exports = { + MAX_LENGTH: 10000, + + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ + + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ + + CHAR_ASTERISK: '*', /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js new file mode 100644 index 0000000..35b2c41 --- /dev/null +++ b/node_modules/braces/lib/expand.js @@ -0,0 +1,113 @@ +'use strict'; + +const fill = require('fill-range'); +const stringify = require('./stringify'); +const utils = require('./utils'); + +const append = (queue = '', stash = '', enclose = false) => { + const result = []; + + queue = [].concat(queue); + stash = [].concat(stash); + + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } + + for (const item of queue) { + if (Array.isArray(item)) { + for (const value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele); + } + } + } + return utils.flatten(result); +}; + +const expand = (ast, options = {}) => { + const rangeLimit = options.rangeLimit === undefined ? 
1000 : options.rangeLimit; + + const walk = (node, parent = {}) => { + node.queue = []; + + let p = parent; + let q = parent.queue; + + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } + + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } + + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } + + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } + + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } + + const enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; + + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } + + for (let i = 0; i < node.nodes.length; i++) { + const child = node.nodes[i]; + + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } + + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } + + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } + + if (child.nodes) { + walk(child, node); + } + } + + return queue; + }; + + return utils.flatten(walk(ast)); +}; + +module.exports = expand; diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js new file mode 100644 index 0000000..3a6988e --- /dev/null +++ b/node_modules/braces/lib/parse.js @@ -0,0 +1,331 @@ +'use strict'; + +const stringify = require('./stringify'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = require('./constants'); + +/** + * parse + */ + +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + const opts = options || {}; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } + + const ast = { type: 'root', input, nodes: [] }; + const stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + const length = input.length; + let index = 0; + let depth = 0; + let value; + + /** + * Helpers + */ + + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } + + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } + + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; + + push({ type: 'bos' }); + + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); + + /** + * Invalid chars + */ + + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } + + /** + * Escaped chars + */ + + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } + + /** + * Right square bracket (literal): ']' + */ + + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } + + /** + * Left square bracket: '[' + */ + + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + + let next; + + while (index < length && (next = advance())) { + value += next; + + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } + + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; + + if (brackets === 0) { + break; + } + } + } + + push({ type: 'text', value }); + continue; + } + + /** + * Parentheses + */ + + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } + + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } + + /** + * Quotes: '|"|` + */ + + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + const open = value; + let next; + + if (options.keepQuotes !== true) { + value = ''; + } + + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } + + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } + + value += next; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Left curly brace: '{' + */ + + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; + + const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + const brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; + + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } + + /** + * Right curly brace: '}' + */ + + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } + + const type = 'close'; + block = stack.pop(); + block.close = true; + + push({ type, value }); + depth--; + + block = stack[stack.length - 1]; + continue; + } + + /** + * Comma: ',' + */ + + if (value === CHAR_COMMA && depth 
> 0) { + if (block.ranges > 0) { + block.ranges = 0; + const open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } + + push({ type: 'comma', value }); + block.commas++; + continue; + } + + /** + * Dot: '.' + */ + + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + const siblings = block.nodes; + + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } + + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; + + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } + + block.ranges++; + block.args = []; + continue; + } + + if (prev.type === 'range') { + siblings.pop(); + + const before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } + + push({ type: 'dot', value }); + continue; + } + + /** + * Text + */ + + push({ type: 'text', value }); + } + + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); + + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); + + // get the location of the block on parent.nodes (block's siblings) + const parent = stack[stack.length - 1]; + const index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); + + push({ type: 'eos' }); + return ast; +}; + +module.exports = parse; diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js new file mode 100644 index 0000000..8bcf872 --- /dev/null +++ b/node_modules/braces/lib/stringify.js @@ -0,0 +1,32 @@ +'use strict'; + +const utils = require('./utils'); + +module.exports = (ast, options = {}) => { + const stringify = (node, parent = {}) => { + const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; + + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } + + if (node.value) { + return node.value; + } + + if (node.nodes) { + for (const child of node.nodes) { + output += stringify(child); + } + } + return output; + }; + + return stringify(ast); +}; + diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js new file mode 100644 index 0000000..d19311f --- /dev/null +++ b/node_modules/braces/lib/utils.js @@ -0,0 +1,122 @@ +'use strict'; + +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; +}; + +/** + * Find a node of the given type + */ + +exports.find = (node, type) => node.nodes.find(node => node.type === type); + +/** + * Find a node of the given type + */ + +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= limit; +}; + +/** + * Escape the given node with '\\' before node.value + */ + +exports.escapeNode = 
(block, n = 0, type) => { + const node = block.nodes[n]; + if (!node) return; + + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; + +/** + * Returns true if the given brace node should be enclosed in literal braces + */ + +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a brace node is invalid. + */ + +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a node is an open or close node + */ + +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; + +/** + * Reduce an array of text nodes. + */ + +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); + +/** + * Flatten an array + */ + +exports.flatten = (...args) => { + const result = []; + + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + const ele = arr[i]; + + if (Array.isArray(ele)) { + flat(ele); + continue; + } + + if (ele !== undefined) { + result.push(ele); + } + } + return result; + }; + + flat(args); + return result; +}; diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json new file mode 100644 index 0000000..c3c056e --- /dev/null +++ b/node_modules/braces/package.json @@ -0,0 +1,77 @@ +{ + "name": "braces", + "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", + "version": "3.0.3", + "homepage": "https://github.com/micromatch/braces", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Elan Shanker (https://github.com/es128)", + "Eugene Sharygin (https://github.com/eush77)", + "hemanth.hm (http://h3manth.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/braces", + "bugs": { + "url": "https://github.com/micromatch/braces/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha", + "benchmark": "node benchmark" + }, + "dependencies": { + "fill-range": "^7.1.1" + }, + "devDependencies": { + "ansi-colors": "^3.2.4", + "bash-path": "^2.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "braces", + "expand", + "expansion", + "filepath", + "fill", + "fs", + "glob", + "globbing", + "letter", + "match", + "matches", + "matching", + "number", + "numerical", + "path", + "range", + "ranges", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/node_modules/cross-spawn/LICENSE b/node_modules/cross-spawn/LICENSE new file mode 100644 index 0000000..8407b9a --- /dev/null +++ b/node_modules/cross-spawn/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Made With MOXY Lda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/cross-spawn/README.md b/node_modules/cross-spawn/README.md
new file mode 100644
index 0000000..1ed9252
--- /dev/null
+++ b/node_modules/cross-spawn/README.md
@@ -0,0 +1,89 @@
+# cross-spawn
+
+[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][ci-image]][ci-url] [![Build status][appveyor-image]][appveyor-url]
+
+[npm-url]:https://npmjs.org/package/cross-spawn
+[downloads-image]:https://img.shields.io/npm/dm/cross-spawn.svg
+[npm-image]:https://img.shields.io/npm/v/cross-spawn.svg
+[ci-url]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml
+[ci-image]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml/badge.svg
+[appveyor-url]:https://ci.appveyor.com/project/satazor/node-cross-spawn
+[appveyor-image]:https://img.shields.io/appveyor/ci/satazor/node-cross-spawn/master.svg
+
+A cross-platform solution to node's spawn and spawnSync.
+
+## Installation
+
+Node.js version 8 and up:
+`$ npm install cross-spawn`
+
+Node.js version 7 and under:
+`$ npm install cross-spawn@6`
+
+## Why
+
+Node has issues when using spawn on Windows:
+
+- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318)
+- It does not support [shebangs](https://en.wikipedia.org/wiki/Shebang_(Unix))
+- Has problems running commands with [spaces](https://github.com/nodejs/node/issues/7367)
+- Has problems running commands with posix relative paths (e.g.: `./my-folder/my-executable`)
+- Has an [issue](https://github.com/moxystudio/node-cross-spawn/issues/82) with command shims (files in `node_modules/.bin/`), where arguments with quotes and parenthesis would result in [invalid syntax error](https://github.com/moxystudio/node-cross-spawn/blob/e77b8f22a416db46b6196767bcd35601d7e11d54/test/index.test.js#L149)
+- No `options.shell` support on node `<v4.8`
+
+Shebang support on Windows is limited: only `#!/usr/bin/env <program>` is handled, where `<program>` must not contain any arguments.
+If you would like to have the shebang support improved, feel free to contribute via a pull-request.
+
+Remember to always test your code on Windows!
+
+## Tests
+
+`$ npm test`
+`$ npm test -- --watch` during development
+
+## License
+
+Released under the [MIT License](https://www.opensource.org/licenses/mit-license.php).
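+A minimal usage sketch (illustrative; the module exposes an async `spawn` replacement as its main export and a sync variant as `spawn.sync`, and the `npm` invocation here is only an example command):
+
+```js
+const spawn = require('cross-spawn');
+
+// Drop-in replacement for child_process.spawn
+const child = spawn('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
+
+// Drop-in replacement for child_process.spawnSync
+const result = spawn.sync('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
+```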
diff --git a/node_modules/cross-spawn/index.js b/node_modules/cross-spawn/index.js new file mode 100644 index 0000000..5509742 --- /dev/null +++ b/node_modules/cross-spawn/index.js @@ -0,0 +1,39 @@ +'use strict'; + +const cp = require('child_process'); +const parse = require('./lib/parse'); +const enoent = require('./lib/enoent'); + +function spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); + + return spawned; +} + +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + + return result; +} + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; diff --git a/node_modules/cross-spawn/lib/enoent.js b/node_modules/cross-spawn/lib/enoent.js new file mode 100644 index 0000000..da33471 --- /dev/null +++ b/node_modules/cross-spawn/lib/enoent.js @@ -0,0 +1,59 @@ +'use strict'; + +const isWin = process.platform === 'win32'; + +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); +} + +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } + + const originalEmit = cp.emit; + + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed); + + if (err) { + return originalEmit.call(cp, 'error', err); + } + } + + return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params + }; +} + +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); + } + + return null; +} + +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); + } + + return null; +} + +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; diff --git a/node_modules/cross-spawn/lib/parse.js b/node_modules/cross-spawn/lib/parse.js new file mode 100644 index 0000000..0129d74 --- /dev/null +++ b/node_modules/cross-spawn/lib/parse.js @@ -0,0 +1,91 @@ +'use strict'; + +const path = require('path'); +const resolveCommand = require('./util/resolveCommand'); +const escape = require('./util/escape'); +const readShebang = require('./util/readShebang'); + +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + +function detectShebang(parsed) { + parsed.file = 
resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; +} + +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } + + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); + + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); + + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); + + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } + + return parsed; +} + +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } + + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original + + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; + + // Delegate further parsing to shell or non-shell + return options.shell ? 
parsed : parseNonShell(parsed); +} + +module.exports = parse; diff --git a/node_modules/cross-spawn/lib/util/escape.js b/node_modules/cross-spawn/lib/util/escape.js new file mode 100644 index 0000000..7bf2905 --- /dev/null +++ b/node_modules/cross-spawn/lib/util/escape.js @@ -0,0 +1,47 @@ +'use strict'; + +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + return arg; +} + +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; + + // Algorithm below is based on https://qntm.org/cmd + // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input + // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information + + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"'); + + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1'); + + // All other backslashes occur literally + + // Quote the whole thing: + arg = `"${arg}"`; + + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + arg = arg.replace(metaCharsRegExp, '^$1'); + } + + return arg; +} + +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; diff --git a/node_modules/cross-spawn/lib/util/readShebang.js b/node_modules/cross-spawn/lib/util/readShebang.js new file mode 100644 index 0000000..5e83733 --- /dev/null +++ b/node_modules/cross-spawn/lib/util/readShebang.js @@ -0,0 +1,23 @@ +'use strict'; + +const fs = require('fs'); +const shebangCommand = require('shebang-command'); + +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 150; + const buffer = Buffer.alloc(size); + + let fd; + + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } + + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} + +module.exports = readShebang; diff --git a/node_modules/cross-spawn/lib/util/resolveCommand.js b/node_modules/cross-spawn/lib/util/resolveCommand.js new file mode 100644 index 0000000..7972455 --- /dev/null +++ b/node_modules/cross-spawn/lib/util/resolveCommand.js @@ -0,0 +1,52 @@ +'use strict'; + +const path = require('path'); +const which = require('which'); +const getPathKey = require('path-key'); + +function resolveCommandAttempt(parsed, withoutPathExt) { + const env = parsed.options.env || process.env; + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; + // Worker threads do not have process.chdir() + const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled; + + // If a custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (shouldSwitchCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } + } + + let resolved; + + try { + resolved = which.sync(parsed.command, { + path: env[getPathKey({ env })], + pathExt: withoutPathExt ? 
path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + if (shouldSwitchCwd) { + process.chdir(cwd); + } + } + + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); + } + + return resolved; +} + +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} + +module.exports = resolveCommand; diff --git a/node_modules/cross-spawn/package.json b/node_modules/cross-spawn/package.json new file mode 100644 index 0000000..24b2eb4 --- /dev/null +++ b/node_modules/cross-spawn/package.json @@ -0,0 +1,73 @@ +{ + "name": "cross-spawn", + "version": "7.0.6", + "description": "Cross platform child_process#spawn and child_process#spawnSync", + "keywords": [ + "spawn", + "spawnSync", + "windows", + "cross-platform", + "path-ext", + "shebang", + "cmd", + "execute" + ], + "author": "André Cruz ", + "homepage": "https://github.com/moxystudio/node-cross-spawn", + "repository": { + "type": "git", + "url": "git@github.com:moxystudio/node-cross-spawn.git" + }, + "license": "MIT", + "main": "index.js", + "files": [ + "lib" + ], + "scripts": { + "lint": "eslint .", + "test": "jest --env node --coverage", + "prerelease": "npm t && npm run lint", + "release": "standard-version", + "postrelease": "git push --follow-tags origin HEAD && npm publish" + }, + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.js": [ + "eslint --fix", + "git add" + ] + }, + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "devDependencies": { + "@commitlint/cli": "^8.1.0", + "@commitlint/config-conventional": "^8.1.0", + "babel-core": "^6.26.3", + "babel-jest": "^24.9.0", + "babel-preset-moxy": "^3.1.0", + "eslint": "^5.16.0", + "eslint-config-moxy": "^7.1.0", + "husky": "^3.0.5", + "jest": "^24.9.0", + "lint-staged": "^9.2.5", + "mkdirp": "^0.5.1", + "rimraf": "^3.0.0", + "standard-version": "^9.5.0" + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/execa/index.d.ts b/node_modules/execa/index.d.ts new file mode 100644 index 0000000..417d535 --- /dev/null +++ b/node_modules/execa/index.d.ts @@ -0,0 +1,564 @@ +/// +import {ChildProcess} from 'child_process'; +import {Stream, Readable as ReadableStream} from 'stream'; + +declare namespace execa { + type StdioOption = + | 'pipe' + | 'ipc' + | 'ignore' + | 'inherit' + | Stream + | number + | undefined; + + interface CommonOptions { + /** + Kill the spawned process when the parent process exits unless either: + - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) + - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit + + @default true + */ + readonly cleanup?: boolean; + + /** + Prefer locally installed binaries when looking for a binary to execute. + + If you `$ npm install foo`, you can then `execa('foo')`. + + @default false + */ + readonly preferLocal?: boolean; + + /** + Preferred path to find locally installed binaries in (use with `preferLocal`). 
+ + @default process.cwd() + */ + readonly localDir?: string; + + /** + Path to the Node.js executable to use in child processes. + + This can be either an absolute path or a path relative to the `cwd` option. + + Requires `preferLocal` to be `true`. + + For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. + + @default process.execPath + */ + readonly execPath?: string; + + /** + Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected. + + If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data. + + @default true + */ + readonly buffer?: boolean; + + /** + Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + + @default 'pipe' + */ + readonly stdin?: StdioOption; + + /** + Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + + @default 'pipe' + */ + readonly stdout?: StdioOption; + + /** + Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + + @default 'pipe' + */ + readonly stderr?: StdioOption; + + /** + Setting this to `false` resolves the promise with the error instead of rejecting it. + + @default true + */ + readonly reject?: boolean; + + /** + Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved. + + @default false + */ + readonly all?: boolean; + + /** + Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. + + @default true + */ + readonly stripFinalNewline?: boolean; + + /** + Set to `false` if you don't want to extend the environment variables when providing the `env` property. + + @default true + */ + readonly extendEnv?: boolean; + + /** + Current working directory of the child process. + + @default process.cwd() + */ + readonly cwd?: string; + + /** + Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this. + + @default process.env + */ + readonly env?: NodeJS.ProcessEnv; + + /** + Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified. + */ + readonly argv0?: string; + + /** + Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. + + @default 'pipe' + */ + readonly stdio?: 'pipe' | 'ignore' | 'inherit' | readonly StdioOption[]; + + /** + Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execa.node()`: + - `json`: Uses `JSON.stringify()` and `JSON.parse()`. + - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) + + Requires Node.js `13.2.0` or later. + + [More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) + + @default 'json' + */ + readonly serialization?: 'json' | 'advanced'; + + /** + Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). 
+ + @default false + */ + readonly detached?: boolean; + + /** + Sets the user identity of the process. + */ + readonly uid?: number; + + /** + Sets the group identity of the process. + */ + readonly gid?: number; + + /** + If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. + + We recommend against using this option since it is: + - not cross-platform, encouraging shell-specific syntax. + - slower, because of the additional shell interpretation. + - unsafe, potentially allowing command injection. + + @default false + */ + readonly shell?: boolean | string; + + /** + Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. + + @default 'utf8' + */ + readonly encoding?: EncodingType; + + /** + If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds. + + @default 0 + */ + readonly timeout?: number; + + /** + Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB. + + @default 100_000_000 + */ + readonly maxBuffer?: number; + + /** + Signal value to be used when the spawned process will be killed. + + @default 'SIGTERM' + */ + readonly killSignal?: string | number; + + /** + If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. + + @default false + */ + readonly windowsVerbatimArguments?: boolean; + + /** + On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. + + @default true + */ + readonly windowsHide?: boolean; + } + + interface Options extends CommonOptions { + /** + Write some input to the `stdin` of your binary. + */ + readonly input?: string | Buffer | ReadableStream; + } + + interface SyncOptions extends CommonOptions { + /** + Write some input to the `stdin` of your binary. + */ + readonly input?: string | Buffer; + } + + interface NodeOptions extends Options { + /** + The Node.js executable to use. + + @default process.execPath + */ + readonly nodePath?: string; + + /** + List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. + + @default process.execArgv + */ + readonly nodeOptions?: string[]; + } + + interface ExecaReturnBase { + /** + The file and arguments that were run, for logging purposes. + + This is not escaped and should not be executed directly as a process, including using `execa()` or `execa.command()`. + */ + command: string; + + /** + Same as `command` but escaped. + + This is meant to be copy and pasted into a shell, for debugging purposes. + Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execa.command()`. + */ + escapedCommand: string; + + /** + The numeric exit code of the process that was run. + */ + exitCode: number; + + /** + The output of the process on stdout. + */ + stdout: StdoutStderrType; + + /** + The output of the process on stderr. + */ + stderr: StdoutStderrType; + + /** + Whether the process failed to run. + */ + failed: boolean; + + /** + Whether the process timed out. 
+ */ + timedOut: boolean; + + /** + Whether the process was killed. + */ + killed: boolean; + + /** + The name of the signal that was used to terminate the process. For example, `SIGFPE`. + + If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. + */ + signal?: string; + + /** + A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. + + If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. + */ + signalDescription?: string; + } + + interface ExecaSyncReturnValue + extends ExecaReturnBase { + } + + /** + Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. + + The child process fails when: + - its exit code is not `0` + - it was killed with a signal + - timing out + - being canceled + - there's not enough memory or there are already too many child processes + */ + interface ExecaReturnValue + extends ExecaSyncReturnValue { + /** + The output of the process with `stdout` and `stderr` interleaved. + + This is `undefined` if either: + - the `all` option is `false` (default value) + - `execa.sync()` was used + */ + all?: StdoutErrorType; + + /** + Whether the process was canceled. + */ + isCanceled: boolean; + } + + interface ExecaSyncError + extends Error, + ExecaReturnBase { + /** + Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored. + + The child process stderr then stdout are appended to the end, separated with newlines and not interleaved. + */ + message: string; + + /** + This is the same as the `message` property except it does not include the child process stdout/stderr. + */ + shortMessage: string; + + /** + Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. + + This is `undefined` unless the child process exited due to an `error` event or a timeout. + */ + originalMessage?: string; + } + + interface ExecaError + extends ExecaSyncError { + /** + The output of the process with `stdout` and `stderr` interleaved. + + This is `undefined` if either: + - the `all` option is `false` (default value) + - `execa.sync()` was used + */ + all?: StdoutErrorType; + + /** + Whether the process was canceled. + */ + isCanceled: boolean; + } + + interface KillOptions { + /** + Milliseconds to wait for the child process to terminate before sending `SIGKILL`. + + Can be disabled with `false`. + + @default 5000 + */ + forceKillAfterTimeout?: number | false; + } + + interface ExecaChildPromise { + /** + Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). 
+ + This is `undefined` if either: + - the `all` option is `false` (the default value) + - both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) + */ + all?: ReadableStream; + + catch( + onRejected?: (reason: ExecaError) => ResultType | PromiseLike + ): Promise | ResultType>; + + /** + Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. + */ + kill(signal?: string, options?: KillOptions): void; + + /** + Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. + */ + cancel(): void; + } + + type ExecaChildProcess = ChildProcess & + ExecaChildPromise & + Promise>; +} + +declare const execa: { + /** + Execute a file. + + Think of this as a mix of `child_process.execFile` and `child_process.spawn`. + + @param file - The program/script to execute. + @param arguments - Arguments to pass to `file` on execution. + @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. + + @example + ``` + import execa = require('execa'); + + (async () => { + const {stdout} = await execa('echo', ['unicorns']); + console.log(stdout); + //=> 'unicorns' + + // Cancelling a spawned process + + const subprocess = execa('node'); + + setTimeout(() => { + subprocess.cancel() + }, 1000); + + try { + await subprocess; + } catch (error) { + console.log(subprocess.killed); // true + console.log(error.isCanceled); // true + } + })(); + + // Pipe the child process stdout to the current stdout + execa('echo', ['unicorns']).stdout.pipe(process.stdout); + ``` + */ + ( + file: string, + arguments?: readonly string[], + options?: execa.Options + ): execa.ExecaChildProcess; + ( + file: string, + arguments?: readonly string[], + options?: execa.Options + ): execa.ExecaChildProcess; + (file: string, options?: execa.Options): execa.ExecaChildProcess; + (file: string, options?: execa.Options): execa.ExecaChildProcess< + Buffer + >; + + /** + Execute a file synchronously. + + This method throws an `Error` if the command fails. + + @param file - The program/script to execute. + @param arguments - Arguments to pass to `file` on execution. + @returns A result `Object` with `stdout` and `stderr` properties. + */ + sync( + file: string, + arguments?: readonly string[], + options?: execa.SyncOptions + ): execa.ExecaSyncReturnValue; + sync( + file: string, + arguments?: readonly string[], + options?: execa.SyncOptions + ): execa.ExecaSyncReturnValue; + sync(file: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; + sync( + file: string, + options?: execa.SyncOptions + ): execa.ExecaSyncReturnValue; + + /** + Same as `execa()` except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. + + If the file or an argument contains spaces, they must be escaped with backslashes. 
This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. + + The `shell` option must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. + + @param command - The program/script to execute and its arguments. + @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. + + @example + ``` + import execa = require('execa'); + + (async () => { + const {stdout} = await execa.command('echo unicorns'); + console.log(stdout); + //=> 'unicorns' + })(); + ``` + */ + command(command: string, options?: execa.Options): execa.ExecaChildProcess; + command(command: string, options?: execa.Options): execa.ExecaChildProcess; + + /** + Same as `execa.command()` but synchronous. + + @param command - The program/script to execute and its arguments. + @returns A result `Object` with `stdout` and `stderr` properties. + */ + commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; + commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; + + /** + Execute a Node.js script as a child process. + + Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): + - the current Node version and options are used. This can be overridden using the `nodePath` and `nodeArguments` options. + - the `shell` option cannot be used + - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) + + @param scriptPath - Node.js script to execute. + @param arguments - Arguments to pass to `scriptPath` on execution. + @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. 
+ */ + node( + scriptPath: string, + arguments?: readonly string[], + options?: execa.NodeOptions + ): execa.ExecaChildProcess; + node( + scriptPath: string, + arguments?: readonly string[], + options?: execa.Options + ): execa.ExecaChildProcess; + node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; + node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; +}; + +export = execa; diff --git a/node_modules/execa/index.js b/node_modules/execa/index.js new file mode 100644 index 0000000..6fc9f12 --- /dev/null +++ b/node_modules/execa/index.js @@ -0,0 +1,268 @@ +'use strict'; +const path = require('path'); +const childProcess = require('child_process'); +const crossSpawn = require('cross-spawn'); +const stripFinalNewline = require('strip-final-newline'); +const npmRunPath = require('npm-run-path'); +const onetime = require('onetime'); +const makeError = require('./lib/error'); +const normalizeStdio = require('./lib/stdio'); +const {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} = require('./lib/kill'); +const {handleInput, getSpawnedResult, makeAllStream, validateInputSync} = require('./lib/stream'); +const {mergePromise, getSpawnedPromise} = require('./lib/promise'); +const {joinCommand, parseCommand, getEscapedCommand} = require('./lib/command'); + +const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100; + +const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => { + const env = extendEnv ? {...process.env, ...envOption} : envOption; + + if (preferLocal) { + return npmRunPath.env({env, cwd: localDir, execPath}); + } + + return env; +}; + +const handleArguments = (file, args, options = {}) => { + const parsed = crossSpawn._parse(file, args, options); + file = parsed.command; + args = parsed.args; + options = parsed.options; + + options = { + maxBuffer: DEFAULT_MAX_BUFFER, + buffer: true, + stripFinalNewline: true, + extendEnv: true, + preferLocal: false, + localDir: options.cwd || process.cwd(), + execPath: process.execPath, + encoding: 'utf8', + reject: true, + cleanup: true, + all: false, + windowsHide: true, + ...options + }; + + options.env = getEnv(options); + + options.stdio = normalizeStdio(options); + + if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') { + // #116 + args.unshift('/q'); + } + + return {file, args, options, parsed}; +}; + +const handleOutput = (options, value, error) => { + if (typeof value !== 'string' && !Buffer.isBuffer(value)) { + // When `execa.sync()` errors, we normalize it to '' to mimic `execa()` + return error === undefined ? 
undefined : ''; + } + + if (options.stripFinalNewline) { + return stripFinalNewline(value); + } + + return value; +}; + +const execa = (file, args, options) => { + const parsed = handleArguments(file, args, options); + const command = joinCommand(file, args); + const escapedCommand = getEscapedCommand(file, args); + + validateTimeout(parsed.options); + + let spawned; + try { + spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options); + } catch (error) { + // Ensure the returned error is always both a promise and a child process + const dummySpawned = new childProcess.ChildProcess(); + const errorPromise = Promise.reject(makeError({ + error, + stdout: '', + stderr: '', + all: '', + command, + escapedCommand, + parsed, + timedOut: false, + isCanceled: false, + killed: false + })); + return mergePromise(dummySpawned, errorPromise); + } + + const spawnedPromise = getSpawnedPromise(spawned); + const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise); + const processDone = setExitHandler(spawned, parsed.options, timedPromise); + + const context = {isCanceled: false}; + + spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned)); + spawned.cancel = spawnedCancel.bind(null, spawned, context); + + const handlePromise = async () => { + const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone); + const stdout = handleOutput(parsed.options, stdoutResult); + const stderr = handleOutput(parsed.options, stderrResult); + const all = handleOutput(parsed.options, allResult); + + if (error || exitCode !== 0 || signal !== null) { + const returnedError = makeError({ + error, + exitCode, + signal, + stdout, + stderr, + all, + command, + escapedCommand, + parsed, + timedOut, + isCanceled: context.isCanceled, + killed: spawned.killed + }); + + if (!parsed.options.reject) { + return returnedError; + } + + throw returnedError; + } + + return { + command, + escapedCommand, + exitCode: 0, + stdout, + stderr, + all, + failed: false, + timedOut: false, + isCanceled: false, + killed: false + }; + }; + + const handlePromiseOnce = onetime(handlePromise); + + handleInput(spawned, parsed.options.input); + + spawned.all = makeAllStream(spawned, parsed.options); + + return mergePromise(spawned, handlePromiseOnce); +}; + +module.exports = execa; + +module.exports.sync = (file, args, options) => { + const parsed = handleArguments(file, args, options); + const command = joinCommand(file, args); + const escapedCommand = getEscapedCommand(file, args); + + validateInputSync(parsed.options); + + let result; + try { + result = childProcess.spawnSync(parsed.file, parsed.args, parsed.options); + } catch (error) { + throw makeError({ + error, + stdout: '', + stderr: '', + all: '', + command, + escapedCommand, + parsed, + timedOut: false, + isCanceled: false, + killed: false + }); + } + + const stdout = handleOutput(parsed.options, result.stdout, result.error); + const stderr = handleOutput(parsed.options, result.stderr, result.error); + + if (result.error || result.status !== 0 || result.signal !== null) { + const error = makeError({ + stdout, + stderr, + error: result.error, + signal: result.signal, + exitCode: result.status, + command, + escapedCommand, + parsed, + timedOut: result.error && result.error.code === 'ETIMEDOUT', + isCanceled: false, + killed: result.signal !== null + }); + + if (!parsed.options.reject) { + return error; + } + + throw error; + } + + return { + command, + escapedCommand, + 
exitCode: 0, + stdout, + stderr, + failed: false, + timedOut: false, + isCanceled: false, + killed: false + }; +}; + +module.exports.command = (command, options) => { + const [file, ...args] = parseCommand(command); + return execa(file, args, options); +}; + +module.exports.commandSync = (command, options) => { + const [file, ...args] = parseCommand(command); + return execa.sync(file, args, options); +}; + +module.exports.node = (scriptPath, args, options = {}) => { + if (args && !Array.isArray(args) && typeof args === 'object') { + options = args; + args = []; + } + + const stdio = normalizeStdio.node(options); + const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect')); + + const { + nodePath = process.execPath, + nodeOptions = defaultExecArgv + } = options; + + return execa( + nodePath, + [ + ...nodeOptions, + scriptPath, + ...(Array.isArray(args) ? args : []) + ], + { + ...options, + stdin: undefined, + stdout: undefined, + stderr: undefined, + stdio, + shell: false + } + ); +}; diff --git a/node_modules/execa/lib/command.js b/node_modules/execa/lib/command.js new file mode 100644 index 0000000..859b006 --- /dev/null +++ b/node_modules/execa/lib/command.js @@ -0,0 +1,52 @@ +'use strict'; +const normalizeArgs = (file, args = []) => { + if (!Array.isArray(args)) { + return [file]; + } + + return [file, ...args]; +}; + +const NO_ESCAPE_REGEXP = /^[\w.-]+$/; +const DOUBLE_QUOTES_REGEXP = /"/g; + +const escapeArg = arg => { + if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) { + return arg; + } + + return `"${arg.replace(DOUBLE_QUOTES_REGEXP, '\\"')}"`; +}; + +const joinCommand = (file, args) => { + return normalizeArgs(file, args).join(' '); +}; + +const getEscapedCommand = (file, args) => { + return normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' '); +}; + +const SPACES_REGEXP = / +/g; + +// Handle `execa.command()` +const parseCommand = command => { + const tokens = []; + for (const token of command.trim().split(SPACES_REGEXP)) { + // Allow spaces to be escaped by a backslash if not meant as a delimiter + const previousToken = tokens[tokens.length - 1]; + if (previousToken && previousToken.endsWith('\\')) { + // Merge previous token with current one + tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`; + } else { + tokens.push(token); + } + } + + return tokens; +}; + +module.exports = { + joinCommand, + getEscapedCommand, + parseCommand +}; diff --git a/node_modules/execa/lib/error.js b/node_modules/execa/lib/error.js new file mode 100644 index 0000000..4214467 --- /dev/null +++ b/node_modules/execa/lib/error.js @@ -0,0 +1,88 @@ +'use strict'; +const {signalsByName} = require('human-signals'); + +const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => { + if (timedOut) { + return `timed out after ${timeout} milliseconds`; + } + + if (isCanceled) { + return 'was canceled'; + } + + if (errorCode !== undefined) { + return `failed with ${errorCode}`; + } + + if (signal !== undefined) { + return `was killed with ${signal} (${signalDescription})`; + } + + if (exitCode !== undefined) { + return `failed with exit code ${exitCode}`; + } + + return 'failed'; +}; + +const makeError = ({ + stdout, + stderr, + all, + error, + signal, + exitCode, + command, + escapedCommand, + timedOut, + isCanceled, + killed, + parsed: {options: {timeout}} +}) => { + // `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`. 
+ // We normalize them to `undefined` + exitCode = exitCode === null ? undefined : exitCode; + signal = signal === null ? undefined : signal; + const signalDescription = signal === undefined ? undefined : signalsByName[signal].description; + + const errorCode = error && error.code; + + const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}); + const execaMessage = `Command ${prefix}: ${command}`; + const isError = Object.prototype.toString.call(error) === '[object Error]'; + const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage; + const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n'); + + if (isError) { + error.originalMessage = error.message; + error.message = message; + } else { + error = new Error(message); + } + + error.shortMessage = shortMessage; + error.command = command; + error.escapedCommand = escapedCommand; + error.exitCode = exitCode; + error.signal = signal; + error.signalDescription = signalDescription; + error.stdout = stdout; + error.stderr = stderr; + + if (all !== undefined) { + error.all = all; + } + + if ('bufferedData' in error) { + delete error.bufferedData; + } + + error.failed = true; + error.timedOut = Boolean(timedOut); + error.isCanceled = isCanceled; + error.killed = killed && !timedOut; + + return error; +}; + +module.exports = makeError; diff --git a/node_modules/execa/lib/kill.js b/node_modules/execa/lib/kill.js new file mode 100644 index 0000000..287a142 --- /dev/null +++ b/node_modules/execa/lib/kill.js @@ -0,0 +1,115 @@ +'use strict'; +const os = require('os'); +const onExit = require('signal-exit'); + +const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5; + +// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior +const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => { + const killResult = kill(signal); + setKillTimeout(kill, signal, options, killResult); + return killResult; +}; + +const setKillTimeout = (kill, signal, options, killResult) => { + if (!shouldForceKill(signal, options, killResult)) { + return; + } + + const timeout = getForceKillAfterTimeout(options); + const t = setTimeout(() => { + kill('SIGKILL'); + }, timeout); + + // Guarded because there's no `.unref()` when `execa` is used in the renderer + // process in Electron. This cannot be tested since we don't run tests in + // Electron. 
+ // istanbul ignore else + if (t.unref) { + t.unref(); + } +}; + +const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => { + return isSigterm(signal) && forceKillAfterTimeout !== false && killResult; +}; + +const isSigterm = signal => { + return signal === os.constants.signals.SIGTERM || + (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM'); +}; + +const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => { + if (forceKillAfterTimeout === true) { + return DEFAULT_FORCE_KILL_TIMEOUT; + } + + if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) { + throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`); + } + + return forceKillAfterTimeout; +}; + +// `childProcess.cancel()` +const spawnedCancel = (spawned, context) => { + const killResult = spawned.kill(); + + if (killResult) { + context.isCanceled = true; + } +}; + +const timeoutKill = (spawned, signal, reject) => { + spawned.kill(signal); + reject(Object.assign(new Error('Timed out'), {timedOut: true, signal})); +}; + +// `timeout` option handling +const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => { + if (timeout === 0 || timeout === undefined) { + return spawnedPromise; + } + + let timeoutId; + const timeoutPromise = new Promise((resolve, reject) => { + timeoutId = setTimeout(() => { + timeoutKill(spawned, killSignal, reject); + }, timeout); + }); + + const safeSpawnedPromise = spawnedPromise.finally(() => { + clearTimeout(timeoutId); + }); + + return Promise.race([timeoutPromise, safeSpawnedPromise]); +}; + +const validateTimeout = ({timeout}) => { + if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) { + throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`); + } +}; + +// `cleanup` option handling +const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => { + if (!cleanup || detached) { + return timedPromise; + } + + const removeExitHandler = onExit(() => { + spawned.kill(); + }); + + return timedPromise.finally(() => { + removeExitHandler(); + }); +}; + +module.exports = { + spawnedKill, + spawnedCancel, + setupTimeout, + validateTimeout, + setExitHandler +}; diff --git a/node_modules/execa/lib/promise.js b/node_modules/execa/lib/promise.js new file mode 100644 index 0000000..bd9d523 --- /dev/null +++ b/node_modules/execa/lib/promise.js @@ -0,0 +1,46 @@ +'use strict'; + +const nativePromisePrototype = (async () => {})().constructor.prototype; +const descriptors = ['then', 'catch', 'finally'].map(property => [ + property, + Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property) +]); + +// The return value is a mixin of `childProcess` and `Promise` +const mergePromise = (spawned, promise) => { + for (const [property, descriptor] of descriptors) { + // Starting the main `promise` is deferred to avoid consuming streams + const value = typeof promise === 'function' ? 
+ (...args) => Reflect.apply(descriptor.value, promise(), args) : + descriptor.value.bind(promise); + + Reflect.defineProperty(spawned, property, {...descriptor, value}); + } + + return spawned; +}; + +// Use promises instead of `child_process` events +const getSpawnedPromise = spawned => { + return new Promise((resolve, reject) => { + spawned.on('exit', (exitCode, signal) => { + resolve({exitCode, signal}); + }); + + spawned.on('error', error => { + reject(error); + }); + + if (spawned.stdin) { + spawned.stdin.on('error', error => { + reject(error); + }); + } + }); +}; + +module.exports = { + mergePromise, + getSpawnedPromise +}; + diff --git a/node_modules/execa/lib/stdio.js b/node_modules/execa/lib/stdio.js new file mode 100644 index 0000000..45129ed --- /dev/null +++ b/node_modules/execa/lib/stdio.js @@ -0,0 +1,52 @@ +'use strict'; +const aliases = ['stdin', 'stdout', 'stderr']; + +const hasAlias = options => aliases.some(alias => options[alias] !== undefined); + +const normalizeStdio = options => { + if (!options) { + return; + } + + const {stdio} = options; + + if (stdio === undefined) { + return aliases.map(alias => options[alias]); + } + + if (hasAlias(options)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`); + } + + if (typeof stdio === 'string') { + return stdio; + } + + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } + + const length = Math.max(stdio.length, aliases.length); + return Array.from({length}, (value, index) => stdio[index]); +}; + +module.exports = normalizeStdio; + +// `ipc` is pushed unless it is already present +module.exports.node = options => { + const stdio = normalizeStdio(options); + + if (stdio === 'ipc') { + return 'ipc'; + } + + if (stdio === undefined || typeof stdio === 'string') { + return [stdio, stdio, stdio, 'ipc']; + } + + if (stdio.includes('ipc')) { + return stdio; + } + + return [...stdio, 'ipc']; +}; diff --git a/node_modules/execa/lib/stream.js b/node_modules/execa/lib/stream.js new file mode 100644 index 0000000..d445dd4 --- /dev/null +++ b/node_modules/execa/lib/stream.js @@ -0,0 +1,97 @@ +'use strict'; +const isStream = require('is-stream'); +const getStream = require('get-stream'); +const mergeStream = require('merge-stream'); + +// `input` option +const handleInput = (spawned, input) => { + // Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852 + // @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0 + if (input === undefined || spawned.stdin === undefined) { + return; + } + + if (isStream(input)) { + input.pipe(spawned.stdin); + } else { + spawned.stdin.end(input); + } +}; + +// `all` interleaves `stdout` and `stderr` +const makeAllStream = (spawned, {all}) => { + if (!all || (!spawned.stdout && !spawned.stderr)) { + return; + } + + const mixed = mergeStream(); + + if (spawned.stdout) { + mixed.add(spawned.stdout); + } + + if (spawned.stderr) { + mixed.add(spawned.stderr); + } + + return mixed; +}; + +// On failure, `result.stdout|stderr|all` should contain the currently buffered stream +const getBufferedData = async (stream, streamPromise) => { + if (!stream) { + return; + } + + stream.destroy(); + + try { + return await streamPromise; + } catch (error) { + return error.bufferedData; + } +}; + +const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => { + if (!stream || !buffer) { 
+ return; + } + + if (encoding) { + return getStream(stream, {encoding, maxBuffer}); + } + + return getStream.buffer(stream, {maxBuffer}); +}; + +// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all) +const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => { + const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer}); + const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer}); + const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2}); + + try { + return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]); + } catch (error) { + return Promise.all([ + {error, signal: error.signal, timedOut: error.timedOut}, + getBufferedData(stdout, stdoutPromise), + getBufferedData(stderr, stderrPromise), + getBufferedData(all, allPromise) + ]); + } +}; + +const validateInputSync = ({input}) => { + if (isStream(input)) { + throw new TypeError('The `input` option cannot be a stream in sync mode'); + } +}; + +module.exports = { + handleInput, + makeAllStream, + getSpawnedResult, + validateInputSync +}; + diff --git a/node_modules/execa/license b/node_modules/execa/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/execa/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/execa/package.json b/node_modules/execa/package.json new file mode 100644 index 0000000..22556f2 --- /dev/null +++ b/node_modules/execa/package.json @@ -0,0 +1,74 @@ +{ + "name": "execa", + "version": "5.1.1", + "description": "Process execution for humans", + "license": "MIT", + "repository": "sindresorhus/execa", + "funding": "https://github.com/sindresorhus/execa?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "lib" + ], + "keywords": [ + "exec", + "child", + "process", + "execute", + "fork", + "execfile", + "spawn", + "file", + "shell", + "bin", + "binary", + "binaries", + "npm", + "path", + "local" + ], + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "devDependencies": { + "@types/node": "^14.14.10", + "ava": "^2.4.0", + "get-node": "^11.0.1", + "is-running": "^2.1.0", + "nyc": "^15.1.0", + "p-event": "^4.2.0", + "tempfile": "^3.0.0", + "tsd": "^0.13.1", + "xo": "^0.35.0" + }, + "nyc": { + "reporter": [ + "text", + "lcov" + ], + "exclude": [ + "**/fixtures/**", + "**/test.js", + "**/test/**" + ] + } +} diff --git a/node_modules/execa/readme.md b/node_modules/execa/readme.md new file mode 100644 index 0000000..843edbc --- /dev/null +++ b/node_modules/execa/readme.md @@ -0,0 +1,663 @@ + +
+ +[![Coverage Status](https://codecov.io/gh/sindresorhus/execa/branch/main/graph/badge.svg)](https://codecov.io/gh/sindresorhus/execa) + +> Process execution for humans + +## Why + +This package improves [`child_process`](https://nodejs.org/api/child_process.html) methods with: + +- Promise interface. +- [Strips the final newline](#stripfinalnewline) from the output so you don't have to do `stdout.trim()`. +- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform. +- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why) +- Higher max buffer. 100 MB instead of 200 KB. +- [Executes locally installed binaries by name.](#preferlocal) +- [Cleans up spawned processes when the parent process dies.](#cleanup) +- [Get interleaved output](#all) from `stdout` and `stderr` similar to what is printed on the terminal. [*(Async only)*](#execasyncfile-arguments-options) +- [Can specify file and arguments as a single string without a shell](#execacommandcommand-options) +- More descriptive errors. + +## Install + +``` +$ npm install execa +``` + +## Usage + +```js +const execa = require('execa'); + +(async () => { + const {stdout} = await execa('echo', ['unicorns']); + console.log(stdout); + //=> 'unicorns' +})(); +``` + +### Pipe the child process stdout to the parent + +```js +const execa = require('execa'); + +execa('echo', ['unicorns']).stdout.pipe(process.stdout); +``` + +### Handling Errors + +```js +const execa = require('execa'); + +(async () => { + // Catching an error + try { + await execa('unknown', ['command']); + } catch (error) { + console.log(error); + /* + { + message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', + errno: -2, + code: 'ENOENT', + syscall: 'spawn unknown', + path: 'unknown', + spawnargs: ['command'], + originalMessage: 'spawn unknown ENOENT', + shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', + command: 'unknown command', + escapedCommand: 'unknown command', + stdout: '', + stderr: '', + all: '', + failed: true, + timedOut: false, + isCanceled: false, + killed: false + } + */ + } + +})(); +``` + +### Cancelling a spawned process + +```js +const execa = require('execa'); + +(async () => { + const subprocess = execa('node'); + + setTimeout(() => { + subprocess.cancel(); + }, 1000); + + try { + await subprocess; + } catch (error) { + console.log(subprocess.killed); // true + console.log(error.isCanceled); // true + } +})() +``` + +### Catching an error with the sync method + +```js +try { + execa.sync('unknown', ['command']); +} catch (error) { + console.log(error); + /* + { + message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', + errno: -2, + code: 'ENOENT', + syscall: 'spawnSync unknown', + path: 'unknown', + spawnargs: ['command'], + originalMessage: 'spawnSync unknown ENOENT', + shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', + command: 'unknown command', + escapedCommand: 'unknown command', + stdout: '', + stderr: '', + all: '', + failed: true, + timedOut: false, + isCanceled: false, + killed: false + } + */ +} +``` + +### Kill a process + +Using SIGTERM, and after 2 seconds, kill it with SIGKILL. + +```js +const subprocess = execa('node'); + +setTimeout(() => { + subprocess.kill('SIGTERM', { + forceKillAfterTimeout: 2000 + }); +}, 1000); +``` + +## API + +### execa(file, arguments, options?) + +Execute a file. 
Think of this as a mix of [`child_process.execFile()`](https://nodejs.org/api/child_process.html#child_process_child_process_execfile_file_args_options_callback) and [`child_process.spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). + +No escaping/quoting is needed. + +Unless the [`shell`](#shell) option is used, no shell interpreter (Bash, `cmd.exe`, etc.) is used, so shell features such as variables substitution (`echo $PATH`) are not allowed. + +Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) which: + - is also a `Promise` resolving or rejecting with a [`childProcessResult`](#childProcessResult). + - exposes the following additional methods and properties. + +#### kill(signal?, options?) + +Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal) except: if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. + +##### options.forceKillAfterTimeout + +Type: `number | false`\ +Default: `5000` + +Milliseconds to wait for the child process to terminate before sending `SIGKILL`. + +Can be disabled with `false`. + +#### cancel() + +Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. + +#### all + +Type: `ReadableStream | undefined` + +Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). + +This is `undefined` if either: + - the [`all` option](#all-2) is `false` (the default value) + - both [`stdout`](#stdout-1) and [`stderr`](#stderr-1) options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) + +### execa.sync(file, arguments?, options?) + +Execute a file synchronously. + +Returns or throws a [`childProcessResult`](#childProcessResult). + +### execa.command(command, options?) + +Same as [`execa()`](#execafile-arguments-options) except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. + +If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. + +The [`shell` option](#shell) must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. + +### execa.commandSync(command, options?) + +Same as [`execa.command()`](#execacommand-command-options) but synchronous. + +Returns or throws a [`childProcessResult`](#childProcessResult). + +### execa.node(scriptPath, arguments?, options?) + +Execute a Node.js script as a child process. 
+ +Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): + - the current Node version and options are used. This can be overridden using the [`nodePath`](#nodepath-for-node-only) and [`nodeOptions`](#nodeoptions-for-node-only) options. + - the [`shell`](#shell) option cannot be used + - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) + +### childProcessResult + +Type: `object` + +Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. + +The child process [fails](#failed) when: +- its [exit code](#exitcode) is not `0` +- it was [killed](#killed) with a [signal](#signal) +- [timing out](#timedout) +- [being canceled](#iscanceled) +- there's not enough memory or there are already too many child processes + +#### command + +Type: `string` + +The file and arguments that were run, for logging purposes. + +This is not escaped and should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). + +#### escapedCommand + +Type: `string` + +Same as [`command`](#command) but escaped. + +This is meant to be copy and pasted into a shell, for debugging purposes. +Since the escaping is fairly basic, this should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). + +#### exitCode + +Type: `number` + +The numeric exit code of the process that was run. + +#### stdout + +Type: `string | Buffer` + +The output of the process on stdout. + +#### stderr + +Type: `string | Buffer` + +The output of the process on stderr. + +#### all + +Type: `string | Buffer | undefined` + +The output of the process with `stdout` and `stderr` interleaved. + +This is `undefined` if either: + - the [`all` option](#all-2) is `false` (the default value) + - `execa.sync()` was used + +#### failed + +Type: `boolean` + +Whether the process failed to run. + +#### timedOut + +Type: `boolean` + +Whether the process timed out. + +#### isCanceled + +Type: `boolean` + +Whether the process was canceled. + +#### killed + +Type: `boolean` + +Whether the process was killed. + +#### signal + +Type: `string | undefined` + +The name of the signal that was used to terminate the process. For example, `SIGFPE`. + +If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. + +#### signalDescription + +Type: `string | undefined` + +A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. + +If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. + +#### message + +Type: `string` + +Error message when the child process failed to run. In addition to the [underlying error message](#originalMessage), it also contains some information related to why the child process errored. + +The child process [stderr](#stderr) then [stdout](#stdout) are appended to the end, separated with newlines and not interleaved. 
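+For example (a minimal sketch; the exact exit code and stderr text depend on your platform and `ls` implementation):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	try {
+		await execa('ls', ['/nonexistent-path']);
+	} catch (error) {
+		// `message` starts with the failure description, then stderr, then stdout
+		console.log(error.message);
+		//=> e.g. 'Command failed with exit code 2: ls /nonexistent-path
+		//   ls: cannot access '/nonexistent-path': No such file or directory'
+	}
+})();
+```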
+ +#### shortMessage + +Type: `string` + +This is the same as the [`message` property](#message) except it does not include the child process stdout/stderr. + +#### originalMessage + +Type: `string | undefined` + +Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. + +This is `undefined` unless the child process exited due to an `error` event or a timeout. + +### options + +Type: `object` + +#### cleanup + +Type: `boolean`\ +Default: `true` + +Kill the spawned process when the parent process exits unless either: + - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) + - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit + +#### preferLocal + +Type: `boolean`\ +Default: `false` + +Prefer locally installed binaries when looking for a binary to execute.\ +If you `$ npm install foo`, you can then `execa('foo')`. + +#### localDir + +Type: `string`\ +Default: `process.cwd()` + +Preferred path to find locally installed binaries in (use with `preferLocal`). + +#### execPath + +Type: `string`\ +Default: `process.execPath` (Current Node.js executable) + +Path to the Node.js executable to use in child processes. + +This can be either an absolute path or a path relative to the [`cwd` option](#cwd). + +Requires [`preferLocal`](#preferlocal) to be `true`. + +For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. + +#### buffer + +Type: `boolean`\ +Default: `true` + +Buffer the output from the spawned process. When set to `false`, you must read the output of [`stdout`](#stdout-1) and [`stderr`](#stderr-1) (or [`all`](#all) if the [`all`](#all-2) option is `true`). Otherwise the returned promise will not be resolved/rejected. + +If the spawned process fails, [`error.stdout`](#stdout), [`error.stderr`](#stderr), and [`error.all`](#all) will contain the buffered data. + +#### input + +Type: `string | Buffer | stream.Readable` + +Write some input to the `stdin` of your binary.\ +Streams are not allowed when using the synchronous methods. + +#### stdin + +Type: `string | number | Stream | undefined`\ +Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### stdout + +Type: `string | number | Stream | undefined`\ +Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### stderr + +Type: `string | number | Stream | undefined`\ +Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### all + +Type: `boolean`\ +Default: `false` + +Add an `.all` property on the [promise](#all) and the [resolved value](#all-1). The property contains the output of the process with `stdout` and `stderr` interleaved. + +#### reject + +Type: `boolean`\ +Default: `true` + +Setting this to `false` resolves the promise with the error instead of rejecting it. + +#### stripFinalNewline + +Type: `boolean`\ +Default: `true` + +Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. 
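+A small sketch of the difference, using `echo` (which emits a trailing newline):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	const {stdout} = await execa('echo', ['unicorns']);
+	console.log(JSON.stringify(stdout));
+	//=> "unicorns" (the final newline is stripped by default)
+
+	const {stdout: raw} = await execa('echo', ['unicorns'], {stripFinalNewline: false});
+	console.log(JSON.stringify(raw));
+	//=> "unicorns\n"
+})();
+```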
+ +#### extendEnv + +Type: `boolean`\ +Default: `true` + +Set to `false` if you don't want to extend the environment variables when providing the `env` property. + +--- + +Execa also accepts the below options which are the same as the options for [`child_process#spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options)/[`child_process#exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) + +#### cwd + +Type: `string`\ +Default: `process.cwd()` + +Current working directory of the child process. + +#### env + +Type: `object`\ +Default: `process.env` + +Environment key-value pairs. Extends automatically from `process.env`. Set [`extendEnv`](#extendenv) to `false` if you don't want this. + +#### argv0 + +Type: `string` + +Explicitly set the value of `argv[0]` sent to the child process. This will be set to `file` if not specified. + +#### stdio + +Type: `string | string[]`\ +Default: `pipe` + +Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. + +#### serialization + +Type: `string`\ +Default: `'json'` + +Specify the kind of serialization used for sending messages between processes when using the [`stdio: 'ipc'`](#stdio) option or [`execa.node()`](#execanodescriptpath-arguments-options): + - `json`: Uses `JSON.stringify()` and `JSON.parse()`. + - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) + +Requires Node.js `13.2.0` or later. + +[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) + +#### detached + +Type: `boolean` + +Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). + +#### uid + +Type: `number` + +Sets the user identity of the process. + +#### gid + +Type: `number` + +Sets the group identity of the process. + +#### shell + +Type: `boolean | string`\ +Default: `false` + +If `true`, runs `file` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. + +We recommend against using this option since it is: +- not cross-platform, encouraging shell-specific syntax. +- slower, because of the additional shell interpretation. +- unsafe, potentially allowing command injection. + +#### encoding + +Type: `string | null`\ +Default: `utf8` + +Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. + +#### timeout + +Type: `number`\ +Default: `0` + +If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds. + +#### maxBuffer + +Type: `number`\ +Default: `100_000_000` (100 MB) + +Largest amount of data in bytes allowed on `stdout` or `stderr`. + +#### killSignal + +Type: `string | number`\ +Default: `SIGTERM` + +Signal value to be used when the spawned process will be killed. + +#### windowsVerbatimArguments + +Type: `boolean`\ +Default: `false` + +If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. 
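+As an illustration, several of the `child_process` passthrough options above can be combined in one call (a sketch only; `long-task.js` is a hypothetical script):
+
+```js
+const execa = require('execa');
+
+(async () => {
+	try {
+		await execa('node', ['long-task.js'], {
+			cwd: '/tmp', // working directory of the child process
+			env: {TASK_MODE: 'fast'}, // merged into process.env unless `extendEnv` is false
+			timeout: 2000, // send `killSignal` if the child runs longer than 2 seconds
+			killSignal: 'SIGINT'
+		});
+	} catch (error) {
+		console.log(error.timedOut); // true if the timeout was hit
+	}
+})();
+```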
+ +#### windowsHide + +Type: `boolean`\ +Default: `true` + +On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. + +#### nodePath *(For `.node()` only)* + +Type: `string`\ +Default: [`process.execPath`](https://nodejs.org/api/process.html#process_process_execpath) + +Node.js executable used to create the child process. + +#### nodeOptions *(For `.node()` only)* + +Type: `string[]`\ +Default: [`process.execArgv`](https://nodejs.org/api/process.html#process_process_execargv) + +List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. + +## Tips + +### Retry on error + +Gracefully handle failures by using automatic retries and exponential backoff with the [`p-retry`](https://github.com/sindresorhus/p-retry) package: + +```js +const pRetry = require('p-retry'); + +const run = async () => { + const results = await execa('curl', ['-sSL', 'https://sindresorhus.com/unicorn']); + return results; +}; + +(async () => { + console.log(await pRetry(run, {retries: 5})); +})(); +``` + +### Save and pipe output from a child process + +Let's say you want to show the output of a child process in real-time while also saving it to a variable. + +```js +const execa = require('execa'); + +const subprocess = execa('echo', ['foo']); +subprocess.stdout.pipe(process.stdout); + +(async () => { + const {stdout} = await subprocess; + console.log('child output:', stdout); +})(); +``` + +### Redirect output to a file + +```js +const execa = require('execa'); + +const subprocess = execa('echo', ['foo']) +subprocess.stdout.pipe(fs.createWriteStream('stdout.txt')) +``` + +### Redirect input from a file + +```js +const execa = require('execa'); + +const subprocess = execa('cat') +fs.createReadStream('stdin.txt').pipe(subprocess.stdin) +``` + +### Execute the current package's binary + +```js +const {getBinPathSync} = require('get-bin-path'); + +const binPath = getBinPathSync(); +const subprocess = execa(binPath); +``` + +`execa` can be combined with [`get-bin-path`](https://github.com/ehmicky/get-bin-path) to test the current package's binary. As opposed to hard-coding the path to the binary, this validates that the `package.json` `bin` field is correctly set up. + +## Related + +- [gulp-execa](https://github.com/ehmicky/gulp-execa) - Gulp plugin for `execa` +- [nvexeca](https://github.com/ehmicky/nvexeca) - Run `execa` using any Node.js version +- [sudo-prompt](https://github.com/jorangreef/sudo-prompt) - Run commands with elevated privileges. + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [@ehmicky](https://github.com/ehmicky) + +--- + +
+Get professional support for this package with a Tidelift subscription.
+
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
+
diff --git a/node_modules/fast-glob/LICENSE b/node_modules/fast-glob/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/fast-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/fast-glob/README.md b/node_modules/fast-glob/README.md new file mode 100644 index 0000000..1d7843a --- /dev/null +++ b/node_modules/fast-glob/README.md @@ -0,0 +1,830 @@ +# fast-glob + +> It's a very fast and efficient [glob][glob_definition] library for [Node.js][node_js]. + +This package provides methods for traversing the file system and returning pathnames that matched a defined set of a specified pattern according to the rules used by the Unix Bash shell with some simplifications, meanwhile results are returned in **arbitrary order**. Quick, simple, effective. + +## Table of Contents + +
+Details + +* [Highlights](#highlights) +* [Old and modern mode](#old-and-modern-mode) +* [Pattern syntax](#pattern-syntax) + * [Basic syntax](#basic-syntax) + * [Advanced syntax](#advanced-syntax) +* [Installation](#installation) +* [API](#api) + * [Asynchronous](#asynchronous) + * [Synchronous](#synchronous) + * [Stream](#stream) + * [patterns](#patterns) + * [[options]](#options) + * [Helpers](#helpers) + * [generateTasks](#generatetaskspatterns-options) + * [isDynamicPattern](#isdynamicpatternpattern-options) + * [escapePath](#escapepathpath) + * [convertPathToPattern](#convertpathtopatternpath) +* [Options](#options-3) + * [Common](#common) + * [concurrency](#concurrency) + * [cwd](#cwd) + * [deep](#deep) + * [followSymbolicLinks](#followsymboliclinks) + * [fs](#fs) + * [ignore](#ignore) + * [suppressErrors](#suppresserrors) + * [throwErrorOnBrokenSymbolicLink](#throwerroronbrokensymboliclink) + * [Output control](#output-control) + * [absolute](#absolute) + * [markDirectories](#markdirectories) + * [objectMode](#objectmode) + * [onlyDirectories](#onlydirectories) + * [onlyFiles](#onlyfiles) + * [stats](#stats) + * [unique](#unique) + * [Matching control](#matching-control) + * [braceExpansion](#braceexpansion) + * [caseSensitiveMatch](#casesensitivematch) + * [dot](#dot) + * [extglob](#extglob) + * [globstar](#globstar) + * [baseNameMatch](#basenamematch) +* [FAQ](#faq) + * [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) + * [How to write patterns on Windows?](#how-to-write-patterns-on-windows) + * [Why are parentheses match wrong?](#why-are-parentheses-match-wrong) + * [How to exclude directory from reading?](#how-to-exclude-directory-from-reading) + * [How to use UNC path?](#how-to-use-unc-path) + * [Compatible with `node-glob`?](#compatible-with-node-glob) +* [Benchmarks](#benchmarks) + * [Server](#server) + * [Nettop](#nettop) +* [Changelog](#changelog) +* [License](#license) + +
+ +## Highlights + +* Fast. Probably the fastest. +* Supports multiple and negative patterns. +* Synchronous, Promise and Stream API. +* Object mode. Can return more than just strings. +* Error-tolerant. + +## Old and modern mode + +This package works in two modes, depending on the environment in which it is used. + +* **Old mode**. Node.js below 10.10 or when the [`stats`](#stats) option is *enabled*. +* **Modern mode**. Node.js 10.10+ and the [`stats`](#stats) option is *disabled*. + +The modern mode is faster. Learn more about the [internal mechanism][nodelib_fs_scandir_old_and_modern_modern]. + +## Pattern syntax + +> :warning: Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. + +There is more than one form of syntax: basic and advanced. Below is a brief overview of the supported features. Also pay attention to our [FAQ](#faq). + +> :book: This package uses [`micromatch`][micromatch] as a library for pattern matching. + +### Basic syntax + +* An asterisk (`*`) — matches everything except slashes (path separators), hidden files (names starting with `.`). +* A double star or globstar (`**`) — matches zero or more directories. +* Question mark (`?`) – matches any single character except slashes (path separators). +* Sequence (`[seq]`) — matches any character in sequence. + +> :book: A few additional words about the [basic matching behavior][picomatch_matching_behavior]. + +Some examples: + +* `src/**/*.js` — matches all files in the `src` directory (any level of nesting) that have the `.js` extension. +* `src/*.??` — matches all files in the `src` directory (only first level of nesting) that have a two-character extension. +* `file-[01].js` — matches files: `file-0.js`, `file-1.js`. + +### Advanced syntax + +* [Escapes characters][micromatch_backslashes] (`\\`) — matching special characters (`$^*+?()[]`) as literals. +* [POSIX character classes][picomatch_posix_brackets] (`[[:digit:]]`). +* [Extended globs][micromatch_extglobs] (`?(pattern-list)`). +* [Bash style brace expansions][micromatch_braces] (`{}`). +* [Regexp character classes][micromatch_regex_character_classes] (`[1-5]`). +* [Regex groups][regular_expressions_brackets] (`(a|b)`). + +> :book: A few additional words about the [advanced matching behavior][micromatch_extended_globbing]. + +Some examples: + +* `src/**/*.{css,scss}` — matches all files in the `src` directory (any level of nesting) that have the `.css` or `.scss` extension. +* `file-[[:digit:]].js` — matches files: `file-0.js`, `file-1.js`, …, `file-9.js`. +* `file-{1..3}.js` — matches files: `file-1.js`, `file-2.js`, `file-3.js`. +* `file-(1|2)` — matches files: `file-1.js`, `file-2.js`. + +## Installation + +```console +npm install fast-glob +``` + +## API + +### Asynchronous + +```js +fg(patterns, [options]) +fg.async(patterns, [options]) +fg.glob(patterns, [options]) +``` + +Returns a `Promise` with an array of matching entries. + +```js +const fg = require('fast-glob'); + +const entries = await fg(['.editorconfig', '**/index.js'], { dot: true }); + +// ['.editorconfig', 'services/index.js'] +``` + +### Synchronous + +```js +fg.sync(patterns, [options]) +fg.globSync(patterns, [options]) +``` + +Returns an array of matching entries. 
+ +```js +const fg = require('fast-glob'); + +const entries = fg.sync(['.editorconfig', '**/index.js'], { dot: true }); + +// ['.editorconfig', 'services/index.js'] +``` + +### Stream + +```js +fg.stream(patterns, [options]) +fg.globStream(patterns, [options]) +``` + +Returns a [`ReadableStream`][node_js_stream_readable_streams] when the `data` event will be emitted with matching entry. + +```js +const fg = require('fast-glob'); + +const stream = fg.stream(['.editorconfig', '**/index.js'], { dot: true }); + +for await (const entry of stream) { + // .editorconfig + // services/index.js +} +``` + +#### patterns + +* Required: `true` +* Type: `string | string[]` + +Any correct pattern(s). + +> :1234: [Pattern syntax](#pattern-syntax) +> +> :warning: This package does not respect the order of patterns. First, all the negative patterns are applied, and only then the positive patterns. If you want to get a certain order of records, use sorting or split calls. + +#### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +### Helpers + +#### `generateTasks(patterns, [options])` + +Returns the internal representation of patterns ([`Task`](./src/managers/tasks.ts) is a combining patterns by base directory). + +```js +fg.generateTasks('*'); + +[{ + base: '.', // Parent directory for all patterns inside this task + dynamic: true, // Dynamic or static patterns are in this task + patterns: ['*'], + positive: ['*'], + negative: [] +}] +``` + +##### patterns + +* Required: `true` +* Type: `string | string[]` + +Any correct pattern(s). + +##### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +#### `isDynamicPattern(pattern, [options])` + +Returns `true` if the passed pattern is a dynamic pattern. + +> :1234: [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) + +```js +fg.isDynamicPattern('*'); // true +fg.isDynamicPattern('abc'); // false +``` + +##### pattern + +* Required: `true` +* Type: `string` + +Any correct pattern. + +##### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +#### `escapePath(path)` + +Returns the path with escaped special characters depending on the platform. + +* Posix: + * `*?|(){}[]`; + * `!` at the beginning of line; + * `@+!` before the opening parenthesis; + * `\\` before non-special characters; +* Windows: + * `(){}[]` + * `!` at the beginning of line; + * `@+!` before the opening parenthesis; + * Characters like `*?|` cannot be used in the path ([windows_naming_conventions][windows_naming_conventions]), so they will not be escaped; + +```js +fg.escapePath('!abc'); +// \\!abc +fg.escapePath('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac' +// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac + +fg.posix.escapePath('C:\\Program Files (x86)\\**\\*'); +// C:\\\\Program Files \\(x86\\)\\*\\*\\* +fg.win32.escapePath('C:\\Program Files (x86)\\**\\*'); +// Windows: C:\\Program Files \\(x86\\)\\**\\* +``` + +#### `convertPathToPattern(path)` + +Converts a path to a pattern depending on the platform, including special character escaping. + +* Posix. Works similarly to the `fg.posix.escapePath` method. +* Windows. Works similarly to the `fg.win32.escapePath` method, additionally converting backslashes to forward slashes in cases where they are not escape characters (`!()+@{}[]`). 
+ +```js +fg.convertPathToPattern('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac'; +// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac + +fg.convertPathToPattern('C:/Program Files (x86)/**/*'); +// Posix: C:/Program Files \\(x86\\)/\\*\\*/\\* +// Windows: C:/Program Files \\(x86\\)/**/* + +fg.convertPathToPattern('C:\\Program Files (x86)\\**\\*'); +// Posix: C:\\\\Program Files \\(x86\\)\\*\\*\\* +// Windows: C:/Program Files \\(x86\\)/**/* + +fg.posix.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; +// Posix: \\\\\\?\\\\c:\\\\Program Files \\(x86\\)/**/* (broken pattern) +fg.win32.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; +// Windows: //?/c:/Program Files \\(x86\\)/**/* +``` + +## Options + +### Common options + +#### concurrency + +* Type: `number` +* Default: `os.cpus().length` + +Specifies the maximum number of concurrent requests from a reader to read directories. + +> :book: The higher the number, the higher the performance and load on the file system. If you want to read in quiet mode, set the value to a comfortable number or `1`. + +
+
+More details
+
+In Node, there are [two types of threads][nodejs_thread_pool]: an Event Loop (code) and a Thread Pool (fs, dns, …). The thread pool size is controlled by the `UV_THREADPOOL_SIZE` environment variable and defaults to 4 ([documentation][libuv_thread_pool]). A single pool is shared by all tasks within one Node process.
+
+Any code can therefore make at most 4 truly concurrent accesses to the file system; the remaining FS requests wait in the queue.
+
+> :book: Each new instance of FG in the same Node process will use the same thread pool.
+
+This package also has the `concurrency` option, which controls the number of concurrent accesses to the FS at the package level. By default, it equals the number of cores available to the current Node process. This allows you to set a value smaller than the pool size (`concurrency: 1`) or, conversely, to prepare tasks for the pool queue more quickly (`concurrency: Number.POSITIVE_INFINITY`).
+
+So, in fact, this package can **only make 4 concurrent requests to the FS** by default. You can increase this limit with the `UV_THREADPOOL_SIZE` environment variable, but in practice this does not give a proportional advantage.
+
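+A minimal sketch of the behaviour described above (the `src/**/*.js` pattern and the `run` wrapper are illustrative placeholders, not part of the package): the package-level `concurrency` option is chosen per call, while the libuv thread pool still bounds how many FS operations actually run in parallel.
+
+```js
+const os = require('os');
+const fg = require('fast-glob');
+
+async function run() {
+    // "Quiet" mode: keep only one directory-read request in flight at a time.
+    const calm = await fg('src/**/*.js', { concurrency: 1 });
+
+    // Default behaviour: one slot per CPU core available to the process.
+    const fast = await fg('src/**/*.js', { concurrency: os.cpus().length });
+
+    // Whatever value is used here, the libuv thread pool
+    // (UV_THREADPOOL_SIZE, default 4) still caps real FS parallelism.
+    return { calm, fast };
+}
+
+run().then(console.log);
+```
+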
+
+#### cwd
+
+* Type: `string`
+* Default: `process.cwd()`
+
+The current working directory in which to search.
+
+#### deep
+
+* Type: `number`
+* Default: `Infinity`
+
+Specifies the maximum depth of a read directory relative to the start directory.
+
+For example, you have the following tree:
+
+```js
+dir/
+└── one/            // 1
+    └── two/        // 2
+        └── file.js // 3
+```
+
+```js
+// With base directory
+fg.sync('dir/**', { onlyFiles: false, deep: 1 }); // ['dir/one']
+fg.sync('dir/**', { onlyFiles: false, deep: 2 }); // ['dir/one', 'dir/one/two']
+
+// With cwd option
+fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 1 }); // ['one']
+fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 2 }); // ['one', 'one/two']
+```
+
+> :book: If you specify a pattern with some base directory, this directory will not participate in the calculation of the depth of the found directories. Think of it as a [`cwd`](#cwd) option.
+
+#### followSymbolicLinks
+
+* Type: `boolean`
+* Default: `true`
+
+Indicates whether to traverse descendants of symbolic link directories when expanding `**` patterns.
+
+> :book: Note that this option does not affect the base directory of the pattern. For example, if `./a` is a symlink to directory `./b` and you specified `['./a**', './b/**']` patterns, then directory `./a` will still be read.
+
+> :book: If the [`stats`](#stats) option is specified, the information about the symbolic link (`fs.lstat`) will be replaced with information about the entry (`fs.stat`) behind it.
+
+#### fs
+
+* Type: `FileSystemAdapter`
+* Default: `fs.*`
+
+Custom implementation of methods for working with the file system.
+
+```ts
+export interface FileSystemAdapter {
+    lstat?: typeof fs.lstat;
+    stat?: typeof fs.stat;
+    lstatSync?: typeof fs.lstatSync;
+    statSync?: typeof fs.statSync;
+    readdir?: typeof fs.readdir;
+    readdirSync?: typeof fs.readdirSync;
+}
+```
+
+#### ignore
+
+* Type: `string[]`
+* Default: `[]`
+
+An array of glob patterns to exclude matches. This is an alternative way to use negative patterns.
+
+```js
+dir/
+├── package-lock.json
+└── package.json
+```
+
+```js
+fg.sync(['*.json', '!package-lock.json']); // ['package.json']
+fg.sync('*.json', { ignore: ['package-lock.json'] }); // ['package.json']
+```
+
+#### suppressErrors
+
+* Type: `boolean`
+* Default: `false`
+
+By default this package suppresses only `ENOENT` errors. Set to `true` to suppress any error.
+
+> :book: Can be useful when the directory has entries with a special level of access.
+
+#### throwErrorOnBrokenSymbolicLink
+
+* Type: `boolean`
+* Default: `false`
+
+If `true`, an error is thrown when a symbolic link is broken; if `false`, the result of the `lstat` call is returned safely.
+
+> :book: This option has no effect on errors when reading the symbolic link directory.
+
+### Output control
+
+#### absolute
+
+* Type: `boolean`
+* Default: `false`
+
+Return the absolute path for entries.
+
+```js
+fg.sync('*.js', { absolute: false }); // ['index.js']
+fg.sync('*.js', { absolute: true }); // ['/home/user/index.js']
+```
+
+> :book: This option is required if you want to use negative patterns with an absolute path, for example, `!${__dirname}/*.js`.
+
+#### markDirectories
+
+* Type: `boolean`
+* Default: `false`
+
+Mark the directory path with the final slash.
+ +```js +fg.sync('*', { onlyFiles: false, markDirectories: false }); // ['index.js', 'controllers'] +fg.sync('*', { onlyFiles: false, markDirectories: true }); // ['index.js', 'controllers/'] +``` + +#### objectMode + +* Type: `boolean` +* Default: `false` + +Returns objects (instead of strings) describing entries. + +```js +fg.sync('*', { objectMode: false }); // ['src/index.js'] +fg.sync('*', { objectMode: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: }] +``` + +The object has the following fields: + +* name (`string`) — the last part of the path (basename) +* path (`string`) — full path relative to the pattern base directory +* dirent ([`fs.Dirent`][node_js_fs_class_fs_dirent]) — instance of `fs.Dirent` + +> :book: An object is an internal representation of entry, so getting it does not affect performance. + +#### onlyDirectories + +* Type: `boolean` +* Default: `false` + +Return only directories. + +```js +fg.sync('*', { onlyDirectories: false }); // ['index.js', 'src'] +fg.sync('*', { onlyDirectories: true }); // ['src'] +``` + +> :book: If `true`, the [`onlyFiles`](#onlyfiles) option is automatically `false`. + +#### onlyFiles + +* Type: `boolean` +* Default: `true` + +Return only files. + +```js +fg.sync('*', { onlyFiles: false }); // ['index.js', 'src'] +fg.sync('*', { onlyFiles: true }); // ['index.js'] +``` + +#### stats + +* Type: `boolean` +* Default: `false` + +Enables an [object mode](#objectmode) with an additional field: + +* stats ([`fs.Stats`][node_js_fs_class_fs_stats]) — instance of `fs.Stats` + +```js +fg.sync('*', { stats: false }); // ['src/index.js'] +fg.sync('*', { stats: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: , stats: }] +``` + +> :book: Returns `fs.stat` instead of `fs.lstat` for symbolic links when the [`followSymbolicLinks`](#followsymboliclinks) option is specified. +> +> :warning: Unlike [object mode](#objectmode) this mode requires additional calls to the file system. On average, this mode is slower at least twice. See [old and modern mode](#old-and-modern-mode) for more details. + +#### unique + +* Type: `boolean` +* Default: `true` + +Ensures that the returned entries are unique. + +```js +fg.sync(['*.json', 'package.json'], { unique: false }); // ['package.json', 'package.json'] +fg.sync(['*.json', 'package.json'], { unique: true }); // ['package.json'] +``` + +If `true` and similar entries are found, the result is the first found. + +### Matching control + +#### braceExpansion + +* Type: `boolean` +* Default: `true` + +Enables Bash-like brace expansion. + +> :1234: [Syntax description][bash_hackers_syntax_expansion_brace] or more [detailed description][micromatch_braces]. + +```js +dir/ +├── abd +├── acd +└── a{b,c}d +``` + +```js +fg.sync('a{b,c}d', { braceExpansion: false }); // ['a{b,c}d'] +fg.sync('a{b,c}d', { braceExpansion: true }); // ['abd', 'acd'] +``` + +#### caseSensitiveMatch + +* Type: `boolean` +* Default: `true` + +Enables a [case-sensitive][wikipedia_case_sensitivity] mode for matching files. + +```js +dir/ +├── file.txt +└── File.txt +``` + +```js +fg.sync('file.txt', { caseSensitiveMatch: false }); // ['file.txt', 'File.txt'] +fg.sync('file.txt', { caseSensitiveMatch: true }); // ['file.txt'] +``` + +#### dot + +* Type: `boolean` +* Default: `false` + +Allow patterns to match entries that begin with a period (`.`). + +> :book: Note that an explicit dot in a portion of the pattern will always match dot files. 
+
+```js
+dir/
+├── .editorconfig
+└── package.json
+```
+
+```js
+fg.sync('*', { dot: false }); // ['package.json']
+fg.sync('*', { dot: true }); // ['.editorconfig', 'package.json']
+```
+
+#### extglob
+
+* Type: `boolean`
+* Default: `true`
+
+Enables Bash-like `extglob` functionality.
+
+> :1234: [Syntax description][micromatch_extglobs].
+
+```js
+dir/
+├── README.md
+└── package.json
+```
+
+```js
+fg.sync('*.+(json|md)', { extglob: false }); // []
+fg.sync('*.+(json|md)', { extglob: true }); // ['README.md', 'package.json']
+```
+
+#### globstar
+
+* Type: `boolean`
+* Default: `true`
+
+Enables recursive matching for patterns containing `**`. If `false`, `**` behaves exactly like `*`.
+
+```js
+dir/
+└── a
+    └── b
+```
+
+```js
+fg.sync('**', { onlyFiles: false, globstar: false }); // ['a']
+fg.sync('**', { onlyFiles: false, globstar: true }); // ['a', 'a/b']
+```
+
+#### baseNameMatch
+
+* Type: `boolean`
+* Default: `false`
+
+If set to `true`, then patterns without slashes will be matched against the basename of the path if it contains slashes.
+
+```js
+dir/
+└── one/
+    └── file.md
+```
+
+```js
+fg.sync('*.md', { baseNameMatch: false }); // []
+fg.sync('*.md', { baseNameMatch: true }); // ['one/file.md']
+```
+
+## FAQ
+
+## What is a static or dynamic pattern?
+
+All patterns can be divided into two types:
+
+* **static**. A pattern is considered static if it can be used to get an entry on the file system without using matching mechanisms. For example, the `file.js` pattern is a static pattern because we can just verify that it exists on the file system.
+* **dynamic**. A pattern is considered dynamic if it cannot be used directly to find occurrences without using a matching mechanism. For example, the `*` pattern is a dynamic pattern because we cannot use this pattern directly.
+
+A pattern is considered dynamic if it contains the following characters (`…` — any characters or their absence) or options:
+
+* The [`caseSensitiveMatch`](#casesensitivematch) option is disabled
+* `\\` (the escape character)
+* `*`, `?`, `!` (at the beginning of line)
+* `[…]`
+* `(…|…)`
+* `@(…)`, `!(…)`, `*(…)`, `?(…)`, `+(…)` (respects the [`extglob`](#extglob) option)
+* `{…,…}`, `{…..…}` (respects the [`braceExpansion`](#braceexpansion) option)
+
+## How to write patterns on Windows?
+
+Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. With the [`cwd`](#cwd) option use a convenient format.
+
+**Bad**
+
+```ts
+[
+    'directory\\*',
+    path.join(process.cwd(), '**')
+]
+```
+
+**Good**
+
+```ts
+[
+    'directory/*',
+    fg.convertPathToPattern(process.cwd()) + '/**'
+]
+```
+
+> :book: Use the [`.convertPathToPattern`](#convertpathtopatternpath) method to convert a Windows-style path to a Unix-style path.
+
+Read more about [matching with backslashes][micromatch_backslashes].
+
+## Why do patterns with parentheses match incorrectly?
+
+```js
+dir/
+└── (special-*file).txt
+```
+
+```js
+fg.sync(['(special-*file).txt']) // []
+```
+
+This behavior comes from Bash: you need to escape the special characters:
+
+```js
+fg.sync(['\\(special-*file\\).txt']) // ['(special-*file).txt']
+```
+
+Read more about [matching special characters as literals][picomatch_matching_special_characters_as_literals], or use the [`.escapePath`](#escapepathpath) method.
+
+## How to exclude directory from reading?
+
+You can use a negative pattern like this: `!**/node_modules` or `!**/node_modules/**`. You can also use the [`ignore`](#ignore) option. Just look at the example below.
+
+```js
+first/
+├── file.md
+└── second/
+    └── file.txt
+```
+
+If you don't want to read the `second` directory, you must write the following pattern: `!**/second` or `!**/second/**`.
+
+```js
+fg.sync(['**/*.md', '!**/second']); // ['first/file.md']
+fg.sync(['**/*.md'], { ignore: ['**/second/**'] }); // ['first/file.md']
+```
+
+> :warning: When you write `!**/second/**/*` it means that the directory will be **read**, but all the entries will not be included in the results.
+
+Keep in mind that if you write a pattern that excludes a directory, that directory will not be read under any circumstances.
+
+## How to use UNC path?
+
+You cannot use [Uniform Naming Convention (UNC)][unc_path] paths as patterns directly (due to their syntax), but you can use them as the [`cwd`](#cwd) directory or use the `fg.convertPathToPattern` method.
+
+```ts
+// cwd
+fg.sync('*', { cwd: '\\\\?\\C:\\Python27' /* or //?/C:/Python27 */ });
+fg.sync('Python27/*', { cwd: '\\\\?\\C:\\' /* or //?/C:/ */ });
+
+// .convertPathToPattern
+fg.sync(fg.convertPathToPattern('\\\\?\\c:\\Python27') + '/*');
+```
+
+## Compatible with `node-glob`?
+
+| node-glob    | fast-glob |
+| :----------: | :-------: |
+| `cwd`        | [`cwd`](#cwd) |
+| `root`       | – |
+| `dot`        | [`dot`](#dot) |
+| `nomount`    | – |
+| `mark`       | [`markDirectories`](#markdirectories) |
+| `nosort`     | – |
+| `nounique`   | [`unique`](#unique) |
+| `nobrace`    | [`braceExpansion`](#braceexpansion) |
+| `noglobstar` | [`globstar`](#globstar) |
+| `noext`      | [`extglob`](#extglob) |
+| `nocase`     | [`caseSensitiveMatch`](#casesensitivematch) |
+| `matchBase`  | [`baseNameMatch`](#basenamematch) |
+| `nodir`      | [`onlyFiles`](#onlyfiles) |
+| `ignore`     | [`ignore`](#ignore) |
+| `follow`     | [`followSymbolicLinks`](#followsymboliclinks) |
+| `realpath`   | – |
+| `absolute`   | [`absolute`](#absolute) |
+
+## Benchmarks
+
+You can see results [here](https://github.com/mrmlnc/fast-glob/actions/workflows/benchmark.yml?query=branch%3Amaster) for every commit to the `main` branch.
+
+* **Product benchmark** – comparison with the main competitors.
+* **Regress benchmark** – regression between the current version and the version from the npm registry.
+
+## Changelog
+
+See the [Releases section of our GitHub project][github_releases] for the changelog of each release version.
+
+## License
+
+This software is released under the terms of the MIT license.
+ +[bash_hackers_syntax_expansion_brace]: https://wiki.bash-hackers.org/syntax/expansion/brace +[github_releases]: https://github.com/mrmlnc/fast-glob/releases +[glob_definition]: https://en.wikipedia.org/wiki/Glob_(programming) +[glob_linux_man]: http://man7.org/linux/man-pages/man3/glob.3.html +[micromatch_backslashes]: https://github.com/micromatch/micromatch#backslashes +[micromatch_braces]: https://github.com/micromatch/braces +[micromatch_extended_globbing]: https://github.com/micromatch/micromatch#extended-globbing +[micromatch_extglobs]: https://github.com/micromatch/micromatch#extglobs +[micromatch_regex_character_classes]: https://github.com/micromatch/micromatch#regex-character-classes +[micromatch]: https://github.com/micromatch/micromatch +[node_js_fs_class_fs_dirent]: https://nodejs.org/api/fs.html#fs_class_fs_dirent +[node_js_fs_class_fs_stats]: https://nodejs.org/api/fs.html#fs_class_fs_stats +[node_js_stream_readable_streams]: https://nodejs.org/api/stream.html#stream_readable_streams +[node_js]: https://nodejs.org/en +[nodelib_fs_scandir_old_and_modern_modern]: https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode +[npm_normalize_path]: https://www.npmjs.com/package/normalize-path +[npm_unixify]: https://www.npmjs.com/package/unixify +[picomatch_matching_behavior]: https://github.com/micromatch/picomatch#matching-behavior-vs-bash +[picomatch_matching_special_characters_as_literals]: https://github.com/micromatch/picomatch#matching-special-characters-as-literals +[picomatch_posix_brackets]: https://github.com/micromatch/picomatch#posix-brackets +[regular_expressions_brackets]: https://www.regular-expressions.info/brackets.html +[unc_path]: https://learn.microsoft.com/openspecs/windows_protocols/ms-dtyp/62e862f4-2a51-452e-8eeb-dc4ff5ee33cc +[wikipedia_case_sensitivity]: https://en.wikipedia.org/wiki/Case_sensitivity +[nodejs_thread_pool]: https://nodejs.org/en/docs/guides/dont-block-the-event-loop +[libuv_thread_pool]: http://docs.libuv.org/en/v1.x/threadpool.html +[windows_naming_conventions]: https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions diff --git a/node_modules/fast-glob/out/index.d.ts b/node_modules/fast-glob/out/index.d.ts new file mode 100644 index 0000000..46823bb --- /dev/null +++ b/node_modules/fast-glob/out/index.d.ts @@ -0,0 +1,40 @@ +/// +import * as taskManager from './managers/tasks'; +import { Options as OptionsInternal } from './settings'; +import { Entry as EntryInternal, FileSystemAdapter as FileSystemAdapterInternal, Pattern as PatternInternal } from './types'; +type EntryObjectModePredicate = { + [TKey in keyof Pick]-?: true; +}; +type EntryStatsPredicate = { + [TKey in keyof Pick]-?: true; +}; +type EntryObjectPredicate = EntryObjectModePredicate | EntryStatsPredicate; +declare function FastGlob(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): Promise; +declare function FastGlob(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Promise; +declare namespace FastGlob { + type Options = OptionsInternal; + type Entry = EntryInternal; + type Task = taskManager.Task; + type Pattern = PatternInternal; + type FileSystemAdapter = FileSystemAdapterInternal; + const glob: typeof FastGlob; + const globSync: typeof sync; + const globStream: typeof stream; + const async: typeof FastGlob; + function sync(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): EntryInternal[]; 
+ function sync(source: PatternInternal | PatternInternal[], options?: OptionsInternal): string[]; + function stream(source: PatternInternal | PatternInternal[], options?: OptionsInternal): NodeJS.ReadableStream; + function generateTasks(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Task[]; + function isDynamicPattern(source: PatternInternal, options?: OptionsInternal): boolean; + function escapePath(source: string): PatternInternal; + function convertPathToPattern(source: string): PatternInternal; + namespace posix { + function escapePath(source: string): PatternInternal; + function convertPathToPattern(source: string): PatternInternal; + } + namespace win32 { + function escapePath(source: string): PatternInternal; + function convertPathToPattern(source: string): PatternInternal; + } +} +export = FastGlob; diff --git a/node_modules/fast-glob/out/index.js b/node_modules/fast-glob/out/index.js new file mode 100644 index 0000000..90365d4 --- /dev/null +++ b/node_modules/fast-glob/out/index.js @@ -0,0 +1,102 @@ +"use strict"; +const taskManager = require("./managers/tasks"); +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +const utils = require("./utils"); +async function FastGlob(source, options) { + assertPatternsInput(source); + const works = getWorks(source, async_1.default, options); + const result = await Promise.all(works); + return utils.array.flatten(result); +} +// https://github.com/typescript-eslint/typescript-eslint/issues/60 +// eslint-disable-next-line no-redeclare +(function (FastGlob) { + FastGlob.glob = FastGlob; + FastGlob.globSync = sync; + FastGlob.globStream = stream; + FastGlob.async = FastGlob; + function sync(source, options) { + assertPatternsInput(source); + const works = getWorks(source, sync_1.default, options); + return utils.array.flatten(works); + } + FastGlob.sync = sync; + function stream(source, options) { + assertPatternsInput(source); + const works = getWorks(source, stream_1.default, options); + /** + * The stream returned by the provider cannot work with an asynchronous iterator. + * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. + * This affects performance (+25%). I don't see best solution right now. 
+ */ + return utils.stream.merge(works); + } + FastGlob.stream = stream; + function generateTasks(source, options) { + assertPatternsInput(source); + const patterns = [].concat(source); + const settings = new settings_1.default(options); + return taskManager.generate(patterns, settings); + } + FastGlob.generateTasks = generateTasks; + function isDynamicPattern(source, options) { + assertPatternsInput(source); + const settings = new settings_1.default(options); + return utils.pattern.isDynamicPattern(source, settings); + } + FastGlob.isDynamicPattern = isDynamicPattern; + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escape(source); + } + FastGlob.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertPathToPattern(source); + } + FastGlob.convertPathToPattern = convertPathToPattern; + let posix; + (function (posix) { + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escapePosixPath(source); + } + posix.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertPosixPathToPattern(source); + } + posix.convertPathToPattern = convertPathToPattern; + })(posix = FastGlob.posix || (FastGlob.posix = {})); + let win32; + (function (win32) { + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escapeWindowsPath(source); + } + win32.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertWindowsPathToPattern(source); + } + win32.convertPathToPattern = convertPathToPattern; + })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); +})(FastGlob || (FastGlob = {})); +function getWorks(source, _Provider, options) { + const patterns = [].concat(source); + const settings = new settings_1.default(options); + const tasks = taskManager.generate(patterns, settings); + const provider = new _Provider(settings); + return tasks.map(provider.read, provider); +} +function assertPatternsInput(input) { + const source = [].concat(input); + const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); + if (!isValidSource) { + throw new TypeError('Patterns must be a string (non empty) or an array of strings'); + } +} +module.exports = FastGlob; diff --git a/node_modules/fast-glob/out/managers/tasks.d.ts b/node_modules/fast-glob/out/managers/tasks.d.ts new file mode 100644 index 0000000..59d2c42 --- /dev/null +++ b/node_modules/fast-glob/out/managers/tasks.d.ts @@ -0,0 +1,22 @@ +import Settings from '../settings'; +import { Pattern, PatternsGroup } from '../types'; +export type Task = { + base: string; + dynamic: boolean; + patterns: Pattern[]; + positive: Pattern[]; + negative: Pattern[]; +}; +export declare function generate(input: Pattern[], settings: Settings): Task[]; +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. 
+ */ +export declare function convertPatternsToTasks(positive: Pattern[], negative: Pattern[], dynamic: boolean): Task[]; +export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; +export declare function getNegativePatternsAsPositive(patterns: Pattern[], ignore: Pattern[]): Pattern[]; +export declare function groupPatternsByBaseDirectory(patterns: Pattern[]): PatternsGroup; +export declare function convertPatternGroupsToTasks(positive: PatternsGroup, negative: Pattern[], dynamic: boolean): Task[]; +export declare function convertPatternGroupToTask(base: string, positive: Pattern[], negative: Pattern[], dynamic: boolean): Task; diff --git a/node_modules/fast-glob/out/managers/tasks.js b/node_modules/fast-glob/out/managers/tasks.js new file mode 100644 index 0000000..335a765 --- /dev/null +++ b/node_modules/fast-glob/out/managers/tasks.js @@ -0,0 +1,110 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; +const utils = require("../utils"); +function generate(input, settings) { + const patterns = processPatterns(input, settings); + const ignore = processPatterns(settings.ignore, settings); + const positivePatterns = getPositivePatterns(patterns); + const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); + const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); + const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); + const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); + const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); + return staticTasks.concat(dynamicTasks); +} +exports.generate = generate; +function processPatterns(input, settings) { + let patterns = input; + /** + * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry + * and some problems with the micromatch package (see fast-glob issues: #365, #394). + * + * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown + * in matching in the case of a large set of patterns after expansion. + */ + if (settings.braceExpansion) { + patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); + } + /** + * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used + * at any nesting level. + * + * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change + * the pattern in the filter before creating a regular expression. There is no need to change the patterns + * in the application. Only on the input. + */ + if (settings.baseNameMatch) { + patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); + } + /** + * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. + */ + return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); +} +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. 
+ * This is necessary because directory traversal starts at the base directory and goes deeper. + */ +function convertPatternsToTasks(positive, negative, dynamic) { + const tasks = []; + const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); + const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); + const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); + const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); + tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); + /* + * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory + * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. + */ + if ('.' in insideCurrentDirectoryGroup) { + tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); + } + else { + tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); + } + return tasks; +} +exports.convertPatternsToTasks = convertPatternsToTasks; +function getPositivePatterns(patterns) { + return utils.pattern.getPositivePatterns(patterns); +} +exports.getPositivePatterns = getPositivePatterns; +function getNegativePatternsAsPositive(patterns, ignore) { + const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); + const positive = negative.map(utils.pattern.convertToPositivePattern); + return positive; +} +exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; +function groupPatternsByBaseDirectory(patterns) { + const group = {}; + return patterns.reduce((collection, pattern) => { + const base = utils.pattern.getBaseDirectory(pattern); + if (base in collection) { + collection[base].push(pattern); + } + else { + collection[base] = [pattern]; + } + return collection; + }, group); +} +exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; +function convertPatternGroupsToTasks(positive, negative, dynamic) { + return Object.keys(positive).map((base) => { + return convertPatternGroupToTask(base, positive[base], negative, dynamic); + }); +} +exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; +function convertPatternGroupToTask(base, positive, negative, dynamic) { + return { + dynamic, + positive, + negative, + base, + patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) + }; +} +exports.convertPatternGroupToTask = convertPatternGroupToTask; diff --git a/node_modules/fast-glob/out/providers/async.d.ts b/node_modules/fast-glob/out/providers/async.d.ts new file mode 100644 index 0000000..2742616 --- /dev/null +++ b/node_modules/fast-glob/out/providers/async.d.ts @@ -0,0 +1,9 @@ +import { Task } from '../managers/tasks'; +import { Entry, EntryItem, ReaderOptions } from '../types'; +import ReaderAsync from '../readers/async'; +import Provider from './provider'; +export default class ProviderAsync extends Provider> { + protected _reader: ReaderAsync; + read(task: Task): Promise; + api(root: string, task: Task, options: ReaderOptions): Promise; +} diff --git a/node_modules/fast-glob/out/providers/async.js b/node_modules/fast-glob/out/providers/async.js new file mode 100644 index 0000000..0c5286e --- /dev/null +++ b/node_modules/fast-glob/out/providers/async.js @@ -0,0 +1,23 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +const provider_1 = require("./provider"); +class ProviderAsync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new async_1.default(this._settings); + } + async read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = await this.api(root, task, options); + return entries.map((entry) => options.transform(entry)); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderAsync; diff --git a/node_modules/fast-glob/out/providers/filters/deep.d.ts b/node_modules/fast-glob/out/providers/filters/deep.d.ts new file mode 100644 index 0000000..377fab8 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/deep.d.ts @@ -0,0 +1,16 @@ +import { MicromatchOptions, EntryFilterFunction, Pattern } from '../../types'; +import Settings from '../../settings'; +export default class DeepFilter { + private readonly _settings; + private readonly _micromatchOptions; + constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); + getFilter(basePath: string, positive: Pattern[], negative: Pattern[]): EntryFilterFunction; + private _getMatcher; + private _getNegativePatternsRe; + private _filter; + private _isSkippedByDeep; + private _getEntryLevel; + private _isSkippedSymbolicLink; + private _isSkippedByPositivePatterns; + private _isSkippedByNegativePatterns; +} diff --git a/node_modules/fast-glob/out/providers/filters/deep.js b/node_modules/fast-glob/out/providers/filters/deep.js new file mode 100644 index 0000000..644bf41 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/deep.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +const partial_1 = require("../matchers/partial"); +class DeepFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + } + getFilter(basePath, positive, negative) { + const matcher = this._getMatcher(positive); + const negativeRe = this._getNegativePatternsRe(negative); + return (entry) => this._filter(basePath, entry, matcher, negativeRe); + } + _getMatcher(patterns) { + return new partial_1.default(patterns, this._settings, this._micromatchOptions); + } + _getNegativePatternsRe(patterns) { + const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); + return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); + } + _filter(basePath, entry, matcher, negativeRe) { + if (this._isSkippedByDeep(basePath, entry.path)) { + return false; + } + if (this._isSkippedSymbolicLink(entry)) { + return false; + } + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._isSkippedByPositivePatterns(filepath, matcher)) { + return false; + } + return this._isSkippedByNegativePatterns(filepath, negativeRe); + } + _isSkippedByDeep(basePath, entryPath) { + /** + * Avoid unnecessary depth calculations when it doesn't matter. 
+ */ + if (this._settings.deep === Infinity) { + return false; + } + return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; + } + _getEntryLevel(basePath, entryPath) { + const entryPathDepth = entryPath.split('/').length; + if (basePath === '') { + return entryPathDepth; + } + const basePathDepth = basePath.split('/').length; + return entryPathDepth - basePathDepth; + } + _isSkippedSymbolicLink(entry) { + return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); + } + _isSkippedByPositivePatterns(entryPath, matcher) { + return !this._settings.baseNameMatch && !matcher.match(entryPath); + } + _isSkippedByNegativePatterns(entryPath, patternsRe) { + return !utils.pattern.matchAny(entryPath, patternsRe); + } +} +exports.default = DeepFilter; diff --git a/node_modules/fast-glob/out/providers/filters/entry.d.ts b/node_modules/fast-glob/out/providers/filters/entry.d.ts new file mode 100644 index 0000000..23db353 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/entry.d.ts @@ -0,0 +1,17 @@ +import Settings from '../../settings'; +import { EntryFilterFunction, MicromatchOptions, Pattern } from '../../types'; +export default class EntryFilter { + private readonly _settings; + private readonly _micromatchOptions; + readonly index: Map; + constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); + getFilter(positive: Pattern[], negative: Pattern[]): EntryFilterFunction; + private _filter; + private _isDuplicateEntry; + private _createIndexRecord; + private _onlyFileFilter; + private _onlyDirectoryFilter; + private _isMatchToPatternsSet; + private _isMatchToAbsoluteNegative; + private _isMatchToPatterns; +} diff --git a/node_modules/fast-glob/out/providers/filters/entry.js b/node_modules/fast-glob/out/providers/filters/entry.js new file mode 100644 index 0000000..0c9210c --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/entry.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class EntryFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this.index = new Map(); + } + getFilter(positive, negative) { + const [absoluteNegative, relativeNegative] = utils.pattern.partitionAbsoluteAndRelative(negative); + const patterns = { + positive: { + all: utils.pattern.convertPatternsToRe(positive, this._micromatchOptions) + }, + negative: { + absolute: utils.pattern.convertPatternsToRe(absoluteNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })), + relative: utils.pattern.convertPatternsToRe(relativeNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })) + } + }; + return (entry) => this._filter(entry, patterns); + } + _filter(entry, patterns) { + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._settings.unique && this._isDuplicateEntry(filepath)) { + return false; + } + if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { + return false; + } + const isMatched = this._isMatchToPatternsSet(filepath, patterns, entry.dirent.isDirectory()); + if (this._settings.unique && isMatched) { + this._createIndexRecord(filepath); + } + return isMatched; + } + _isDuplicateEntry(filepath) { + return this.index.has(filepath); + } + _createIndexRecord(filepath) { + this.index.set(filepath, undefined); + } + _onlyFileFilter(entry) { + return this._settings.onlyFiles && 
!entry.dirent.isFile(); + } + _onlyDirectoryFilter(entry) { + return this._settings.onlyDirectories && !entry.dirent.isDirectory(); + } + _isMatchToPatternsSet(filepath, patterns, isDirectory) { + const isMatched = this._isMatchToPatterns(filepath, patterns.positive.all, isDirectory); + if (!isMatched) { + return false; + } + const isMatchedByRelativeNegative = this._isMatchToPatterns(filepath, patterns.negative.relative, isDirectory); + if (isMatchedByRelativeNegative) { + return false; + } + const isMatchedByAbsoluteNegative = this._isMatchToAbsoluteNegative(filepath, patterns.negative.absolute, isDirectory); + if (isMatchedByAbsoluteNegative) { + return false; + } + return true; + } + _isMatchToAbsoluteNegative(filepath, patternsRe, isDirectory) { + if (patternsRe.length === 0) { + return false; + } + const fullpath = utils.path.makeAbsolute(this._settings.cwd, filepath); + return this._isMatchToPatterns(fullpath, patternsRe, isDirectory); + } + _isMatchToPatterns(filepath, patternsRe, isDirectory) { + if (patternsRe.length === 0) { + return false; + } + // Trying to match files and directories by patterns. + const isMatched = utils.pattern.matchAny(filepath, patternsRe); + // A pattern with a trailling slash can be used for directory matching. + // To apply such pattern, we need to add a tralling slash to the path. + if (!isMatched && isDirectory) { + return utils.pattern.matchAny(filepath + '/', patternsRe); + } + return isMatched; + } +} +exports.default = EntryFilter; diff --git a/node_modules/fast-glob/out/providers/filters/error.d.ts b/node_modules/fast-glob/out/providers/filters/error.d.ts new file mode 100644 index 0000000..170eb25 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/error.d.ts @@ -0,0 +1,8 @@ +import Settings from '../../settings'; +import { ErrorFilterFunction } from '../../types'; +export default class ErrorFilter { + private readonly _settings; + constructor(_settings: Settings); + getFilter(): ErrorFilterFunction; + private _isNonFatalError; +} diff --git a/node_modules/fast-glob/out/providers/filters/error.js b/node_modules/fast-glob/out/providers/filters/error.js new file mode 100644 index 0000000..1c6f241 --- /dev/null +++ b/node_modules/fast-glob/out/providers/filters/error.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class ErrorFilter { + constructor(_settings) { + this._settings = _settings; + } + getFilter() { + return (error) => this._isNonFatalError(error); + } + _isNonFatalError(error) { + return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; + } +} +exports.default = ErrorFilter; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.d.ts b/node_modules/fast-glob/out/providers/matchers/matcher.d.ts new file mode 100644 index 0000000..d04c232 --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/matcher.d.ts @@ -0,0 +1,33 @@ +import { Pattern, MicromatchOptions, PatternRe } from '../../types'; +import Settings from '../../settings'; +export type PatternSegment = StaticPatternSegment | DynamicPatternSegment; +type StaticPatternSegment = { + dynamic: false; + pattern: Pattern; +}; +type DynamicPatternSegment = { + dynamic: true; + pattern: Pattern; + patternRe: PatternRe; +}; +export type PatternSection = PatternSegment[]; +export type PatternInfo = { + /** + * Indicates that the pattern has a globstar (more than a single section). 
+ */ + complete: boolean; + pattern: Pattern; + segments: PatternSegment[]; + sections: PatternSection[]; +}; +export default abstract class Matcher { + private readonly _patterns; + private readonly _settings; + private readonly _micromatchOptions; + protected readonly _storage: PatternInfo[]; + constructor(_patterns: Pattern[], _settings: Settings, _micromatchOptions: MicromatchOptions); + private _fillStorage; + private _getPatternSegments; + private _splitSegmentsIntoSections; +} +export {}; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.js b/node_modules/fast-glob/out/providers/matchers/matcher.js new file mode 100644 index 0000000..eae67c9 --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/matcher.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class Matcher { + constructor(_patterns, _settings, _micromatchOptions) { + this._patterns = _patterns; + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this._storage = []; + this._fillStorage(); + } + _fillStorage() { + for (const pattern of this._patterns) { + const segments = this._getPatternSegments(pattern); + const sections = this._splitSegmentsIntoSections(segments); + this._storage.push({ + complete: sections.length <= 1, + pattern, + segments, + sections + }); + } + } + _getPatternSegments(pattern) { + const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); + return parts.map((part) => { + const dynamic = utils.pattern.isDynamicPattern(part, this._settings); + if (!dynamic) { + return { + dynamic: false, + pattern: part + }; + } + return { + dynamic: true, + pattern: part, + patternRe: utils.pattern.makeRe(part, this._micromatchOptions) + }; + }); + } + _splitSegmentsIntoSections(segments) { + return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); + } +} +exports.default = Matcher; diff --git a/node_modules/fast-glob/out/providers/matchers/partial.d.ts b/node_modules/fast-glob/out/providers/matchers/partial.d.ts new file mode 100644 index 0000000..91520f6 --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/partial.d.ts @@ -0,0 +1,4 @@ +import Matcher from './matcher'; +export default class PartialMatcher extends Matcher { + match(filepath: string): boolean; +} diff --git a/node_modules/fast-glob/out/providers/matchers/partial.js b/node_modules/fast-glob/out/providers/matchers/partial.js new file mode 100644 index 0000000..1dfffeb --- /dev/null +++ b/node_modules/fast-glob/out/providers/matchers/partial.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const matcher_1 = require("./matcher"); +class PartialMatcher extends matcher_1.default { + match(filepath) { + const parts = filepath.split('/'); + const levels = parts.length; + const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); + for (const pattern of patterns) { + const section = pattern.sections[0]; + /** + * In this case, the pattern has a globstar and we must read all directories unconditionally, + * but only if the level has reached the end of the first group. 
+ * + * fixtures/{a,b}/** + * ^ true/false ^ always true + */ + if (!pattern.complete && levels > section.length) { + return true; + } + const match = parts.every((part, index) => { + const segment = pattern.segments[index]; + if (segment.dynamic && segment.patternRe.test(part)) { + return true; + } + if (!segment.dynamic && segment.pattern === part) { + return true; + } + return false; + }); + if (match) { + return true; + } + } + return false; + } +} +exports.default = PartialMatcher; diff --git a/node_modules/fast-glob/out/providers/provider.d.ts b/node_modules/fast-glob/out/providers/provider.d.ts new file mode 100644 index 0000000..1053460 --- /dev/null +++ b/node_modules/fast-glob/out/providers/provider.d.ts @@ -0,0 +1,19 @@ +import { Task } from '../managers/tasks'; +import Settings from '../settings'; +import { MicromatchOptions, ReaderOptions } from '../types'; +import DeepFilter from './filters/deep'; +import EntryFilter from './filters/entry'; +import ErrorFilter from './filters/error'; +import EntryTransformer from './transformers/entry'; +export default abstract class Provider { + protected readonly _settings: Settings; + readonly errorFilter: ErrorFilter; + readonly entryFilter: EntryFilter; + readonly deepFilter: DeepFilter; + readonly entryTransformer: EntryTransformer; + constructor(_settings: Settings); + abstract read(_task: Task): T; + protected _getRootDirectory(task: Task): string; + protected _getReaderOptions(task: Task): ReaderOptions; + protected _getMicromatchOptions(): MicromatchOptions; +} diff --git a/node_modules/fast-glob/out/providers/provider.js b/node_modules/fast-glob/out/providers/provider.js new file mode 100644 index 0000000..da88ee0 --- /dev/null +++ b/node_modules/fast-glob/out/providers/provider.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const deep_1 = require("./filters/deep"); +const entry_1 = require("./filters/entry"); +const error_1 = require("./filters/error"); +const entry_2 = require("./transformers/entry"); +class Provider { + constructor(_settings) { + this._settings = _settings; + this.errorFilter = new error_1.default(this._settings); + this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); + this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); + this.entryTransformer = new entry_2.default(this._settings); + } + _getRootDirectory(task) { + return path.resolve(this._settings.cwd, task.base); + } + _getReaderOptions(task) { + const basePath = task.base === '.' ? 
'' : task.base; + return { + basePath, + pathSegmentSeparator: '/', + concurrency: this._settings.concurrency, + deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), + entryFilter: this.entryFilter.getFilter(task.positive, task.negative), + errorFilter: this.errorFilter.getFilter(), + followSymbolicLinks: this._settings.followSymbolicLinks, + fs: this._settings.fs, + stats: this._settings.stats, + throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, + transform: this.entryTransformer.getTransformer() + }; + } + _getMicromatchOptions() { + return { + dot: this._settings.dot, + matchBase: this._settings.baseNameMatch, + nobrace: !this._settings.braceExpansion, + nocase: !this._settings.caseSensitiveMatch, + noext: !this._settings.extglob, + noglobstar: !this._settings.globstar, + posix: true, + strictSlashes: false + }; + } +} +exports.default = Provider; diff --git a/node_modules/fast-glob/out/providers/stream.d.ts b/node_modules/fast-glob/out/providers/stream.d.ts new file mode 100644 index 0000000..3d02a1f --- /dev/null +++ b/node_modules/fast-glob/out/providers/stream.d.ts @@ -0,0 +1,11 @@ +/// +import { Readable } from 'stream'; +import { Task } from '../managers/tasks'; +import ReaderStream from '../readers/stream'; +import { ReaderOptions } from '../types'; +import Provider from './provider'; +export default class ProviderStream extends Provider { + protected _reader: ReaderStream; + read(task: Task): Readable; + api(root: string, task: Task, options: ReaderOptions): Readable; +} diff --git a/node_modules/fast-glob/out/providers/stream.js b/node_modules/fast-glob/out/providers/stream.js new file mode 100644 index 0000000..85da62e --- /dev/null +++ b/node_modules/fast-glob/out/providers/stream.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const stream_2 = require("../readers/stream"); +const provider_1 = require("./provider"); +class ProviderStream extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new stream_2.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const source = this.api(root, task, options); + const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); + source + .once('error', (error) => destination.emit('error', error)) + .on('data', (entry) => destination.emit('data', options.transform(entry))) + .once('end', () => destination.emit('end')); + destination + .once('close', () => source.destroy()); + return destination; + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderStream; diff --git a/node_modules/fast-glob/out/providers/sync.d.ts b/node_modules/fast-glob/out/providers/sync.d.ts new file mode 100644 index 0000000..9c0fe1e --- /dev/null +++ b/node_modules/fast-glob/out/providers/sync.d.ts @@ -0,0 +1,9 @@ +import { Task } from '../managers/tasks'; +import ReaderSync from '../readers/sync'; +import { Entry, EntryItem, ReaderOptions } from '../types'; +import Provider from './provider'; +export default class ProviderSync extends Provider { + protected _reader: ReaderSync; + read(task: Task): EntryItem[]; + api(root: string, task: Task, options: ReaderOptions): Entry[]; +} diff --git a/node_modules/fast-glob/out/providers/sync.js 
b/node_modules/fast-glob/out/providers/sync.js new file mode 100644 index 0000000..d70aa1b --- /dev/null +++ b/node_modules/fast-glob/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +const provider_1 = require("./provider"); +class ProviderSync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new sync_1.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = this.api(root, task, options); + return entries.map(options.transform); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderSync; diff --git a/node_modules/fast-glob/out/providers/transformers/entry.d.ts b/node_modules/fast-glob/out/providers/transformers/entry.d.ts new file mode 100644 index 0000000..e9b85fa --- /dev/null +++ b/node_modules/fast-glob/out/providers/transformers/entry.d.ts @@ -0,0 +1,8 @@ +import Settings from '../../settings'; +import { EntryTransformerFunction } from '../../types'; +export default class EntryTransformer { + private readonly _settings; + constructor(_settings: Settings); + getTransformer(): EntryTransformerFunction; + private _transform; +} diff --git a/node_modules/fast-glob/out/providers/transformers/entry.js b/node_modules/fast-glob/out/providers/transformers/entry.js new file mode 100644 index 0000000..d11903c --- /dev/null +++ b/node_modules/fast-glob/out/providers/transformers/entry.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class EntryTransformer { + constructor(_settings) { + this._settings = _settings; + } + getTransformer() { + return (entry) => this._transform(entry); + } + _transform(entry) { + let filepath = entry.path; + if (this._settings.absolute) { + filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); + filepath = utils.path.unixify(filepath); + } + if (this._settings.markDirectories && entry.dirent.isDirectory()) { + filepath += '/'; + } + if (!this._settings.objectMode) { + return filepath; + } + return Object.assign(Object.assign({}, entry), { path: filepath }); + } +} +exports.default = EntryTransformer; diff --git a/node_modules/fast-glob/out/readers/async.d.ts b/node_modules/fast-glob/out/readers/async.d.ts new file mode 100644 index 0000000..fbca428 --- /dev/null +++ b/node_modules/fast-glob/out/readers/async.d.ts @@ -0,0 +1,10 @@ +import * as fsWalk from '@nodelib/fs.walk'; +import { Entry, ReaderOptions, Pattern } from '../types'; +import Reader from './reader'; +import ReaderStream from './stream'; +export default class ReaderAsync extends Reader> { + protected _walkAsync: typeof fsWalk.walk; + protected _readerStream: ReaderStream; + dynamic(root: string, options: ReaderOptions): Promise; + static(patterns: Pattern[], options: ReaderOptions): Promise; +} diff --git a/node_modules/fast-glob/out/readers/async.js b/node_modules/fast-glob/out/readers/async.js new file mode 100644 index 0000000..d024145 --- /dev/null +++ b/node_modules/fast-glob/out/readers/async.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +const stream_1 = require("./stream"); +class 
ReaderAsync extends reader_1.default { + constructor() { + super(...arguments); + this._walkAsync = fsWalk.walk; + this._readerStream = new stream_1.default(this._settings); + } + dynamic(root, options) { + return new Promise((resolve, reject) => { + this._walkAsync(root, options, (error, entries) => { + if (error === null) { + resolve(entries); + } + else { + reject(error); + } + }); + }); + } + async static(patterns, options) { + const entries = []; + const stream = this._readerStream.static(patterns, options); + // After #235, replace it with an asynchronous iterator. + return new Promise((resolve, reject) => { + stream.once('error', reject); + stream.on('data', (entry) => entries.push(entry)); + stream.once('end', () => resolve(entries)); + }); + } +} +exports.default = ReaderAsync; diff --git a/node_modules/fast-glob/out/readers/reader.d.ts b/node_modules/fast-glob/out/readers/reader.d.ts new file mode 100644 index 0000000..2af16b6 --- /dev/null +++ b/node_modules/fast-glob/out/readers/reader.d.ts @@ -0,0 +1,15 @@ +/// +import * as fs from 'fs'; +import * as fsStat from '@nodelib/fs.stat'; +import Settings from '../settings'; +import { Entry, ErrnoException, Pattern, ReaderOptions } from '../types'; +export default abstract class Reader { + protected readonly _settings: Settings; + protected readonly _fsStatSettings: fsStat.Settings; + constructor(_settings: Settings); + abstract dynamic(root: string, options: ReaderOptions): T; + abstract static(patterns: Pattern[], options: ReaderOptions): T; + protected _getFullEntryPath(filepath: string): string; + protected _makeEntry(stats: fs.Stats, pattern: Pattern): Entry; + protected _isFatalError(error: ErrnoException): boolean; +} diff --git a/node_modules/fast-glob/out/readers/reader.js b/node_modules/fast-glob/out/readers/reader.js new file mode 100644 index 0000000..7b40255 --- /dev/null +++ b/node_modules/fast-glob/out/readers/reader.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const utils = require("../utils"); +class Reader { + constructor(_settings) { + this._settings = _settings; + this._fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this._settings.followSymbolicLinks, + fs: this._settings.fs, + throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks + }); + } + _getFullEntryPath(filepath) { + return path.resolve(this._settings.cwd, filepath); + } + _makeEntry(stats, pattern) { + const entry = { + name: pattern, + path: pattern, + dirent: utils.fs.createDirentFromStats(pattern, stats) + }; + if (this._settings.stats) { + entry.stats = stats; + } + return entry; + } + _isFatalError(error) { + return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; + } +} +exports.default = Reader; diff --git a/node_modules/fast-glob/out/readers/stream.d.ts b/node_modules/fast-glob/out/readers/stream.d.ts new file mode 100644 index 0000000..1c74cac --- /dev/null +++ b/node_modules/fast-glob/out/readers/stream.d.ts @@ -0,0 +1,14 @@ +/// +import { Readable } from 'stream'; +import * as fsStat from '@nodelib/fs.stat'; +import * as fsWalk from '@nodelib/fs.walk'; +import { Pattern, ReaderOptions } from '../types'; +import Reader from './reader'; +export default class ReaderStream extends Reader { + protected _walkStream: typeof fsWalk.walkStream; + protected _stat: typeof fsStat.stat; + dynamic(root: string, options: ReaderOptions): Readable; + static(patterns: Pattern[], 
options: ReaderOptions): Readable; + private _getEntry; + private _getStat; +} diff --git a/node_modules/fast-glob/out/readers/stream.js b/node_modules/fast-glob/out/readers/stream.js new file mode 100644 index 0000000..317c6d5 --- /dev/null +++ b/node_modules/fast-glob/out/readers/stream.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const fsStat = require("@nodelib/fs.stat"); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +class ReaderStream extends reader_1.default { + constructor() { + super(...arguments); + this._walkStream = fsWalk.walkStream; + this._stat = fsStat.stat; + } + dynamic(root, options) { + return this._walkStream(root, options); + } + static(patterns, options) { + const filepaths = patterns.map(this._getFullEntryPath, this); + const stream = new stream_1.PassThrough({ objectMode: true }); + stream._write = (index, _enc, done) => { + return this._getEntry(filepaths[index], patterns[index], options) + .then((entry) => { + if (entry !== null && options.entryFilter(entry)) { + stream.push(entry); + } + if (index === filepaths.length - 1) { + stream.end(); + } + done(); + }) + .catch(done); + }; + for (let i = 0; i < filepaths.length; i++) { + stream.write(i); + } + return stream; + } + _getEntry(filepath, pattern, options) { + return this._getStat(filepath) + .then((stats) => this._makeEntry(stats, pattern)) + .catch((error) => { + if (options.errorFilter(error)) { + return null; + } + throw error; + }); + } + _getStat(filepath) { + return new Promise((resolve, reject) => { + this._stat(filepath, this._fsStatSettings, (error, stats) => { + return error === null ? resolve(stats) : reject(error); + }); + }); + } +} +exports.default = ReaderStream; diff --git a/node_modules/fast-glob/out/readers/sync.d.ts b/node_modules/fast-glob/out/readers/sync.d.ts new file mode 100644 index 0000000..c96ffee --- /dev/null +++ b/node_modules/fast-glob/out/readers/sync.d.ts @@ -0,0 +1,12 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fsWalk from '@nodelib/fs.walk'; +import { Entry, Pattern, ReaderOptions } from '../types'; +import Reader from './reader'; +export default class ReaderSync extends Reader { + protected _walkSync: typeof fsWalk.walkSync; + protected _statSync: typeof fsStat.statSync; + dynamic(root: string, options: ReaderOptions): Entry[]; + static(patterns: Pattern[], options: ReaderOptions): Entry[]; + private _getEntry; + private _getStat; +} diff --git a/node_modules/fast-glob/out/readers/sync.js b/node_modules/fast-glob/out/readers/sync.js new file mode 100644 index 0000000..4704d65 --- /dev/null +++ b/node_modules/fast-glob/out/readers/sync.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsStat = require("@nodelib/fs.stat"); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +class ReaderSync extends reader_1.default { + constructor() { + super(...arguments); + this._walkSync = fsWalk.walkSync; + this._statSync = fsStat.statSync; + } + dynamic(root, options) { + return this._walkSync(root, options); + } + static(patterns, options) { + const entries = []; + for (const pattern of patterns) { + const filepath = this._getFullEntryPath(pattern); + const entry = this._getEntry(filepath, pattern, options); + if (entry === null || !options.entryFilter(entry)) { + continue; + } + entries.push(entry); + } + return entries; + } + _getEntry(filepath, 
pattern, options) { + try { + const stats = this._getStat(filepath); + return this._makeEntry(stats, pattern); + } + catch (error) { + if (options.errorFilter(error)) { + return null; + } + throw error; + } + } + _getStat(filepath) { + return this._statSync(filepath, this._fsStatSettings); + } +} +exports.default = ReaderSync; diff --git a/node_modules/fast-glob/out/settings.d.ts b/node_modules/fast-glob/out/settings.d.ts new file mode 100644 index 0000000..76a74f8 --- /dev/null +++ b/node_modules/fast-glob/out/settings.d.ts @@ -0,0 +1,164 @@ +import { FileSystemAdapter, Pattern } from './types'; +export declare const DEFAULT_FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export type Options = { + /** + * Return the absolute path for entries. + * + * @default false + */ + absolute?: boolean; + /** + * If set to `true`, then patterns without slashes will be matched against + * the basename of the path if it contains slashes. + * + * @default false + */ + baseNameMatch?: boolean; + /** + * Enables Bash-like brace expansion. + * + * @default true + */ + braceExpansion?: boolean; + /** + * Enables a case-sensitive mode for matching files. + * + * @default true + */ + caseSensitiveMatch?: boolean; + /** + * Specifies the maximum number of concurrent requests from a reader to read + * directories. + * + * @default os.cpus().length + */ + concurrency?: number; + /** + * The current working directory in which to search. + * + * @default process.cwd() + */ + cwd?: string; + /** + * Specifies the maximum depth of a read directory relative to the start + * directory. + * + * @default Infinity + */ + deep?: number; + /** + * Allow patterns to match entries that begin with a period (`.`). + * + * @default false + */ + dot?: boolean; + /** + * Enables Bash-like `extglob` functionality. + * + * @default true + */ + extglob?: boolean; + /** + * Indicates whether to traverse descendants of symbolic link directories. + * + * @default true + */ + followSymbolicLinks?: boolean; + /** + * Custom implementation of methods for working with the file system. + * + * @default fs.* + */ + fs?: Partial; + /** + * Enables recursively repeats a pattern containing `**`. + * If `false`, `**` behaves exactly like `*`. + * + * @default true + */ + globstar?: boolean; + /** + * An array of glob patterns to exclude matches. + * This is an alternative way to use negative patterns. + * + * @default [] + */ + ignore?: Pattern[]; + /** + * Mark the directory path with the final slash. + * + * @default false + */ + markDirectories?: boolean; + /** + * Returns objects (instead of strings) describing entries. + * + * @default false + */ + objectMode?: boolean; + /** + * Return only directories. + * + * @default false + */ + onlyDirectories?: boolean; + /** + * Return only files. + * + * @default true + */ + onlyFiles?: boolean; + /** + * Enables an object mode (`objectMode`) with an additional `stats` field. + * + * @default false + */ + stats?: boolean; + /** + * By default this package suppress only `ENOENT` errors. + * Set to `true` to suppress any error. + * + * @default false + */ + suppressErrors?: boolean; + /** + * Throw an error when symbolic link is broken if `true` or safely + * return `lstat` call if `false`. + * + * @default false + */ + throwErrorOnBrokenSymbolicLink?: boolean; + /** + * Ensures that the returned entries are unique. 
+ * + * @default true + */ + unique?: boolean; +}; +export default class Settings { + private readonly _options; + readonly absolute: boolean; + readonly baseNameMatch: boolean; + readonly braceExpansion: boolean; + readonly caseSensitiveMatch: boolean; + readonly concurrency: number; + readonly cwd: string; + readonly deep: number; + readonly dot: boolean; + readonly extglob: boolean; + readonly followSymbolicLinks: boolean; + readonly fs: FileSystemAdapter; + readonly globstar: boolean; + readonly ignore: Pattern[]; + readonly markDirectories: boolean; + readonly objectMode: boolean; + readonly onlyDirectories: boolean; + readonly onlyFiles: boolean; + readonly stats: boolean; + readonly suppressErrors: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly unique: boolean; + constructor(_options?: Options); + private _getValue; + private _getFileSystemMethods; +} diff --git a/node_modules/fast-glob/out/settings.js b/node_modules/fast-glob/out/settings.js new file mode 100644 index 0000000..23f916c --- /dev/null +++ b/node_modules/fast-glob/out/settings.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +const os = require("os"); +/** + * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. + * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 + */ +const CPU_COUNT = Math.max(os.cpus().length, 1); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + lstatSync: fs.lstatSync, + stat: fs.stat, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +class Settings { + constructor(_options = {}) { + this._options = _options; + this.absolute = this._getValue(this._options.absolute, false); + this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); + this.braceExpansion = this._getValue(this._options.braceExpansion, true); + this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); + this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); + this.cwd = this._getValue(this._options.cwd, process.cwd()); + this.deep = this._getValue(this._options.deep, Infinity); + this.dot = this._getValue(this._options.dot, false); + this.extglob = this._getValue(this._options.extglob, true); + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); + this.fs = this._getFileSystemMethods(this._options.fs); + this.globstar = this._getValue(this._options.globstar, true); + this.ignore = this._getValue(this._options.ignore, []); + this.markDirectories = this._getValue(this._options.markDirectories, false); + this.objectMode = this._getValue(this._options.objectMode, false); + this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); + this.onlyFiles = this._getValue(this._options.onlyFiles, true); + this.stats = this._getValue(this._options.stats, false); + this.suppressErrors = this._getValue(this._options.suppressErrors, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); + this.unique = this._getValue(this._options.unique, true); + if (this.onlyDirectories) { + this.onlyFiles = false; + } + if (this.stats) { + this.objectMode = true; + } + // Remove the cast to the array in the next major (#404). 
+ this.ignore = [].concat(this.ignore); + } + _getValue(option, value) { + return option === undefined ? value : option; + } + _getFileSystemMethods(methods = {}) { + return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); + } +} +exports.default = Settings; diff --git a/node_modules/fast-glob/out/types/index.d.ts b/node_modules/fast-glob/out/types/index.d.ts new file mode 100644 index 0000000..6506caf --- /dev/null +++ b/node_modules/fast-glob/out/types/index.d.ts @@ -0,0 +1,31 @@ +/// +import * as fsWalk from '@nodelib/fs.walk'; +export type ErrnoException = NodeJS.ErrnoException; +export type Entry = fsWalk.Entry; +export type EntryItem = string | Entry; +export type Pattern = string; +export type PatternRe = RegExp; +export type PatternsGroup = Record; +export type ReaderOptions = fsWalk.Options & { + transform(entry: Entry): EntryItem; + deepFilter: DeepFilterFunction; + entryFilter: EntryFilterFunction; + errorFilter: ErrorFilterFunction; + fs: FileSystemAdapter; + stats: boolean; +}; +export type ErrorFilterFunction = fsWalk.ErrorFilterFunction; +export type EntryFilterFunction = fsWalk.EntryFilterFunction; +export type DeepFilterFunction = fsWalk.DeepFilterFunction; +export type EntryTransformerFunction = (entry: Entry) => EntryItem; +export type MicromatchOptions = { + dot?: boolean; + matchBase?: boolean; + nobrace?: boolean; + nocase?: boolean; + noext?: boolean; + noglobstar?: boolean; + posix?: boolean; + strictSlashes?: boolean; +}; +export type FileSystemAdapter = fsWalk.FileSystemAdapter; diff --git a/node_modules/fast-glob/out/types/index.js b/node_modules/fast-glob/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/fast-glob/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/fast-glob/out/utils/array.d.ts b/node_modules/fast-glob/out/utils/array.d.ts new file mode 100644 index 0000000..98e7325 --- /dev/null +++ b/node_modules/fast-glob/out/utils/array.d.ts @@ -0,0 +1,2 @@ +export declare function flatten(items: T[][]): T[]; +export declare function splitWhen(items: T[], predicate: (item: T) => boolean): T[][]; diff --git a/node_modules/fast-glob/out/utils/array.js b/node_modules/fast-glob/out/utils/array.js new file mode 100644 index 0000000..50c406e --- /dev/null +++ b/node_modules/fast-glob/out/utils/array.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitWhen = exports.flatten = void 0; +function flatten(items) { + return items.reduce((collection, item) => [].concat(collection, item), []); +} +exports.flatten = flatten; +function splitWhen(items, predicate) { + const result = [[]]; + let groupIndex = 0; + for (const item of items) { + if (predicate(item)) { + groupIndex++; + result[groupIndex] = []; + } + else { + result[groupIndex].push(item); + } + } + return result; +} +exports.splitWhen = splitWhen; diff --git a/node_modules/fast-glob/out/utils/errno.d.ts b/node_modules/fast-glob/out/utils/errno.d.ts new file mode 100644 index 0000000..1c08d3b --- /dev/null +++ b/node_modules/fast-glob/out/utils/errno.d.ts @@ -0,0 +1,2 @@ +import { ErrnoException } from '../types'; +export declare function isEnoentCodeError(error: ErrnoException): boolean; diff --git a/node_modules/fast-glob/out/utils/errno.js b/node_modules/fast-glob/out/utils/errno.js new file mode 100644 index 0000000..f0bd801 --- /dev/null +++ b/node_modules/fast-glob/out/utils/errno.js 
@@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEnoentCodeError = void 0; +function isEnoentCodeError(error) { + return error.code === 'ENOENT'; +} +exports.isEnoentCodeError = isEnoentCodeError; diff --git a/node_modules/fast-glob/out/utils/fs.d.ts b/node_modules/fast-glob/out/utils/fs.d.ts new file mode 100644 index 0000000..64c61ce --- /dev/null +++ b/node_modules/fast-glob/out/utils/fs.d.ts @@ -0,0 +1,4 @@ +/// +import * as fs from 'fs'; +import { Dirent } from '@nodelib/fs.walk'; +export declare function createDirentFromStats(name: string, stats: fs.Stats): Dirent; diff --git a/node_modules/fast-glob/out/utils/fs.js b/node_modules/fast-glob/out/utils/fs.js new file mode 100644 index 0000000..ace7c74 --- /dev/null +++ b/node_modules/fast-glob/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/fast-glob/out/utils/index.d.ts b/node_modules/fast-glob/out/utils/index.d.ts new file mode 100644 index 0000000..f634cad --- /dev/null +++ b/node_modules/fast-glob/out/utils/index.d.ts @@ -0,0 +1,8 @@ +import * as array from './array'; +import * as errno from './errno'; +import * as fs from './fs'; +import * as path from './path'; +import * as pattern from './pattern'; +import * as stream from './stream'; +import * as string from './string'; +export { array, errno, fs, path, pattern, stream, string }; diff --git a/node_modules/fast-glob/out/utils/index.js b/node_modules/fast-glob/out/utils/index.js new file mode 100644 index 0000000..0f92c16 --- /dev/null +++ b/node_modules/fast-glob/out/utils/index.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; +const array = require("./array"); +exports.array = array; +const errno = require("./errno"); +exports.errno = errno; +const fs = require("./fs"); +exports.fs = fs; +const path = require("./path"); +exports.path = path; +const pattern = require("./pattern"); +exports.pattern = pattern; +const stream = require("./stream"); +exports.stream = stream; +const string = require("./string"); +exports.string = string; diff --git a/node_modules/fast-glob/out/utils/path.d.ts b/node_modules/fast-glob/out/utils/path.d.ts new file mode 100644 index 0000000..0b13f4b --- /dev/null +++ b/node_modules/fast-glob/out/utils/path.d.ts @@ -0,0 +1,13 @@ +import { Pattern } from '../types'; +/** + * Designed to work only with simple paths: `dir\\file`. 
+ */ +export declare function unixify(filepath: string): string; +export declare function makeAbsolute(cwd: string, filepath: string): string; +export declare function removeLeadingDotSegment(entry: string): string; +export declare const escape: typeof escapeWindowsPath; +export declare function escapeWindowsPath(pattern: Pattern): Pattern; +export declare function escapePosixPath(pattern: Pattern): Pattern; +export declare const convertPathToPattern: typeof convertWindowsPathToPattern; +export declare function convertWindowsPathToPattern(filepath: string): Pattern; +export declare function convertPosixPathToPattern(filepath: string): Pattern; diff --git a/node_modules/fast-glob/out/utils/path.js b/node_modules/fast-glob/out/utils/path.js new file mode 100644 index 0000000..7b53b39 --- /dev/null +++ b/node_modules/fast-glob/out/utils/path.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; +const os = require("os"); +const path = require("path"); +const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; +const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ +/** + * All non-escaped special characters. + * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. + * Windows: (){}[], !+@ before (, ! at the beginning. + */ +const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; +const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; +/** + * The device path (\\.\ or \\?\). + * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths + */ +const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; +/** + * All backslashes except those escaping special characters. + * Windows: !()+@{} + * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions + */ +const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; +/** + * Designed to work only with simple paths: `dir\\file`. + */ +function unixify(filepath) { + return filepath.replace(/\\/g, '/'); +} +exports.unixify = unixify; +function makeAbsolute(cwd, filepath) { + return path.resolve(cwd, filepath); +} +exports.makeAbsolute = makeAbsolute; +function removeLeadingDotSegment(entry) { + // We do not use `startsWith` because this is 10x slower than current implementation for some cases. + // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with + if (entry.charAt(0) === '.') { + const secondCharactery = entry.charAt(1); + if (secondCharactery === '/' || secondCharactery === '\\') { + return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); + } + } + return entry; +} +exports.removeLeadingDotSegment = removeLeadingDotSegment; +exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; +function escapeWindowsPath(pattern) { + return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); +} +exports.escapeWindowsPath = escapeWindowsPath; +function escapePosixPath(pattern) { + return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); +} +exports.escapePosixPath = escapePosixPath; +exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? 
convertWindowsPathToPattern : convertPosixPathToPattern; +function convertWindowsPathToPattern(filepath) { + return escapeWindowsPath(filepath) + .replace(DOS_DEVICE_PATH_RE, '//$1') + .replace(WINDOWS_BACKSLASHES_RE, '/'); +} +exports.convertWindowsPathToPattern = convertWindowsPathToPattern; +function convertPosixPathToPattern(filepath) { + return escapePosixPath(filepath); +} +exports.convertPosixPathToPattern = convertPosixPathToPattern; diff --git a/node_modules/fast-glob/out/utils/pattern.d.ts b/node_modules/fast-glob/out/utils/pattern.d.ts new file mode 100644 index 0000000..e3598a9 --- /dev/null +++ b/node_modules/fast-glob/out/utils/pattern.d.ts @@ -0,0 +1,49 @@ +import { MicromatchOptions, Pattern, PatternRe } from '../types'; +type PatternTypeOptions = { + braceExpansion?: boolean; + caseSensitiveMatch?: boolean; + extglob?: boolean; +}; +export declare function isStaticPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; +export declare function isDynamicPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; +export declare function convertToPositivePattern(pattern: Pattern): Pattern; +export declare function convertToNegativePattern(pattern: Pattern): Pattern; +export declare function isNegativePattern(pattern: Pattern): boolean; +export declare function isPositivePattern(pattern: Pattern): boolean; +export declare function getNegativePatterns(patterns: Pattern[]): Pattern[]; +export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +export declare function getPatternsInsideCurrentDirectory(patterns: Pattern[]): Pattern[]; +/** + * Returns patterns to be expanded relative to (outside) the current directory. + * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +export declare function getPatternsOutsideCurrentDirectory(patterns: Pattern[]): Pattern[]; +export declare function isPatternRelatedToParentDirectory(pattern: Pattern): boolean; +export declare function getBaseDirectory(pattern: Pattern): string; +export declare function hasGlobStar(pattern: Pattern): boolean; +export declare function endsWithSlashGlobStar(pattern: Pattern): boolean; +export declare function isAffectDepthOfReadingPattern(pattern: Pattern): boolean; +export declare function expandPatternsWithBraceExpansion(patterns: Pattern[]): Pattern[]; +export declare function expandBraceExpansion(pattern: Pattern): Pattern[]; +export declare function getPatternParts(pattern: Pattern, options: MicromatchOptions): Pattern[]; +export declare function makeRe(pattern: Pattern, options: MicromatchOptions): PatternRe; +export declare function convertPatternsToRe(patterns: Pattern[], options: MicromatchOptions): PatternRe[]; +export declare function matchAny(entry: string, patternsRe: PatternRe[]): boolean; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
+ */ +export declare function removeDuplicateSlashes(pattern: string): string; +export declare function partitionAbsoluteAndRelative(patterns: Pattern[]): Pattern[][]; +export declare function isAbsolute(pattern: string): boolean; +export {}; diff --git a/node_modules/fast-glob/out/utils/pattern.js b/node_modules/fast-glob/out/utils/pattern.js new file mode 100644 index 0000000..b2924e7 --- /dev/null +++ b/node_modules/fast-glob/out/utils/pattern.js @@ -0,0 +1,206 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isAbsolute = exports.partitionAbsoluteAndRelative = exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; +const path = require("path"); +const globParent = require("glob-parent"); +const micromatch = require("micromatch"); +const GLOBSTAR = '**'; +const ESCAPE_SYMBOL = '\\'; +const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; +const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; +const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; +const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; +const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; +/** + * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. + * The latter is due to the presence of the device path at the beginning of the UNC path. + */ +const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; +function isStaticPattern(pattern, options = {}) { + return !isDynamicPattern(pattern, options); +} +exports.isStaticPattern = isStaticPattern; +function isDynamicPattern(pattern, options = {}) { + /** + * A special case with an empty string is necessary for matching patterns that start with a forward slash. + * An empty string cannot be a dynamic pattern. + * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. + */ + if (pattern === '') { + return false; + } + /** + * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check + * filepath directly (without read directory). 
+ */ + if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { + return true; + } + if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { + return true; + } + return false; +} +exports.isDynamicPattern = isDynamicPattern; +function hasBraceExpansion(pattern) { + const openingBraceIndex = pattern.indexOf('{'); + if (openingBraceIndex === -1) { + return false; + } + const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); + if (closingBraceIndex === -1) { + return false; + } + const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); + return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); +} +function convertToPositivePattern(pattern) { + return isNegativePattern(pattern) ? pattern.slice(1) : pattern; +} +exports.convertToPositivePattern = convertToPositivePattern; +function convertToNegativePattern(pattern) { + return '!' + pattern; +} +exports.convertToNegativePattern = convertToNegativePattern; +function isNegativePattern(pattern) { + return pattern.startsWith('!') && pattern[1] !== '('; +} +exports.isNegativePattern = isNegativePattern; +function isPositivePattern(pattern) { + return !isNegativePattern(pattern); +} +exports.isPositivePattern = isPositivePattern; +function getNegativePatterns(patterns) { + return patterns.filter(isNegativePattern); +} +exports.getNegativePatterns = getNegativePatterns; +function getPositivePatterns(patterns) { + return patterns.filter(isPositivePattern); +} +exports.getPositivePatterns = getPositivePatterns; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsInsideCurrentDirectory(patterns) { + return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); +} +exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsOutsideCurrentDirectory(patterns) { + return patterns.filter(isPatternRelatedToParentDirectory); +} +exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; +function isPatternRelatedToParentDirectory(pattern) { + return pattern.startsWith('..') || pattern.startsWith('./..'); +} +exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; +function getBaseDirectory(pattern) { + return globParent(pattern, { flipBackslashes: false }); +} +exports.getBaseDirectory = getBaseDirectory; +function hasGlobStar(pattern) { + return pattern.includes(GLOBSTAR); +} +exports.hasGlobStar = hasGlobStar; +function endsWithSlashGlobStar(pattern) { + return pattern.endsWith('/' + GLOBSTAR); +} +exports.endsWithSlashGlobStar = endsWithSlashGlobStar; +function isAffectDepthOfReadingPattern(pattern) { + const basename = path.basename(pattern); + return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); +} +exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; +function expandPatternsWithBraceExpansion(patterns) { + return patterns.reduce((collection, pattern) => { + return collection.concat(expandBraceExpansion(pattern)); + }, []); +} +exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; +function expandBraceExpansion(pattern) { + const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); + /** + * Sort the patterns by length so that the same depth patterns are processed side by side. + * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` + */ + patterns.sort((a, b) => a.length - b.length); + /** + * Micromatch can return an empty string in the case of patterns like `{a,}`. + */ + return patterns.filter((pattern) => pattern !== ''); +} +exports.expandBraceExpansion = expandBraceExpansion; +function getPatternParts(pattern, options) { + let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); + /** + * The scan method returns an empty array in some cases. + * See micromatch/picomatch#58 for more details. + */ + if (parts.length === 0) { + parts = [pattern]; + } + /** + * The scan method does not return an empty part for the pattern with a forward slash. + * This is another part of micromatch/picomatch#58. + */ + if (parts[0].startsWith('/')) { + parts[0] = parts[0].slice(1); + parts.unshift(''); + } + return parts; +} +exports.getPatternParts = getPatternParts; +function makeRe(pattern, options) { + return micromatch.makeRe(pattern, options); +} +exports.makeRe = makeRe; +function convertPatternsToRe(patterns, options) { + return patterns.map((pattern) => makeRe(pattern, options)); +} +exports.convertPatternsToRe = convertPatternsToRe; +function matchAny(entry, patternsRe) { + return patternsRe.some((patternRe) => patternRe.test(entry)); +} +exports.matchAny = matchAny; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
+ */ +function removeDuplicateSlashes(pattern) { + return pattern.replace(DOUBLE_SLASH_RE, '/'); +} +exports.removeDuplicateSlashes = removeDuplicateSlashes; +function partitionAbsoluteAndRelative(patterns) { + const absolute = []; + const relative = []; + for (const pattern of patterns) { + if (isAbsolute(pattern)) { + absolute.push(pattern); + } + else { + relative.push(pattern); + } + } + return [absolute, relative]; +} +exports.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; +function isAbsolute(pattern) { + return path.isAbsolute(pattern); +} +exports.isAbsolute = isAbsolute; diff --git a/node_modules/fast-glob/out/utils/stream.d.ts b/node_modules/fast-glob/out/utils/stream.d.ts new file mode 100644 index 0000000..4daf913 --- /dev/null +++ b/node_modules/fast-glob/out/utils/stream.d.ts @@ -0,0 +1,4 @@ +/// +/// +import { Readable } from 'stream'; +export declare function merge(streams: Readable[]): NodeJS.ReadableStream; diff --git a/node_modules/fast-glob/out/utils/stream.js b/node_modules/fast-glob/out/utils/stream.js new file mode 100644 index 0000000..b32028c --- /dev/null +++ b/node_modules/fast-glob/out/utils/stream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.merge = void 0; +const merge2 = require("merge2"); +function merge(streams) { + const mergedStream = merge2(streams); + streams.forEach((stream) => { + stream.once('error', (error) => mergedStream.emit('error', error)); + }); + mergedStream.once('close', () => propagateCloseEventToSources(streams)); + mergedStream.once('end', () => propagateCloseEventToSources(streams)); + return mergedStream; +} +exports.merge = merge; +function propagateCloseEventToSources(streams) { + streams.forEach((stream) => stream.emit('close')); +} diff --git a/node_modules/fast-glob/out/utils/string.d.ts b/node_modules/fast-glob/out/utils/string.d.ts new file mode 100644 index 0000000..c884735 --- /dev/null +++ b/node_modules/fast-glob/out/utils/string.d.ts @@ -0,0 +1,2 @@ +export declare function isString(input: unknown): input is string; +export declare function isEmpty(input: string): boolean; diff --git a/node_modules/fast-glob/out/utils/string.js b/node_modules/fast-glob/out/utils/string.js new file mode 100644 index 0000000..76e7ea5 --- /dev/null +++ b/node_modules/fast-glob/out/utils/string.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmpty = exports.isString = void 0; +function isString(input) { + return typeof input === 'string'; +} +exports.isString = isString; +function isEmpty(input) { + return input === ''; +} +exports.isEmpty = isEmpty; diff --git a/node_modules/fast-glob/package.json b/node_modules/fast-glob/package.json new file mode 100644 index 0000000..e910de9 --- /dev/null +++ b/node_modules/fast-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "fast-glob", + "version": "3.3.3", + "description": "It's a very fast and efficient glob library for Node.js", + "license": "MIT", + "repository": "mrmlnc/fast-glob", + "author": { + "name": "Denis Malinochkin", + "url": "https://mrmlnc.com" + }, + "engines": { + "node": ">=8.6.0" + }, + "main": "out/index.js", + "typings": "out/index.d.ts", + "files": [ + "out", + "!out/{benchmark,tests}", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "keywords": [ + "glob", + "patterns", + "fast", + "implementation" + ], + "devDependencies": { + "@nodelib/fs.macchiato": "^1.0.1", + "@types/glob-parent": "^5.1.0", + "@types/merge2": "^1.1.4", + 
"@types/micromatch": "^4.0.0", + "@types/mocha": "^5.2.7", + "@types/node": "^14.18.53", + "@types/picomatch": "^2.3.0", + "@types/sinon": "^7.5.0", + "bencho": "^0.1.1", + "eslint": "^6.5.1", + "eslint-config-mrmlnc": "^1.1.0", + "execa": "^7.1.1", + "fast-glob": "^3.0.4", + "fdir": "6.0.1", + "glob": "^10.0.0", + "hereby": "^1.8.1", + "mocha": "^6.2.1", + "rimraf": "^5.0.0", + "sinon": "^7.5.0", + "snap-shot-it": "^7.9.10", + "typescript": "^4.9.5" + }, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "scripts": { + "clean": "rimraf out", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "test:e2e": "mocha \"out/**/*.e2e.js\" -s 0", + "test:e2e:sync": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(sync\\)\"", + "test:e2e:async": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(async\\)\"", + "test:e2e:stream": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(stream\\)\"", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile -- -- --sourceMap --watch", + "bench:async": "npm run bench:product:async && npm run bench:regression:async", + "bench:stream": "npm run bench:product:stream && npm run bench:regression:stream", + "bench:sync": "npm run bench:product:sync && npm run bench:regression:sync", + "bench:product": "npm run bench:product:async && npm run bench:product:sync && npm run bench:product:stream", + "bench:product:async": "hereby bench:product:async", + "bench:product:sync": "hereby bench:product:sync", + "bench:product:stream": "hereby bench:product:stream", + "bench:regression": "npm run bench:regression:async && npm run bench:regression:sync && npm run bench:regression:stream", + "bench:regression:async": "hereby bench:regression:async", + "bench:regression:sync": "hereby bench:regression:sync", + "bench:regression:stream": "hereby bench:regression:stream" + } +} diff --git a/node_modules/fastq/.github/dependabot.yml b/node_modules/fastq/.github/dependabot.yml new file mode 100644 index 0000000..7e7cbe1 --- /dev/null +++ b/node_modules/fastq/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 + ignore: + - dependency-name: standard + versions: + - 16.0.3 diff --git a/node_modules/fastq/.github/workflows/ci.yml b/node_modules/fastq/.github/workflows/ci.yml new file mode 100644 index 0000000..09dc7a3 --- /dev/null +++ b/node_modules/fastq/.github/workflows/ci.yml @@ -0,0 +1,75 @@ +name: ci + +on: [push, pull_request] + +jobs: + legacy: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] + + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install --production && npm install tape + + - name: Run tests + run: | + npm run legacy + + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [18.x, 20.x, 22.x] + + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test + + types: + 
runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Install + run: | + npm install + + - name: Run types tests + run: | + npm run typescript diff --git a/node_modules/fastq/LICENSE b/node_modules/fastq/LICENSE new file mode 100644 index 0000000..27c7bb4 --- /dev/null +++ b/node_modules/fastq/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015-2020, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fastq/README.md b/node_modules/fastq/README.md new file mode 100644 index 0000000..1644111 --- /dev/null +++ b/node_modules/fastq/README.md @@ -0,0 +1,312 @@ +# fastq + +![ci][ci-url] +[![npm version][npm-badge]][npm-url] + +Fast, in memory work queue. + +Benchmarks (1 million tasks): + +* setImmediate: 812ms +* fastq: 854ms +* async.queue: 1298ms +* neoAsync.queue: 1249ms + +Obtained on node 12.16.1, on a dedicated server. + +If you need zero-overhead series function call, check out +[fastseries](http://npm.im/fastseries). For zero-overhead parallel +function call, check out [fastparallel](http://npm.im/fastparallel). 
+ +[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) + + * Installation + * Usage + * API + * Licence & copyright + +## Install + +`npm i fastq --save` + +## Usage (callback API) + +```js +'use strict' + +const queue = require('fastq')(worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log('the result is', result) +}) + +function worker (arg, cb) { + cb(null, arg * 2) +} +``` + +## Usage (promise API) + +```js +const queue = require('fastq').promise(worker, 1) + +async function worker (arg) { + return arg * 2 +} + +async function run () { + const result = await queue.push(42) + console.log('the result is', result) +} + +run() +``` + +### Setting "this" + +```js +'use strict' + +const that = { hello: 'world' } +const queue = require('fastq')(that, worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log(this) + console.log('the result is', result) +}) + +function worker (arg, cb) { + console.log(this) + cb(null, arg * 2) +} +``` + +### Using with TypeScript (callback API) + +```ts +'use strict' + +import * as fastq from "fastq"; +import type { queue, done } from "fastq"; + +type Task = { + id: number +} + +const q: queue = fastq(worker, 1) + +q.push({ id: 42}) + +function worker (arg: Task, cb: done) { + console.log(arg.id) + cb(null) +} +``` + +### Using with TypeScript (promise API) + +```ts +'use strict' + +import * as fastq from "fastq"; +import type { queueAsPromised } from "fastq"; + +type Task = { + id: number +} + +const q: queueAsPromised = fastq.promise(asyncWorker, 1) + +q.push({ id: 42}).catch((err) => console.error(err)) + +async function asyncWorker (arg: Task): Promise { + // No need for a try-catch block, fastq handles errors automatically + console.log(arg.id) +} +``` + +## API + +* fastqueue() +* queue#push() +* queue#unshift() +* queue#pause() +* queue#resume() +* queue#idle() +* queue#length() +* queue#getQueue() +* queue#kill() +* queue#killAndDrain() +* queue#error() +* queue#concurrency +* queue#drain +* queue#empty +* queue#saturated +* fastqueue.promise() + +------------------------------------------------------- + +### fastqueue([that], worker, concurrency) + +Creates a new queue. + +Arguments: + +* `that`, optional context of the `worker` function. +* `worker`, worker function, it would be called with `that` as `this`, + if that is specified. +* `concurrency`, number of concurrent tasks that could be executed in + parallel. + +------------------------------------------------------- + +### queue.push(task, done) + +Add a task at the end of the queue. `done(err, result)` will be called +when the task was processed. + +------------------------------------------------------- + +### queue.unshift(task, done) + +Add a task at the beginning of the queue. `done(err, result)` will be called +when the task was processed. + +------------------------------------------------------- + +### queue.pause() + +Pause the processing of tasks. Currently worked tasks are not +stopped. + +------------------------------------------------------- + +### queue.resume() + +Resume the processing of tasks. + +------------------------------------------------------- + +### queue.idle() + +Returns `false` if there are tasks being processed or waiting to be processed. +`true` otherwise. + +------------------------------------------------------- + +### queue.length() + +Returns the number of tasks waiting to be processed (in the queue). 
+ +------------------------------------------------------- + +### queue.getQueue() + +Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks + +------------------------------------------------------- + +### queue.kill() + +Removes all tasks waiting to be processed, and reset `drain` to an empty +function. + +------------------------------------------------------- + +### queue.killAndDrain() + +Same than `kill` but the `drain` function will be called before reset to empty. + +------------------------------------------------------- + +### queue.error(handler) + +Set a global error handler. `handler(err, task)` will be called +each time a task is completed, `err` will be not null if the task has thrown an error. + +------------------------------------------------------- + +### queue.concurrency + +Property that returns the number of concurrent tasks that could be executed in +parallel. It can be altered at runtime. + +------------------------------------------------------- + +### queue.paused + +Property (Read-Only) that returns `true` when the queue is in a paused state. + +------------------------------------------------------- + +### queue.drain + +Function that will be called when the last +item from the queue has been processed by a worker. +It can be altered at runtime. + +------------------------------------------------------- + +### queue.empty + +Function that will be called when the last +item from the queue has been assigned to a worker. +It can be altered at runtime. + +------------------------------------------------------- + +### queue.saturated + +Function that will be called when the queue hits the concurrency +limit. +It can be altered at runtime. + +------------------------------------------------------- + +### fastqueue.promise([that], worker(arg), concurrency) + +Creates a new queue with `Promise` apis. It also offers all the methods +and properties of the object returned by [`fastqueue`](#fastqueue) with the modified +[`push`](#pushPromise) and [`unshift`](#unshiftPromise) methods. + +Node v10+ is required to use the promisified version. + +Arguments: +* `that`, optional context of the `worker` function. +* `worker`, worker function, it would be called with `that` as `this`, + if that is specified. It MUST return a `Promise`. +* `concurrency`, number of concurrent tasks that could be executed in + parallel. + + +#### queue.push(task) => Promise + +Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) +when the task is completed successfully (unsuccessfully). + +This promise could be ignored as it will not lead to a `'unhandledRejection'`. + + +#### queue.unshift(task) => Promise + +Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) +when the task is completed successfully (unsuccessfully). + +This promise could be ignored as it will not lead to a `'unhandledRejection'`. + + +#### queue.drained() => Promise + +Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. + +This promise could be ignored as it will not lead to a `'unhandledRejection'`. 
+ +## License + +ISC + +[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg +[npm-badge]: https://badge.fury.io/js/fastq.svg +[npm-url]: https://badge.fury.io/js/fastq diff --git a/node_modules/fastq/SECURITY.md b/node_modules/fastq/SECURITY.md new file mode 100644 index 0000000..dd9f1d5 --- /dev/null +++ b/node_modules/fastq/SECURITY.md @@ -0,0 +1,15 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ------- | ------------------ | +| 1.x | :white_check_mark: | +| < 1.0 | :x: | + +## Reporting a Vulnerability + +Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). diff --git a/node_modules/fastq/bench.js b/node_modules/fastq/bench.js new file mode 100644 index 0000000..4eaa829 --- /dev/null +++ b/node_modules/fastq/bench.js @@ -0,0 +1,66 @@ +'use strict' + +const max = 1000000 +const fastqueue = require('./')(worker, 1) +const { promisify } = require('util') +const immediate = promisify(setImmediate) +const qPromise = require('./').promise(immediate, 1) +const async = require('async') +const neo = require('neo-async') +const asyncqueue = async.queue(worker, 1) +const neoqueue = neo.queue(worker, 1) + +function bench (func, done) { + const key = max + '*' + func.name + let count = -1 + + console.time(key) + end() + + function end () { + if (++count < max) { + func(end) + } else { + console.timeEnd(key) + if (done) { + done() + } + } + } +} + +function benchFastQ (done) { + fastqueue.push(42, done) +} + +function benchAsyncQueue (done) { + asyncqueue.push(42, done) +} + +function benchNeoQueue (done) { + neoqueue.push(42, done) +} + +function worker (arg, cb) { + setImmediate(cb) +} + +function benchSetImmediate (cb) { + worker(42, cb) +} + +function benchFastQPromise (done) { + qPromise.push(42).then(function () { done() }, done) +} + +function runBench (done) { + async.eachSeries([ + benchSetImmediate, + benchFastQ, + benchNeoQueue, + benchAsyncQueue, + benchFastQPromise + ], bench, done) +} + +runBench(runBench) diff --git a/node_modules/fastq/example.js b/node_modules/fastq/example.js new file mode 100644 index 0000000..665fdc8 --- /dev/null +++ b/node_modules/fastq/example.js @@ -0,0 +1,14 @@ +'use strict' + +/* eslint-disable no-var */ + +var queue = require('./')(worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log('the result is', result) +}) + +function worker (arg, cb) { + cb(null, 42 * 2) +} diff --git a/node_modules/fastq/example.mjs b/node_modules/fastq/example.mjs new file mode 100644 index 0000000..81be789 --- /dev/null +++ b/node_modules/fastq/example.mjs @@ -0,0 +1,11 @@ +import { promise as queueAsPromised } from './queue.js' + +/* eslint-disable */ + +const queue = queueAsPromised(worker, 1) + +console.log('the result is', await queue.push(42)) + +async function worker (arg) { + return 42 * 2 +} diff --git a/node_modules/fastq/index.d.ts b/node_modules/fastq/index.d.ts new file mode 100644 index 0000000..817cdb5 --- /dev/null +++ b/node_modules/fastq/index.d.ts @@ -0,0 +1,57 @@ +declare function fastq(context: C, worker: fastq.worker, concurrency: number): fastq.queue +declare function fastq(worker: fastq.worker, concurrency: number): fastq.queue + +declare namespace fastq { + type worker = (this: C, task: T, cb: fastq.done) => void + type asyncWorker = (this: C, task: T) => Promise + type done = 
(err: Error | null, result?: R) => void + type errorHandler = (err: Error, task: T) => void + + interface queue { + /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */ + push(task: T, done?: done): void + /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */ + unshift(task: T, done?: done): void + /** Pause the processing of tasks. Currently worked tasks are not stopped. */ + pause(): any + /** Resume the processing of tasks. */ + resume(): any + running(): number + /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */ + idle(): boolean + /** Returns the number of tasks waiting to be processed (in the queue). */ + length(): number + /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */ + getQueue(): T[] + /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */ + kill(): any + /** Same than `kill` but the `drain` function will be called before reset to empty. */ + killAndDrain(): any + /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */ + error(handler: errorHandler): void + /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */ + concurrency: number + /** Property (Read-Only) that returns `true` when the queue is in a paused state. */ + readonly paused: boolean + /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */ + drain(): any + /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */ + empty: () => void + /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */ + saturated: () => void + } + + interface queueAsPromised extends queue { + /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ + push(task: T): Promise + /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ + unshift(task: T): Promise + /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. 
*/ + drained(): Promise + } + + function promise(context: C, worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised + function promise(worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised +} + +export = fastq diff --git a/node_modules/fastq/package.json b/node_modules/fastq/package.json new file mode 100644 index 0000000..989151f --- /dev/null +++ b/node_modules/fastq/package.json @@ -0,0 +1,53 @@ +{ + "name": "fastq", + "version": "1.19.1", + "description": "Fast, in memory work queue", + "main": "queue.js", + "scripts": { + "lint": "standard --verbose | snazzy", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js", + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js", + "test:report": "npm run lint && npm run unit:report", + "test": "npm run lint && npm run unit", + "typescript": "tsc --project ./test/tsconfig.json", + "legacy": "tape test/test.js" + }, + "pre-commit": [ + "test", + "typescript" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/fastq.git" + }, + "keywords": [ + "fast", + "queue", + "async", + "worker" + ], + "author": "Matteo Collina ", + "license": "ISC", + "bugs": { + "url": "https://github.com/mcollina/fastq/issues" + }, + "homepage": "https://github.com/mcollina/fastq#readme", + "devDependencies": { + "async": "^3.1.0", + "neo-async": "^2.6.1", + "nyc": "^17.0.0", + "pre-commit": "^1.2.2", + "snazzy": "^9.0.0", + "standard": "^16.0.0", + "tape": "^5.0.0", + "typescript": "^5.0.4" + }, + "dependencies": { + "reusify": "^1.0.4" + }, + "standard": { + "ignore": [ + "example.mjs" + ] + } +} diff --git a/node_modules/fastq/queue.js b/node_modules/fastq/queue.js new file mode 100644 index 0000000..7ea8a31 --- /dev/null +++ b/node_modules/fastq/queue.js @@ -0,0 +1,311 @@ +'use strict' + +/* eslint-disable no-var */ + +var reusify = require('reusify') + +function fastqueue (context, worker, _concurrency) { + if (typeof context === 'function') { + _concurrency = worker + worker = context + context = null + } + + if (!(_concurrency >= 1)) { + throw new Error('fastqueue concurrency must be equal to or greater than 1') + } + + var cache = reusify(Task) + var queueHead = null + var queueTail = null + var _running = 0 + var errorHandler = null + + var self = { + push: push, + drain: noop, + saturated: noop, + pause: pause, + paused: false, + + get concurrency () { + return _concurrency + }, + set concurrency (value) { + if (!(value >= 1)) { + throw new Error('fastqueue concurrency must be equal to or greater than 1') + } + _concurrency = value + + if (self.paused) return + for (; queueHead && _running < _concurrency;) { + _running++ + release() + } + }, + + running: running, + resume: resume, + idle: idle, + length: length, + getQueue: getQueue, + unshift: unshift, + empty: noop, + kill: kill, + killAndDrain: killAndDrain, + error: error + } + + return self + + function running () { + return _running + } + + function pause () { + self.paused = true + } + + function length () { + var current = queueHead + var counter = 0 + + while (current) { + current = current.next + counter++ + } + + return counter + } + + function getQueue () { + var current = queueHead + var tasks = [] + + while (current) { + tasks.push(current.value) + current = current.next + } + + return tasks + } + + function resume () { + if (!self.paused) return + self.paused = false + if (queueHead === null) { + _running++ + 
release() + return + } + for (; queueHead && _running < _concurrency;) { + _running++ + release() + } + } + + function idle () { + return _running === 0 && self.length() === 0 + } + + function push (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + current.errorHandler = errorHandler + + if (_running >= _concurrency || self.paused) { + if (queueTail) { + queueTail.next = current + queueTail = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function unshift (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + current.errorHandler = errorHandler + + if (_running >= _concurrency || self.paused) { + if (queueHead) { + current.next = queueHead + queueHead = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function release (holder) { + if (holder) { + cache.release(holder) + } + var next = queueHead + if (next && _running <= _concurrency) { + if (!self.paused) { + if (queueTail === queueHead) { + queueTail = null + } + queueHead = next.next + next.next = null + worker.call(context, next.value, next.worked) + if (queueTail === null) { + self.empty() + } + } else { + _running-- + } + } else if (--_running === 0) { + self.drain() + } + } + + function kill () { + queueHead = null + queueTail = null + self.drain = noop + } + + function killAndDrain () { + queueHead = null + queueTail = null + self.drain() + self.drain = noop + } + + function error (handler) { + errorHandler = handler + } +} + +function noop () {} + +function Task () { + this.value = null + this.callback = noop + this.next = null + this.release = noop + this.context = null + this.errorHandler = null + + var self = this + + this.worked = function worked (err, result) { + var callback = self.callback + var errorHandler = self.errorHandler + var val = self.value + self.value = null + self.callback = noop + if (self.errorHandler) { + errorHandler(err, val) + } + callback.call(self.context, err, result) + self.release(self) + } +} + +function queueAsPromised (context, worker, _concurrency) { + if (typeof context === 'function') { + _concurrency = worker + worker = context + context = null + } + + function asyncWrapper (arg, cb) { + worker.call(this, arg) + .then(function (res) { + cb(null, res) + }, cb) + } + + var queue = fastqueue(context, asyncWrapper, _concurrency) + + var pushCb = queue.push + var unshiftCb = queue.unshift + + queue.push = push + queue.unshift = unshift + queue.drained = drained + + return queue + + function push (value) { + var p = new Promise(function (resolve, reject) { + pushCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function unshift (value) { + var p = new Promise(function (resolve, reject) { + unshiftCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + 
p.catch(noop) + + return p + } + + function drained () { + var p = new Promise(function (resolve) { + process.nextTick(function () { + if (queue.idle()) { + resolve() + } else { + var previousDrain = queue.drain + queue.drain = function () { + if (typeof previousDrain === 'function') previousDrain() + resolve() + queue.drain = previousDrain + } + } + }) + }) + + return p + } +} + +module.exports = fastqueue +module.exports.promise = queueAsPromised diff --git a/node_modules/fastq/test/example.ts b/node_modules/fastq/test/example.ts new file mode 100644 index 0000000..a47d441 --- /dev/null +++ b/node_modules/fastq/test/example.ts @@ -0,0 +1,83 @@ +import * as fastq from '../' +import { promise as queueAsPromised } from '../' + +// Basic example + +const queue = fastq(worker, 1) + +queue.push('world', (err, result) => { + if (err) throw err + console.log('the result is', result) +}) + +queue.push('push without cb') + +queue.concurrency + +queue.drain() + +queue.empty = () => undefined + +console.log('the queue tasks are', queue.getQueue()) + +queue.idle() + +queue.kill() + +queue.killAndDrain() + +queue.length + +queue.pause() + +queue.resume() + +queue.running() + +queue.saturated = () => undefined + +queue.unshift('world', (err, result) => { + if (err) throw err + console.log('the result is', result) +}) + +queue.unshift('unshift without cb') + +function worker(task: any, cb: fastq.done) { + cb(null, 'hello ' + task) +} + +// Generics example + +interface GenericsContext { + base: number; +} + +const genericsQueue = fastq({ base: 6 }, genericsWorker, 1) + +genericsQueue.push(7, (err, done) => { + if (err) throw err + console.log('the result is', done) +}) + +genericsQueue.unshift(7, (err, done) => { + if (err) throw err + console.log('the result is', done) +}) + +function genericsWorker(this: GenericsContext, task: number, cb: fastq.done) { + cb(null, 'the meaning of life is ' + (this.base * task)) +} + +const queue2 = queueAsPromised(asyncWorker, 1) + +async function asyncWorker(task: any) { + return 'hello ' + task +} + +async function run () { + await queue.push(42) + await queue.unshift(42) +} + +run() diff --git a/node_modules/fastq/test/promise.js b/node_modules/fastq/test/promise.js new file mode 100644 index 0000000..45349a4 --- /dev/null +++ b/node_modules/fastq/test/promise.js @@ -0,0 +1,291 @@ +'use strict' + +const test = require('tape') +const buildQueue = require('../').promise +const { promisify } = require('util') +const sleep = promisify(setTimeout) +const immediate = promisify(setImmediate) + +test('concurrency', function (t) { + t.plan(2) + t.throws(buildQueue.bind(null, worker, 0)) + t.doesNotThrow(buildQueue.bind(null, worker, 1)) + + async function worker (arg) { + return true + } +}) + +test('worker execution', async function (t) { + const queue = buildQueue(worker, 1) + + const result = await queue.push(42) + + t.equal(result, true, 'result matches') + + async function worker (arg) { + t.equal(arg, 42) + return true + } +}) + +test('limit', async function (t) { + const queue = buildQueue(worker, 1) + + const [res1, res2] = await Promise.all([queue.push(10), queue.push(0)]) + t.equal(res1, 10, 'the result matches') + t.equal(res2, 0, 'the result matches') + + async function worker (arg) { + await sleep(arg) + return arg + } +}) + +test('multiple executions', async function (t) { + const queue = buildQueue(worker, 1) + const toExec = [1, 2, 3, 4, 5] + const expected = ['a', 'b', 'c', 'd', 'e'] + let count = 0 + + await Promise.all(toExec.map(async function (task, i) { 
+ const result = await queue.push(task) + t.equal(result, expected[i], 'the result matches') + })) + + async function worker (arg) { + t.equal(arg, toExec[count], 'arg matches') + return expected[count++] + } +}) + +test('drained', async function (t) { + const queue = buildQueue(worker, 2) + + const toExec = new Array(10).fill(10) + let count = 0 + + async function worker (arg) { + await sleep(arg) + count++ + } + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() + + t.equal(count, toExec.length) + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() + + t.equal(count, toExec.length * 2) +}) + +test('drained with exception should not throw', async function (t) { + const queue = buildQueue(worker, 2) + + const toExec = new Array(10).fill(10) + + async function worker () { + throw new Error('foo') + } + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() +}) + +test('drained with drain function', async function (t) { + let drainCalled = false + const queue = buildQueue(worker, 2) + + queue.drain = function () { + drainCalled = true + } + + const toExec = new Array(10).fill(10) + let count = 0 + + async function worker (arg) { + await sleep(arg) + count++ + } + + toExec.forEach(function () { + queue.push() + }) + + await queue.drained() + + t.equal(count, toExec.length) + t.equal(drainCalled, true) +}) + +test('drained while idle should resolve', async function (t) { + const queue = buildQueue(worker, 2) + + async function worker (arg) { + await sleep(arg) + } + + await queue.drained() +}) + +test('drained while idle should not call the drain function', async function (t) { + let drainCalled = false + const queue = buildQueue(worker, 2) + + queue.drain = function () { + drainCalled = true + } + + async function worker (arg) { + await sleep(arg) + } + + await queue.drained() + + t.equal(drainCalled, false) +}) + +test('set this', async function (t) { + t.plan(1) + const that = {} + const queue = buildQueue(that, worker, 1) + + await queue.push(42) + + async function worker (arg) { + t.equal(this, that, 'this matches') + } +}) + +test('unshift', async function (t) { + const queue = buildQueue(worker, 1) + const expected = [1, 2, 3, 4] + + await Promise.all([ + queue.push(1), + queue.push(4), + queue.unshift(3), + queue.unshift(2) + ]) + + t.is(expected.length, 0) + + async function worker (arg) { + t.equal(expected.shift(), arg, 'tasks come in order') + } +}) + +test('push with worker throwing error', async function (t) { + t.plan(5) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + try { + await q.push(42) + } catch (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + } +}) + +test('unshift with worker throwing error', async function (t) { + t.plan(2) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + try { + await q.unshift(42) + } catch (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + } +}) + +test('no unhandledRejection (push)', async function (t) { + function handleRejection 
() { + t.fail('unhandledRejection') + } + process.once('unhandledRejection', handleRejection) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + + q.push(42) + + await immediate() + process.removeListener('unhandledRejection', handleRejection) +}) + +test('no unhandledRejection (unshift)', async function (t) { + function handleRejection () { + t.fail('unhandledRejection') + } + process.once('unhandledRejection', handleRejection) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + + q.unshift(42) + + await immediate() + process.removeListener('unhandledRejection', handleRejection) +}) + +test('drained should resolve after async tasks complete', async function (t) { + const logs = [] + + async function processTask () { + await new Promise(resolve => setTimeout(resolve, 0)) + logs.push('processed') + } + + const queue = buildQueue(processTask, 1) + queue.drain = () => logs.push('called drain') + + queue.drained().then(() => logs.push('drained promise resolved')) + + await Promise.all([ + queue.push(), + queue.push(), + queue.push() + ]) + + t.deepEqual(logs, [ + 'processed', + 'processed', + 'processed', + 'called drain', + 'drained promise resolved' + ], 'events happened in correct order') +}) + +test('drained should handle undefined drain function', async function (t) { + const queue = buildQueue(worker, 1) + + async function worker (arg) { + await sleep(10) + return arg + } + + queue.drain = undefined + queue.push(1) + await queue.drained() + + t.pass('drained resolved successfully with undefined drain') +}) diff --git a/node_modules/fastq/test/test.js b/node_modules/fastq/test/test.js new file mode 100644 index 0000000..79f0f6c --- /dev/null +++ b/node_modules/fastq/test/test.js @@ -0,0 +1,653 @@ +'use strict' + +/* eslint-disable no-var */ + +var test = require('tape') +var buildQueue = require('../') + +test('concurrency', function (t) { + t.plan(6) + t.throws(buildQueue.bind(null, worker, 0)) + t.throws(buildQueue.bind(null, worker, NaN)) + t.doesNotThrow(buildQueue.bind(null, worker, 1)) + + var queue = buildQueue(worker, 1) + t.throws(function () { + queue.concurrency = 0 + }) + t.throws(function () { + queue.concurrency = NaN + }) + t.doesNotThrow(function () { + queue.concurrency = 2 + }) + + function worker (arg, cb) { + cb(null, true) + } +}) + +test('worker execution', function (t) { + t.plan(3) + + var queue = buildQueue(worker, 1) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + function worker (arg, cb) { + t.equal(arg, 42) + cb(null, true) + } +}) + +test('limit', function (t) { + t.plan(4) + + var expected = [10, 0] + var queue = buildQueue(worker, 1) + + queue.push(10, result) + queue.push(0, result) + + function result (err, arg) { + t.error(err, 'no error') + t.equal(arg, expected.shift(), 'the result matches') + } + + function worker (arg, cb) { + setTimeout(cb, arg, null, arg) + } +}) + +test('multiple executions', function (t) { + t.plan(15) + + var queue = buildQueue(worker, 1) + var toExec = [1, 2, 3, 4, 5] + var count = 0 + + toExec.forEach(function (task) { + queue.push(task, done) + }) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, toExec[count - 1], 'the result matches') + } + + function worker (arg, cb) { + t.equal(arg, toExec[count], 'arg matches') + count++ + setImmediate(cb, null, arg) + } +}) + +test('multiple executions, one after another', function (t) { + t.plan(15) + + 
var queue = buildQueue(worker, 1) + var toExec = [1, 2, 3, 4, 5] + var count = 0 + + queue.push(toExec[0], done) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, toExec[count - 1], 'the result matches') + if (count < toExec.length) { + queue.push(toExec[count], done) + } + } + + function worker (arg, cb) { + t.equal(arg, toExec[count], 'arg matches') + count++ + setImmediate(cb, null, arg) + } +}) + +test('set this', function (t) { + t.plan(3) + + var that = {} + var queue = buildQueue(that, worker, 1) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(this, that, 'this matches') + }) + + function worker (arg, cb) { + t.equal(this, that, 'this matches') + cb(null, true) + } +}) + +test('drain', function (t) { + t.plan(4) + + var queue = buildQueue(worker, 1) + var worked = false + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + queue.drain = function () { + t.equal(true, worked, 'drained') + } + + function worker (arg, cb) { + t.equal(arg, 42) + worked = true + setImmediate(cb, null, true) + } +}) + +test('pause && resume', function (t) { + t.plan(13) + + var queue = buildQueue(worker, 1) + var worked = false + var expected = [42, 24] + + t.notOk(queue.paused, 'it should not be paused') + + queue.pause() + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + queue.push(24, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + t.notOk(worked, 'it should be paused') + t.ok(queue.paused, 'it should be paused') + + queue.resume() + queue.pause() + queue.resume() + queue.resume() // second resume is a no-op + + function worker (arg, cb) { + t.notOk(queue.paused, 'it should not be paused') + t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') + t.equal(arg, expected.shift()) + worked = true + process.nextTick(function () { cb(null, true) }) + } +}) + +test('pause in flight && resume', function (t) { + t.plan(16) + + var queue = buildQueue(worker, 1) + var expected = [42, 24, 12] + + t.notOk(queue.paused, 'it should not be paused') + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.ok(queue.paused, 'it should be paused') + process.nextTick(function () { + queue.resume() + queue.pause() + queue.resume() + }) + }) + + queue.push(24, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.paused, 'it should not be paused') + }) + + queue.push(12, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.paused, 'it should not be paused') + }) + + queue.pause() + + function worker (arg, cb) { + t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') + t.equal(arg, expected.shift()) + process.nextTick(function () { cb(null, true) }) + } +}) + +test('altering concurrency', function (t) { + t.plan(24) + + var queue = buildQueue(worker, 1) + + queue.push(24, workDone) + queue.push(24, workDone) + queue.push(24, workDone) + + queue.pause() + + queue.concurrency = 3 // concurrency changes are ignored while paused + queue.concurrency = 2 + + queue.resume() + + t.equal(queue.running(), 2, '2 jobs running') + + queue.concurrency = 3 + + t.equal(queue.running(), 3, '3 jobs running') + + queue.concurrency = 1 + + t.equal(queue.running(), 3, '3 jobs 
running') // running jobs can't be killed + + queue.push(24, workDone) + queue.push(24, workDone) + queue.push(24, workDone) + queue.push(24, workDone) + + function workDone (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + } + + function worker (arg, cb) { + t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('idle()', function (t) { + t.plan(12) + + var queue = buildQueue(worker, 1) + + t.ok(queue.idle(), 'queue is idle') + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.idle(), 'queue is not idle') + }) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + // it will go idle after executing this function + setImmediate(function () { + t.ok(queue.idle(), 'queue is now idle') + }) + }) + + t.notOk(queue.idle(), 'queue is not idle') + + function worker (arg, cb) { + t.notOk(queue.idle(), 'queue is not idle') + t.equal(arg, 42) + setImmediate(cb, null, true) + } +}) + +test('saturated', function (t) { + t.plan(9) + + var queue = buildQueue(worker, 1) + var preworked = 0 + var worked = 0 + + queue.saturated = function () { + t.pass('saturated') + t.equal(preworked, 1, 'started 1 task') + t.equal(worked, 0, 'worked zero task') + } + + queue.push(42, done) + queue.push(42, done) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + } + + function worker (arg, cb) { + t.equal(arg, 42) + preworked++ + setImmediate(function () { + worked++ + cb(null, true) + }) + } +}) + +test('length', function (t) { + t.plan(7) + + var queue = buildQueue(worker, 1) + + t.equal(queue.length(), 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.length(), 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.length(), 1, 'one task waiting') + queue.push(42, done) + t.equal(queue.length(), 2, 'two tasks waiting') + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('getQueue', function (t) { + t.plan(10) + + var queue = buildQueue(worker, 1) + + t.equal(queue.getQueue().length, 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.getQueue().length, 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.getQueue().length, 1, 'one task waiting') + t.equal(queue.getQueue()[0], 42, 'should be equal') + queue.push(43, done) + t.equal(queue.getQueue().length, 2, 'two tasks waiting') + t.equal(queue.getQueue()[0], 42, 'should be equal') + t.equal(queue.getQueue()[1], 43, 'should be equal') + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('unshift', function (t) { + t.plan(8) + + var queue = buildQueue(worker, 1) + var expected = [1, 2, 3, 4] + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('unshift && empty', function (t) { + t.plan(2) + + var queue = buildQueue(worker, 1) + var completed = false + + queue.pause() + + queue.empty = function () { + t.notOk(completed, 'the task has not completed 
yet') + } + + queue.unshift(1, done) + + queue.resume() + + function done (err, result) { + completed = true + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('push && empty', function (t) { + t.plan(2) + + var queue = buildQueue(worker, 1) + var completed = false + + queue.pause() + + queue.empty = function () { + t.notOk(completed, 'the task has not completed yet') + } + + queue.push(1, done) + + queue.resume() + + function done (err, result) { + completed = true + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('kill', function (t) { + t.plan(5) + + var queue = buildQueue(worker, 1) + var expected = [1] + + var predrain = queue.drain + + queue.drain = function drain () { + t.fail('drain should never be called') + } + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + queue.kill() + + function done (err, result) { + t.error(err, 'no error') + setImmediate(function () { + t.equal(queue.length(), 0, 'no queued tasks') + t.equal(queue.running(), 0, 'no running tasks') + t.equal(queue.drain, predrain, 'drain is back to default') + }) + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('killAndDrain', function (t) { + t.plan(6) + + var queue = buildQueue(worker, 1) + var expected = [1] + + var predrain = queue.drain + + queue.drain = function drain () { + t.pass('drain has been called') + } + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + queue.killAndDrain() + + function done (err, result) { + t.error(err, 'no error') + setImmediate(function () { + t.equal(queue.length(), 0, 'no queued tasks') + t.equal(queue.running(), 0, 'no running tasks') + t.equal(queue.drain, predrain, 'drain is back to default') + }) + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('pause && idle', function (t) { + t.plan(11) + + var queue = buildQueue(worker, 1) + var worked = false + + t.notOk(queue.paused, 'it should not be paused') + t.ok(queue.idle(), 'should be idle') + + queue.pause() + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + t.notOk(worked, 'it should be paused') + t.ok(queue.paused, 'it should be paused') + t.notOk(queue.idle(), 'should not be idle') + + queue.resume() + + t.notOk(queue.paused, 'it should not be paused') + t.notOk(queue.idle(), 'it should not be idle') + + function worker (arg, cb) { + t.equal(arg, 42) + worked = true + process.nextTick(cb.bind(null, null, true)) + process.nextTick(function () { + t.ok(queue.idle(), 'is should be idle') + }) + } +}) + +test('push without cb', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + + queue.push(42) + + function worker (arg, cb) { + t.equal(arg, 42) + cb() + } +}) + +test('unshift without cb', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + + queue.unshift(42) + + function worker (arg, cb) { + t.equal(arg, 42) + cb() + } +}) + +test('push with worker throwing error', function (t) { + t.plan(5) + var q = buildQueue(function (task, cb) { + cb(new Error('test error'), null) + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch 
the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + q.push(42, function (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + }) +}) + +test('unshift with worker throwing error', function (t) { + t.plan(5) + var q = buildQueue(function (task, cb) { + cb(new Error('test error'), null) + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + q.unshift(42, function (err) { + t.ok(err instanceof Error, 'unshift callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + }) +}) + +test('pause/resume should trigger drain event', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + queue.pause() + queue.drain = function () { + t.pass('drain should be called') + } + + function worker (arg, cb) { + cb(null, true) + } + + queue.resume() +}) + +test('paused flag', function (t) { + t.plan(2) + + var queue = buildQueue(function (arg, cb) { + cb(null) + }, 1) + t.equal(queue.paused, false) + queue.pause() + t.equal(queue.paused, true) +}) diff --git a/node_modules/fastq/test/tsconfig.json b/node_modules/fastq/test/tsconfig.json new file mode 100644 index 0000000..66e16e9 --- /dev/null +++ b/node_modules/fastq/test/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "noEmit": true, + "strict": true + }, + "files": [ + "./example.ts" + ] +} diff --git a/node_modules/fill-range/LICENSE b/node_modules/fill-range/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/fill-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
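The fastq type definitions and tests above describe the queue API (push/unshift, concurrency, error, drained) but do not show the promise wrapper driven end to end. The sketch below is illustrative only and is not part of the vendored files; it assumes `require('fastq')` resolves to the queue.js added above, and the worker body, delays, and task values are made up for the example.

```js
'use strict'

// Consumer-side sketch of the promise API (illustrative, not vendored code).
const fastq = require('fastq')

async function worker (task) {
  // Simulate asynchronous work proportional to the task value (assumption for the example).
  await new Promise(function (resolve) { setTimeout(resolve, task) })
  return 'done:' + task
}

async function main () {
  // Up to two tasks may run concurrently.
  const queue = fastq.promise(worker, 2)

  // Global handler: invoked as each task completes; `err` is non-null when the worker rejects.
  queue.error(function (err, task) {
    if (err) console.error('task failed', task, err)
  })

  // push()/unshift() return promises that settle when the task finishes.
  const results = await Promise.all([
    queue.push(10),
    queue.push(5),
    queue.unshift(1) // goes to the head of the waiting queue
  ])
  console.log(results) // e.g. [ 'done:10', 'done:5', 'done:1' ]

  // drained() resolves once every queued task has been processed.
  await queue.drained()
}

main().catch(console.error)
```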
diff --git a/node_modules/fill-range/README.md b/node_modules/fill-range/README.md new file mode 100644 index 0000000..8d756fe --- /dev/null +++ b/node_modules/fill-range/README.md @@ -0,0 +1,237 @@ +# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) + +> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save fill-range +``` + +## Usage + +Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. + +```js +const fill = require('fill-range'); +// fill(from, to[, step, options]); + +console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] +console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 +``` + +**Params** + +* `from`: **{String|Number}** the number or letter to start with +* `to`: **{String|Number}** the number or letter to end with +* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. +* `options`: **{Object|Function}**: See all available [options](#options) + +## Examples + +By default, an array of values is returned. + +**Alphabetical ranges** + +```js +console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] +console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] +``` + +**Numerical ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] +``` + +**Negative ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] +console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] +``` + +**Steps (increments)** + +```js +// numerical ranges with increments +console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] +console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] +console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] + +// alphabetical ranges with increments +console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] +``` + +## Options + +### options.step + +**Type**: `number` (formatted as a string or number) + +**Default**: `undefined` + +**Description**: The increment to use for the range. Can be used with letters or numbers. 
+ +**Example(s)** + +```js +// numbers +console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] +console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] +console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] + +// letters +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] +console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] +``` + +### options.strictRanges + +**Type**: `boolean` + +**Default**: `false` + +**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. + +**Example(s)** + +The following are all invalid: + +```js +fill('1.1', '2'); // decimals not supported in ranges +fill('a', '2'); // incompatible range values +fill(1, 10, 'foo'); // invalid "step" argument +``` + +### options.stringify + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Cast all returned values to strings. By default, integers are returned as numbers. + +**Example(s)** + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] +``` + +### options.toRegex + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Create a regex-compatible source string, instead of expanding values to an array. + +**Example(s)** + +```js +// alphabetical range +console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' +// alphabetical with step +console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' +// numerical range +console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' +// numerical range with zero padding +console.log(fill('000001', '100000', { toRegex: true })); +//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' +``` + +### options.transform + +**Type**: `function` + +**Default**: `undefined` + +**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. + +**Example(s)** + +```js +// add zero padding +console.log(fill(1, 5, value => String(value).padStart(4, '0'))); +//=> ['0001', '0002', '0003', '0004', '0005'] +``` + +## About + +
+**Contributing** + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+**Running Tests** + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+**Building docs** + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 116 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [paulmillr](https://github.com/paulmillr) | +| 2 | [realityking](https://github.com/realityking) | +| 2 | [bluelovers](https://github.com/bluelovers) | +| 1 | [edorivai](https://github.com/edorivai) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/fill-range/index.js b/node_modules/fill-range/index.js new file mode 100644 index 0000000..ddb212e --- /dev/null +++ b/node_modules/fill-range/index.js @@ -0,0 +1,248 @@ +/*! + * fill-range + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +const util = require('util'); +const toRegexRange = require('to-regex-range'); + +const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); + +const transform = toNumber => { + return value => toNumber === true ? Number(value) : String(value); +}; + +const isValidValue = value => { + return typeof value === 'number' || (typeof value === 'string' && value !== ''); +}; + +const isNumber = num => Number.isInteger(+num); + +const zeros = input => { + let value = `${input}`; + let index = -1; + if (value[0] === '-') value = value.slice(1); + if (value === '0') return false; + while (value[++index] === '0'); + return index > 0; +}; + +const stringify = (start, end, options) => { + if (typeof start === 'string' || typeof end === 'string') { + return true; + } + return options.stringify === true; +}; + +const pad = (input, maxLength, toNumber) => { + if (maxLength > 0) { + let dash = input[0] === '-' ? '-' : ''; + if (dash) input = input.slice(1); + input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); + } + if (toNumber === false) { + return String(input); + } + return input; +}; + +const toMaxLen = (input, maxLength) => { + let negative = input[0] === '-' ? '-' : ''; + if (negative) { + input = input.slice(1); + maxLength--; + } + while (input.length < maxLength) input = '0' + input; + return negative ? ('-' + input) : input; +}; + +const toSequence = (parts, options, maxLen) => { + parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + + let prefix = options.capture ? 
'' : '?:'; + let positives = ''; + let negatives = ''; + let result; + + if (parts.positives.length) { + positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|'); + } + + if (parts.negatives.length) { + negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`; + } + + if (positives && negatives) { + result = `${positives}|${negatives}`; + } else { + result = positives || negatives; + } + + if (options.wrap) { + return `(${prefix}${result})`; + } + + return result; +}; + +const toRange = (a, b, isNumbers, options) => { + if (isNumbers) { + return toRegexRange(a, b, { wrap: false, ...options }); + } + + let start = String.fromCharCode(a); + if (a === b) return start; + + let stop = String.fromCharCode(b); + return `[${start}-${stop}]`; +}; + +const toRegex = (start, end, options) => { + if (Array.isArray(start)) { + let wrap = options.wrap === true; + let prefix = options.capture ? '' : '?:'; + return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); + } + return toRegexRange(start, end, options); +}; + +const rangeError = (...args) => { + return new RangeError('Invalid range arguments: ' + util.inspect(...args)); +}; + +const invalidRange = (start, end, options) => { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; +}; + +const invalidStep = (step, options) => { + if (options.strictRanges === true) { + throw new TypeError(`Expected step "${step}" to be a number`); + } + return []; +}; + +const fillNumbers = (start, end, step = 1, options = {}) => { + let a = Number(start); + let b = Number(end); + + if (!Number.isInteger(a) || !Number.isInteger(b)) { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; + } + + // fix negative zero + if (a === 0) a = 0; + if (b === 0) b = 0; + + let descending = a > b; + let startString = String(start); + let endString = String(end); + let stepString = String(step); + step = Math.max(Math.abs(step), 1); + + let padded = zeros(startString) || zeros(endString) || zeros(stepString); + let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; + let toNumber = padded === false && stringify(start, end, options) === false; + let format = options.transform || transform(toNumber); + + if (options.toRegex && step === 1) { + return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); + } + + let parts = { negatives: [], positives: [] }; + let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + if (options.toRegex === true && step > 1) { + push(a); + } else { + range.push(pad(format(a, index), maxLen, toNumber)); + } + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return step > 1 + ? 
toSequence(parts, options, maxLen) + : toRegex(range, null, { wrap: false, ...options }); + } + + return range; +}; + +const fillLetters = (start, end, step = 1, options = {}) => { + if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { + return invalidRange(start, end, options); + } + + let format = options.transform || (val => String.fromCharCode(val)); + let a = `${start}`.charCodeAt(0); + let b = `${end}`.charCodeAt(0); + + let descending = a > b; + let min = Math.min(a, b); + let max = Math.max(a, b); + + if (options.toRegex && step === 1) { + return toRange(min, max, false, options); + } + + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + range.push(format(a, index)); + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return toRegex(range, null, { wrap: false, options }); + } + + return range; +}; + +const fill = (start, end, step, options = {}) => { + if (end == null && isValidValue(start)) { + return [start]; + } + + if (!isValidValue(start) || !isValidValue(end)) { + return invalidRange(start, end, options); + } + + if (typeof step === 'function') { + return fill(start, end, 1, { transform: step }); + } + + if (isObject(step)) { + return fill(start, end, 0, step); + } + + let opts = { ...options }; + if (opts.capture === true) opts.wrap = true; + step = step || opts.step || 1; + + if (!isNumber(step)) { + if (step != null && !isObject(step)) return invalidStep(step, opts); + return fill(start, end, 1, step); + } + + if (isNumber(start) && isNumber(end)) { + return fillNumbers(start, end, step, opts); + } + + return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); +}; + +module.exports = fill; diff --git a/node_modules/fill-range/package.json b/node_modules/fill-range/package.json new file mode 100644 index 0000000..582357f --- /dev/null +++ b/node_modules/fill-range/package.json @@ -0,0 +1,74 @@ +{ + "name": "fill-range", + "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", + "version": "7.1.1", + "homepage": "https://github.com/jonschlinkert/fill-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Edo Rivai (edo.rivai.nl)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Paul Miller (paulmillr.com)", + "Rouven Weßling (www.rouvenwessling.de)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/fill-range", + "bugs": { + "url": "https://github.com/jonschlinkert/fill-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "devDependencies": { + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1", + "nyc": "^15.1.0" + }, + "keywords": [ + "alpha", + "alphabetical", + "array", + "bash", + "brace", + "expand", + "expansion", + "fill", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", + "regex", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + 
"gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/get-stream/buffer-stream.js b/node_modules/get-stream/buffer-stream.js new file mode 100644 index 0000000..2dd7574 --- /dev/null +++ b/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,52 @@ +'use strict'; +const {PassThrough: PassThroughStream} = require('stream'); + +module.exports = options => { + options = {...options}; + + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } + + if (isBuffer) { + encoding = null; + } + + const stream = new PassThroughStream({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + let length = 0; + const chunks = []; + + stream.on('data', chunk => { + chunks.push(chunk); + + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return chunks; + } + + return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); + }; + + stream.getBufferedLength = () => length; + + return stream; +}; diff --git a/node_modules/get-stream/index.d.ts b/node_modules/get-stream/index.d.ts new file mode 100644 index 0000000..9485b2b --- /dev/null +++ b/node_modules/get-stream/index.d.ts @@ -0,0 +1,105 @@ +/// +import {Stream} from 'stream'; + +declare class MaxBufferErrorClass extends Error { + readonly name: 'MaxBufferError'; + constructor(); +} + +declare namespace getStream { + interface Options { + /** + Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `MaxBufferError` error. + + @default Infinity + */ + readonly maxBuffer?: number; + } + + interface OptionsWithEncoding extends Options { + /** + [Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + + @default 'utf8' + */ + readonly encoding?: EncodingType; + } + + type MaxBufferError = MaxBufferErrorClass; +} + +declare const getStream: { + /** + Get the `stream` as a string. + + @returns A promise that resolves when the end event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + + @example + ``` + import * as fs from 'fs'; + import getStream = require('get-stream'); + + (async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + // ,,))))))));, + // __)))))))))))))), + // \|/ -\(((((''''((((((((. + // -*-==//////(('' . `)))))), + // /|\ ))| o ;-. '((((( ,(, + // ( `| / ) ;))))' ,_))^;(~ + // | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + // o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + // ; ''''```` `: `:::|\,__,%% );`'; ~ + // | _ ) / `:|`----' `-' + // ______/\/~ | / / + // /~;;.____/;;' / ___--,-( `;;;/ + // / // _;______;'------~~~~~ /;;/\ / + // // | | / ; \;;,\ + // (<_ | ; /',/-----' _> + // \_| ||_ //~;~~~~~~~~~ + // `\_| (,~~ + // \~\ + // ~~ + })(); + ``` + */ + (stream: Stream, options?: getStream.OptionsWithEncoding): Promise; + + /** + Get the `stream` as a buffer. + + It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + */ + buffer( + stream: Stream, + options?: getStream.Options + ): Promise; + + /** + Get the `stream` as an array of values. + + It honors both the `maxBuffer` and `encoding` options. 
The behavior changes slightly based on the encoding chosen: + + - When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + - When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + - When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + */ + array( + stream: Stream, + options?: getStream.Options + ): Promise; + array( + stream: Stream, + options: getStream.OptionsWithEncoding<'buffer'> + ): Promise; + array( + stream: Stream, + options: getStream.OptionsWithEncoding + ): Promise; + + MaxBufferError: typeof MaxBufferErrorClass; +}; + +export = getStream; diff --git a/node_modules/get-stream/index.js b/node_modules/get-stream/index.js new file mode 100644 index 0000000..1c5d028 --- /dev/null +++ b/node_modules/get-stream/index.js @@ -0,0 +1,61 @@ +'use strict'; +const {constants: BufferConstants} = require('buffer'); +const stream = require('stream'); +const {promisify} = require('util'); +const bufferStream = require('./buffer-stream'); + +const streamPipelinePromisified = promisify(stream.pipeline); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +async function getStream(inputStream, options) { + if (!inputStream) { + throw new Error('Expected a stream'); + } + + options = { + maxBuffer: Infinity, + ...options + }; + + const {maxBuffer} = options; + const stream = bufferStream(options); + + await new Promise((resolve, reject) => { + const rejectPromise = error => { + // Don't retrieve an oversized buffer. + if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { + error.bufferedData = stream.getBufferedValue(); + } + + reject(error); + }; + + (async () => { + try { + await streamPipelinePromisified(inputStream, stream); + resolve(); + } catch (error) { + rejectPromise(error); + } + })(); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); + + return stream.getBufferedValue(); +} + +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/get-stream/license b/node_modules/get-stream/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/get-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/get-stream/package.json b/node_modules/get-stream/package.json new file mode 100644 index 0000000..bd47a75 --- /dev/null +++ b/node_modules/get-stream/package.json @@ -0,0 +1,47 @@ +{ + "name": "get-stream", + "version": "6.0.1", + "description": "Get a stream as a string, buffer, or array", + "license": "MIT", + "repository": "sindresorhus/get-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "buffer-stream.js" + ], + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object" + ], + "devDependencies": { + "@types/node": "^14.0.27", + "ava": "^2.4.0", + "into-stream": "^5.0.0", + "tsd": "^0.13.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/get-stream/readme.md b/node_modules/get-stream/readme.md new file mode 100644 index 0000000..70b01fd --- /dev/null +++ b/node_modules/get-stream/readme.md @@ -0,0 +1,124 @@ +# get-stream + +> Get a stream as a string, buffer, or array + +## Install + +``` +$ npm install get-stream +``` + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); + +(async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +})(); +``` + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, options?) + +Get the `stream` as a string. + +#### options + +Type: `object` + +##### encoding + +Type: `string`\ +Default: `'utf8'` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`\ +Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. + +### getStream.buffer(stream, options?) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, options?) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. 
The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +(async () => { + try { + await getStream(streamThatErrorsAtTheEnd('unicorn')); + } catch (error) { + console.log(error.bufferedData); + //=> 'unicorn' + } +})() +``` + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + +--- + +
+Get professional support for this package with a Tidelift subscription. +
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. +
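The readme above describes the `maxBuffer` option and the `MaxBufferError` rejection in prose but only shows the happy path. The sketch below is illustrative and not part of the vendored package; it reuses the readme's placeholder file `unicorn.txt`, and the 1 MiB limit is an arbitrary value chosen for the example.

```js
'use strict';

// Illustrative consumer code for the maxBuffer behaviour described above.
const fs = require('fs');
const getStream = require('get-stream');

(async () => {
	try {
		// Rejects with getStream.MaxBufferError if more than 1 MiB of text arrives.
		const contents = await getStream(fs.createReadStream('unicorn.txt'), {maxBuffer: 1024 * 1024});
		console.log('read', contents.length, 'characters');
	} catch (error) {
		if (error instanceof getStream.MaxBufferError) {
			// Whatever was buffered before the limit was hit is attached by the library.
			console.error('too much data; buffered so far:', error.bufferedData.length, 'characters');
		} else {
			// Other stream errors also carry `bufferedData` (see the Errors section above).
			console.error(error);
		}
	}

	// Byte-oriented variant: with .buffer() the same option counts bytes, not characters.
	const buf = await getStream.buffer(fs.createReadStream('unicorn.txt'), {maxBuffer: 1024 * 1024});
	console.log('read', buf.length, 'bytes');
})();
```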
diff --git a/node_modules/glob-parent/CHANGELOG.md b/node_modules/glob-parent/CHANGELOG.md new file mode 100644 index 0000000..fb9de96 --- /dev/null +++ b/node_modules/glob-parent/CHANGELOG.md @@ -0,0 +1,110 @@ +### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) + + +### Bug Fixes + +* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) + +### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) + + +### Bug Fixes + +* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) + +## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) + + +### Features + +* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) + +## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* Drop support for node <6 & bump dependencies + +### Miscellaneous Chores + +* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) + +## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* question marks are valid path characters on Windows so avoid flagging as a glob when alone +* Update is-glob dependency + +### Features + +* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) +* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) +* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) + +## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) + + +### Features + +* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) +* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) +* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) +* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) +* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) + + +### Bug Fixes + +* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) + +### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) + + +### Features + +* use path-dirname ponyfill ([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) + + +### Bug Fixes + +* unescape glob-escaped dirnames on 
output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) + +## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* update is-glob dependency + +### Features + +* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) + +## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) + + +### Features + +* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) + +## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) + +## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) + + +### Reverts + +* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) + +## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) + + +### Features + +* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) + +## 1.0.0 (2021-01-27) + diff --git a/node_modules/glob-parent/LICENSE b/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..63222d7 --- /dev/null +++ b/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015, 2019 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/glob-parent/README.md b/node_modules/glob-parent/README.md new file mode 100644 index 0000000..36a2793 --- /dev/null +++ b/node_modules/glob-parent/README.md @@ -0,0 +1,137 @@ +


+ +# glob-parent + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] + +Extract the non-magic parent path from a glob string. + +## Usage + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) +``` + +## API + +### `globParent(maybeGlobString, [options])` + +Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. + +#### options + +```js +{ + // Disables the automatic conversion of slashes for Windows + flipBackslashes: true +} +``` + +## Escaping + +The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: + +- `?` (question mark) unless used as a path segment alone +- `*` (asterisk) +- `|` (pipe) +- `(` (opening parenthesis) +- `)` (closing parenthesis) +- `{` (opening curly brace) +- `}` (closing curly brace) +- `[` (opening bracket) +- `]` (closing bracket) + +**Example** + +```js +globParent('foo/[bar]/') // 'foo' +globParent('foo/\\[bar]/') // 'foo/[bar]' +``` + +## Limitations + +### Braces & Brackets +This library attempts a quick and imperfect method of determining which path +parts have glob magic without fully parsing/lexing the pattern. There are some +advanced use cases that can trip it up, such as nested braces where the outer +pair is escaped and the inner one contains a path separator. If you find +yourself in the unlikely circumstance of being affected by this or need to +ensure higher-fidelity glob handling in your library, it is recommended that you +pre-process your input with [expand-braces] and/or [expand-brackets]. + +### Windows +Backslashes are not valid path separators for globs. If a path with backslashes +is provided anyway, for simple cases, glob-parent will replace the path +separator for you and return the non-glob parent path (now with +forward-slashes, which are still valid as Windows path separators). + +This cannot be used in conjunction with escape characters. + +```js +// BAD +globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' + +// GOOD +globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' +``` + +If you are using escape characters for a pattern without path parts (i.e. +relative to `cwd`), prefix with `./` to avoid confusing glob-parent. + +```js +// BAD +globParent('foo \\[bar]') // 'foo ' +globParent('foo \\[bar]*') // 'foo ' + +// GOOD +globParent('./foo \\[bar]') // 'foo [bar]' +globParent('./foo \\[bar]*') // '.' 
+``` + +## License + +ISC + +[expand-braces]: https://github.com/jonschlinkert/expand-braces +[expand-brackets]: https://github.com/jonschlinkert/expand-brackets + +[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg +[npm-url]: https://www.npmjs.com/package/glob-parent +[npm-image]: https://img.shields.io/npm/v/glob-parent.svg + +[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master +[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master + +[travis-url]: https://travis-ci.org/gulpjs/glob-parent +[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci + +[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent +[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor + +[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent +[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg + +[gitter-url]: https://gitter.im/gulpjs/gulp +[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/glob-parent/index.js b/node_modules/glob-parent/index.js new file mode 100644 index 0000000..09e257e --- /dev/null +++ b/node_modules/glob-parent/index.js @@ -0,0 +1,42 @@ +'use strict'; + +var isGlob = require('is-glob'); +var pathPosixDirname = require('path').posix.dirname; +var isWin32 = require('os').platform() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; diff --git a/node_modules/glob-parent/package.json b/node_modules/glob-parent/package.json new file mode 100644 index 0000000..125c971 --- /dev/null +++ b/node_modules/glob-parent/package.json @@ -0,0 +1,48 @@ +{ + "name": "glob-parent", + "version": "5.1.2", + "description": "Extract the non-magic parent path from a glob string.", + "author": "Gulp Team (https://gulpjs.com/)", + "contributors": [ + "Elan Shanker (https://github.com/es128)", + "Blaine Bublitz " + ], + "repository": "gulpjs/glob-parent", + "license": "ISC", + "engines": { + "node": ">= 6" + }, + "main": "index.js", + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "nyc mocha --async-only", + "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "dependencies": { + "is-glob": "^4.0.1" + }, + "devDependencies": { + "coveralls": "^3.0.11", + "eslint": "^2.13.1", + "eslint-config-gulp": "^3.0.1", + "expect": "^1.20.2", + "mocha": "^6.0.2", + "nyc": "^13.3.0" + 
}, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "dirname", + "directory", + "base", + "wildcard" + ] +} diff --git a/node_modules/human-signals/CHANGELOG.md b/node_modules/human-signals/CHANGELOG.md new file mode 100644 index 0000000..70d0392 --- /dev/null +++ b/node_modules/human-signals/CHANGELOG.md @@ -0,0 +1,11 @@ +# 2.1.0 + +## TypeScript types + +- Add [TypeScript definitions](src/main.d.ts) + +# 2.0.0 + +## Breaking changes + +- Minimal supported Node.js version is now `10.17.0` diff --git a/node_modules/human-signals/LICENSE b/node_modules/human-signals/LICENSE new file mode 100644 index 0000000..9af9492 --- /dev/null +++ b/node_modules/human-signals/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 ehmicky + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/human-signals/README.md b/node_modules/human-signals/README.md new file mode 100644 index 0000000..2af37c3 --- /dev/null +++ b/node_modules/human-signals/README.md @@ -0,0 +1,165 @@ +[![Codecov](https://img.shields.io/codecov/c/github/ehmicky/human-signals.svg?label=tested&logo=codecov)](https://codecov.io/gh/ehmicky/human-signals) +[![Travis](https://img.shields.io/badge/cross-platform-4cc61e.svg?logo=travis)](https://travis-ci.org/ehmicky/human-signals) +[![Node](https://img.shields.io/node/v/human-signals.svg?logo=node.js)](https://www.npmjs.com/package/human-signals) +[![Gitter](https://img.shields.io/gitter/room/ehmicky/human-signals.svg?logo=gitter)](https://gitter.im/ehmicky/human-signals) +[![Twitter](https://img.shields.io/badge/%E2%80%8B-twitter-4cc61e.svg?logo=twitter)](https://twitter.com/intent/follow?screen_name=ehmicky) +[![Medium](https://img.shields.io/badge/%E2%80%8B-medium-4cc61e.svg?logo=medium)](https://medium.com/@ehmicky) + +Human-friendly process signals. + +This is a map of known process signals with some information about each signal. + +Unlike +[`os.constants.signals`](https://nodejs.org/api/os.html#os_signal_constants) +this includes: + +- human-friendly [descriptions](#description) +- [default actions](#action), including whether they [can be prevented](#forced) +- whether the signal is [supported](#supported) by the current OS + +# Example + +```js +const { signalsByName, signalsByNumber } = require('human-signals') + +console.log(signalsByName.SIGINT) +// { +// name: 'SIGINT', +// number: 2, +// description: 'User interruption with CTRL-C', +// supported: true, +// action: 'terminate', +// forced: false, +// standard: 'ansi' +// } + +console.log(signalsByNumber[8]) +// { +// name: 'SIGFPE', +// number: 8, +// description: 'Floating point arithmetic error', +// supported: true, +// action: 'core', +// forced: false, +// standard: 'ansi' +// } +``` + +# Install + +```bash +npm install human-signals +``` + +# Usage + +## signalsByName + +_Type_: `object` + +Object whose keys are signal [names](#name) and values are +[signal objects](#signal). + +## signalsByNumber + +_Type_: `object` + +Object whose keys are signal [numbers](#number) and values are +[signal objects](#signal). + +## signal + +_Type_: `object` + +Signal object with the following properties. 
+ +### name + +_Type_: `string` + +Standard name of the signal, for example `'SIGINT'`. + +### number + +_Type_: `number` + +Code number of the signal, for example `2`. While most `number` are +cross-platform, some are different between different OS. + +### description + +_Type_: `string` + +Human-friendly description for the signal, for example +`'User interruption with CTRL-C'`. + +### supported + +_Type_: `boolean` + +Whether the current OS can handle this signal in Node.js using +[`process.on(name, handler)`](https://nodejs.org/api/process.html#process_signal_events). + +The list of supported signals +[is OS-specific](https://github.com/ehmicky/cross-platform-node-guide/blob/master/docs/6_networking_ipc/signals.md#cross-platform-signals). + +### action + +_Type_: `string`\ +_Enum_: `'terminate'`, `'core'`, `'ignore'`, `'pause'`, `'unpause'` + +What is the default action for this signal when it is not handled. + +### forced + +_Type_: `boolean` + +Whether the signal's default action cannot be prevented. This is `true` for +`SIGTERM`, `SIGKILL` and `SIGSTOP`. + +### standard + +_Type_: `string`\ +_Enum_: `'ansi'`, `'posix'`, `'bsd'`, `'systemv'`, `'other'` + +Which standard defined that signal. + +# Support + +If you found a bug or would like a new feature, _don't hesitate_ to +[submit an issue on GitHub](../../issues). + +For other questions, feel free to +[chat with us on Gitter](https://gitter.im/ehmicky/human-signals). + +Everyone is welcome regardless of personal background. We enforce a +[Code of conduct](CODE_OF_CONDUCT.md) in order to promote a positive and +inclusive environment. + +# Contributing + +This project was made with ❤️. The simplest way to give back is by starring and +sharing it online. + +If the documentation is unclear or has a typo, please click on the page's `Edit` +button (pencil icon) and suggest a correction. + +If you would like to help us fix a bug or add a new feature, please check our +[guidelines](CONTRIBUTING.md). Pull requests are welcome! + +Thanks go to our wonderful contributors: + + + + + + + + + +

ehmicky (💻 🎨 🤔 📖) · electrovir (💻)
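
A minimal usage sketch of the API documented above, using only the fields the README lists (`signalsByName` and each signal's `number`, `action`, `forced`, `supported`); this is an illustrative example, not code shipped by the package:

```js
// Look up a few well-known signals and report their default behaviour,
// using only properties documented in the README above.
const { signalsByName } = require('human-signals')

for (const name of ['SIGINT', 'SIGTERM', 'SIGKILL']) {
  const { number, action, forced, supported } = signalsByName[name]
  console.log(
    `${name} (#${number}): default action "${action}"` +
      (forced ? ', cannot be prevented' : '') +
      (supported ? '' : ', not supported on this OS'),
  )
}
```

On a typical Linux host this prints, for example, `SIGKILL (#9): default action "terminate", cannot be prevented`.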
+ + + + + diff --git a/node_modules/human-signals/build/src/core.js b/node_modules/human-signals/build/src/core.js new file mode 100644 index 0000000..98e8fce --- /dev/null +++ b/node_modules/human-signals/build/src/core.js @@ -0,0 +1,273 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGNALS=void 0; + +const SIGNALS=[ +{ +name:"SIGHUP", +number:1, +action:"terminate", +description:"Terminal closed", +standard:"posix"}, + +{ +name:"SIGINT", +number:2, +action:"terminate", +description:"User interruption with CTRL-C", +standard:"ansi"}, + +{ +name:"SIGQUIT", +number:3, +action:"core", +description:"User interruption with CTRL-\\", +standard:"posix"}, + +{ +name:"SIGILL", +number:4, +action:"core", +description:"Invalid machine instruction", +standard:"ansi"}, + +{ +name:"SIGTRAP", +number:5, +action:"core", +description:"Debugger breakpoint", +standard:"posix"}, + +{ +name:"SIGABRT", +number:6, +action:"core", +description:"Aborted", +standard:"ansi"}, + +{ +name:"SIGIOT", +number:6, +action:"core", +description:"Aborted", +standard:"bsd"}, + +{ +name:"SIGBUS", +number:7, +action:"core", +description: +"Bus error due to misaligned, non-existing address or paging error", +standard:"bsd"}, + +{ +name:"SIGEMT", +number:7, +action:"terminate", +description:"Command should be emulated but is not implemented", +standard:"other"}, + +{ +name:"SIGFPE", +number:8, +action:"core", +description:"Floating point arithmetic error", +standard:"ansi"}, + +{ +name:"SIGKILL", +number:9, +action:"terminate", +description:"Forced termination", +standard:"posix", +forced:true}, + +{ +name:"SIGUSR1", +number:10, +action:"terminate", +description:"Application-specific signal", +standard:"posix"}, + +{ +name:"SIGSEGV", +number:11, +action:"core", +description:"Segmentation fault", +standard:"ansi"}, + +{ +name:"SIGUSR2", +number:12, +action:"terminate", +description:"Application-specific signal", +standard:"posix"}, + +{ +name:"SIGPIPE", +number:13, +action:"terminate", +description:"Broken pipe or socket", +standard:"posix"}, + +{ +name:"SIGALRM", +number:14, +action:"terminate", +description:"Timeout or timer", +standard:"posix"}, + +{ +name:"SIGTERM", +number:15, +action:"terminate", +description:"Termination", +standard:"ansi"}, + +{ +name:"SIGSTKFLT", +number:16, +action:"terminate", +description:"Stack is empty or overflowed", +standard:"other"}, + +{ +name:"SIGCHLD", +number:17, +action:"ignore", +description:"Child process terminated, paused or unpaused", +standard:"posix"}, + +{ +name:"SIGCLD", +number:17, +action:"ignore", +description:"Child process terminated, paused or unpaused", +standard:"other"}, + +{ +name:"SIGCONT", +number:18, +action:"unpause", +description:"Unpaused", +standard:"posix", +forced:true}, + +{ +name:"SIGSTOP", +number:19, +action:"pause", +description:"Paused", +standard:"posix", +forced:true}, + +{ +name:"SIGTSTP", +number:20, +action:"pause", +description:"Paused using CTRL-Z or \"suspend\"", +standard:"posix"}, + +{ +name:"SIGTTIN", +number:21, +action:"pause", +description:"Background process cannot read terminal input", +standard:"posix"}, + +{ +name:"SIGBREAK", +number:21, +action:"terminate", +description:"User interruption with CTRL-BREAK", +standard:"other"}, + +{ +name:"SIGTTOU", +number:22, +action:"pause", +description:"Background process cannot write to terminal output", +standard:"posix"}, + +{ +name:"SIGURG", +number:23, +action:"ignore", +description:"Socket received out-of-band data", +standard:"bsd"}, + +{ +name:"SIGXCPU", 
+number:24, +action:"core", +description:"Process timed out", +standard:"bsd"}, + +{ +name:"SIGXFSZ", +number:25, +action:"core", +description:"File too big", +standard:"bsd"}, + +{ +name:"SIGVTALRM", +number:26, +action:"terminate", +description:"Timeout or timer", +standard:"bsd"}, + +{ +name:"SIGPROF", +number:27, +action:"terminate", +description:"Timeout or timer", +standard:"bsd"}, + +{ +name:"SIGWINCH", +number:28, +action:"ignore", +description:"Terminal window size changed", +standard:"bsd"}, + +{ +name:"SIGIO", +number:29, +action:"terminate", +description:"I/O is available", +standard:"other"}, + +{ +name:"SIGPOLL", +number:29, +action:"terminate", +description:"Watched event", +standard:"other"}, + +{ +name:"SIGINFO", +number:29, +action:"ignore", +description:"Request for process information", +standard:"other"}, + +{ +name:"SIGPWR", +number:30, +action:"terminate", +description:"Device running out of power", +standard:"systemv"}, + +{ +name:"SIGSYS", +number:31, +action:"core", +description:"Invalid system call", +standard:"other"}, + +{ +name:"SIGUNUSED", +number:31, +action:"terminate", +description:"Invalid system call", +standard:"other"}];exports.SIGNALS=SIGNALS; +//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/core.js.map b/node_modules/human-signals/build/src/core.js.map new file mode 100644 index 0000000..cbfce26 --- /dev/null +++ b/node_modules/human-signals/build/src/core.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../src/core.js"],"names":["SIGNALS","name","number","action","description","standard","forced"],"mappings":";;AAEO,KAAMA,CAAAA,OAAO,CAAG;AACrB;AACEC,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,iBAJf;AAKEC,QAAQ,CAAE,OALZ,CADqB;;AAQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,+BAJf;AAKEC,QAAQ,CAAE,MALZ,CARqB;;AAerB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,gCAJf;AAKEC,QAAQ,CAAE,OALZ,CAfqB;;AAsBrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,MALZ,CAtBqB;;AA6BrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CA7BqB;;AAoCrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,MALZ,CApCqB;;AA2CrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,KALZ,CA3CqB;;AAkDrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW;AACT,mEALJ;AAMEC,QAAQ,CAAE,KANZ,CAlDqB;;AA0DrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mDAJf;AAKEC,QAAQ,CAAE,OALZ,CA1DqB;;AAiErB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,MALZ,CAjEqB;;AAwErB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAxEqB;;AAgFrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CAhFqB;;AAuFrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,MALZ,CAvFqB;;AA8FrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CA9FqB;;AAqGrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,uBAJf;AAKEC,QAAQ,CAAE,OALZ,CArGqB;;AA4GrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAA
M,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CA5GqB;;AAmHrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,aAJf;AAKEC,QAAQ,CAAE,MALZ,CAnHqB;;AA0HrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,OALZ,CA1HqB;;AAiIrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAjIqB;;AAwIrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAxIqB;;AA+IrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,SAHV;AAIEC,WAAW,CAAE,UAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CA/IqB;;AAuJrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,QAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAvJqB;;AA+JrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oCAJf;AAKEC,QAAQ,CAAE,OALZ,CA/JqB;;AAsKrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,+CAJf;AAKEC,QAAQ,CAAE,OALZ,CAtKqB;;AA6KrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mCAJf;AAKEC,QAAQ,CAAE,OALZ,CA7KqB;;AAoLrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oDAJf;AAKEC,QAAQ,CAAE,OALZ,CApLqB;;AA2LrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,kCAJf;AAKEC,QAAQ,CAAE,KALZ,CA3LqB;;AAkMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,mBAJf;AAKEC,QAAQ,CAAE,KALZ,CAlMqB;;AAyMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,cAJf;AAKEC,QAAQ,CAAE,KALZ,CAzMqB;;AAgNrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAhNqB;;AAuNrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAvNqB;;AA8NrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,KALZ,CA9NqB;;AAqOrB;AACEJ,IAAI,CAAE,OADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CArOqB;;AA4OrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,eAJf;AAKEC,QAAQ,CAAE,OALZ,CA5OqB;;AAmPrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,OALZ,CAnPqB;;AA0PrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,SALZ,CA1PqB;;AAiQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAjQqB;;AAwQrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAxQqB,CAAhB,C","sourcesContent":["/* eslint-disable max-lines */\n// List of known process signals with information about them\nexport const SIGNALS = [\n {\n name: 'SIGHUP',\n number: 1,\n action: 'terminate',\n description: 'Terminal closed',\n standard: 'posix',\n },\n {\n name: 'SIGINT',\n number: 2,\n action: 'terminate',\n description: 'User interruption with CTRL-C',\n standard: 'ansi',\n },\n {\n name: 'SIGQUIT',\n number: 3,\n action: 'core',\n description: 'User interruption with CTRL-\\\\',\n standard: 'posix',\n },\n {\n name: 'SIGILL',\n number: 4,\n action: 'core',\n description: 'Invalid machine instruction',\n standard: 'ansi',\n },\n {\n name: 'SIGTRAP',\n number: 5,\n action: 'core',\n description: 'Debugger breakpoint',\n standard: 'posix',\n },\n {\n name: 'SIGABRT',\n 
number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'ansi',\n },\n {\n name: 'SIGIOT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'bsd',\n },\n {\n name: 'SIGBUS',\n number: 7,\n action: 'core',\n description:\n 'Bus error due to misaligned, non-existing address or paging error',\n standard: 'bsd',\n },\n {\n name: 'SIGEMT',\n number: 7,\n action: 'terminate',\n description: 'Command should be emulated but is not implemented',\n standard: 'other',\n },\n {\n name: 'SIGFPE',\n number: 8,\n action: 'core',\n description: 'Floating point arithmetic error',\n standard: 'ansi',\n },\n {\n name: 'SIGKILL',\n number: 9,\n action: 'terminate',\n description: 'Forced termination',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGUSR1',\n number: 10,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGSEGV',\n number: 11,\n action: 'core',\n description: 'Segmentation fault',\n standard: 'ansi',\n },\n {\n name: 'SIGUSR2',\n number: 12,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGPIPE',\n number: 13,\n action: 'terminate',\n description: 'Broken pipe or socket',\n standard: 'posix',\n },\n {\n name: 'SIGALRM',\n number: 14,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'posix',\n },\n {\n name: 'SIGTERM',\n number: 15,\n action: 'terminate',\n description: 'Termination',\n standard: 'ansi',\n },\n {\n name: 'SIGSTKFLT',\n number: 16,\n action: 'terminate',\n description: 'Stack is empty or overflowed',\n standard: 'other',\n },\n {\n name: 'SIGCHLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'posix',\n },\n {\n name: 'SIGCLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'other',\n },\n {\n name: 'SIGCONT',\n number: 18,\n action: 'unpause',\n description: 'Unpaused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGSTOP',\n number: 19,\n action: 'pause',\n description: 'Paused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGTSTP',\n number: 20,\n action: 'pause',\n description: 'Paused using CTRL-Z or \"suspend\"',\n standard: 'posix',\n },\n {\n name: 'SIGTTIN',\n number: 21,\n action: 'pause',\n description: 'Background process cannot read terminal input',\n standard: 'posix',\n },\n {\n name: 'SIGBREAK',\n number: 21,\n action: 'terminate',\n description: 'User interruption with CTRL-BREAK',\n standard: 'other',\n },\n {\n name: 'SIGTTOU',\n number: 22,\n action: 'pause',\n description: 'Background process cannot write to terminal output',\n standard: 'posix',\n },\n {\n name: 'SIGURG',\n number: 23,\n action: 'ignore',\n description: 'Socket received out-of-band data',\n standard: 'bsd',\n },\n {\n name: 'SIGXCPU',\n number: 24,\n action: 'core',\n description: 'Process timed out',\n standard: 'bsd',\n },\n {\n name: 'SIGXFSZ',\n number: 25,\n action: 'core',\n description: 'File too big',\n standard: 'bsd',\n },\n {\n name: 'SIGVTALRM',\n number: 26,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGPROF',\n number: 27,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGWINCH',\n number: 28,\n action: 'ignore',\n description: 'Terminal window size changed',\n standard: 'bsd',\n },\n {\n name: 'SIGIO',\n number: 29,\n action: 'terminate',\n description: 
'I/O is available',\n standard: 'other',\n },\n {\n name: 'SIGPOLL',\n number: 29,\n action: 'terminate',\n description: 'Watched event',\n standard: 'other',\n },\n {\n name: 'SIGINFO',\n number: 29,\n action: 'ignore',\n description: 'Request for process information',\n standard: 'other',\n },\n {\n name: 'SIGPWR',\n number: 30,\n action: 'terminate',\n description: 'Device running out of power',\n standard: 'systemv',\n },\n {\n name: 'SIGSYS',\n number: 31,\n action: 'core',\n description: 'Invalid system call',\n standard: 'other',\n },\n {\n name: 'SIGUNUSED',\n number: 31,\n action: 'terminate',\n description: 'Invalid system call',\n standard: 'other',\n },\n]\n/* eslint-enable max-lines */\n"],"file":"src/core.js"} \ No newline at end of file diff --git a/node_modules/human-signals/build/src/main.d.ts b/node_modules/human-signals/build/src/main.d.ts new file mode 100644 index 0000000..2dc5ea7 --- /dev/null +++ b/node_modules/human-signals/build/src/main.d.ts @@ -0,0 +1,52 @@ +/** + * Object whose keys are signal names and values are signal objects. + */ +export declare const signalsByName: { [signalName: string]: Signal } +/** + * Object whose keys are signal numbers and values are signal objects. + */ +export declare const signalsByNumber: { [signalNumber: string]: Signal } + +export declare type SignalAction = + | 'terminate' + | 'core' + | 'ignore' + | 'pause' + | 'unpause' +export declare type SignalStandard = + | 'ansi' + | 'posix' + | 'bsd' + | 'systemv' + | 'other' + +export declare type Signal = { + /** + * Standard name of the signal, for example 'SIGINT'. + */ + name: string + /** + * Code number of the signal, for example 2. While most number are cross-platform, some are different between different OS. + */ + number: number + /** + * Human-friendly description for the signal, for example 'User interruption with CTRL-C'. + */ + description: string + /** + * Whether the current OS can handle this signal in Node.js using process.on(name, handler). The list of supported signals is OS-specific. + */ + supported: boolean + /** + * What is the default action for this signal when it is not handled. + */ + action: SignalAction + /** + * Whether the signal's default action cannot be prevented. This is true for SIGTERM, SIGKILL and SIGSTOP. + */ + forced: boolean + /** + * Which standard defined that signal. 
+ */ + standard: SignalStandard +} diff --git a/node_modules/human-signals/build/src/main.js b/node_modules/human-signals/build/src/main.js new file mode 100644 index 0000000..88f5fd2 --- /dev/null +++ b/node_modules/human-signals/build/src/main.js @@ -0,0 +1,71 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.signalsByNumber=exports.signalsByName=void 0;var _os=require("os"); + +var _signals=require("./signals.js"); +var _realtime=require("./realtime.js"); + + + +const getSignalsByName=function(){ +const signals=(0,_signals.getSignals)(); +return signals.reduce(getSignalByName,{}); +}; + +const getSignalByName=function( +signalByNameMemo, +{name,number,description,supported,action,forced,standard}) +{ +return{ +...signalByNameMemo, +[name]:{name,number,description,supported,action,forced,standard}}; + +}; + +const signalsByName=getSignalsByName();exports.signalsByName=signalsByName; + + + + +const getSignalsByNumber=function(){ +const signals=(0,_signals.getSignals)(); +const length=_realtime.SIGRTMAX+1; +const signalsA=Array.from({length},(value,number)=> +getSignalByNumber(number,signals)); + +return Object.assign({},...signalsA); +}; + +const getSignalByNumber=function(number,signals){ +const signal=findSignalByNumber(number,signals); + +if(signal===undefined){ +return{}; +} + +const{name,description,supported,action,forced,standard}=signal; +return{ +[number]:{ +name, +number, +description, +supported, +action, +forced, +standard}}; + + +}; + + + +const findSignalByNumber=function(number,signals){ +const signal=signals.find(({name})=>_os.constants.signals[name]===number); + +if(signal!==undefined){ +return signal; +} + +return signals.find(signalA=>signalA.number===number); +}; + +const signalsByNumber=getSignalsByNumber();exports.signalsByNumber=signalsByNumber; +//# sourceMappingURL=main.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/main.js.map b/node_modules/human-signals/build/src/main.js.map new file mode 100644 index 0000000..3fdcede --- /dev/null +++ b/node_modules/human-signals/build/src/main.js.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["../../src/main.js"],"names":["getSignalsByName","signals","reduce","getSignalByName","signalByNameMemo","name","number","description","supported","action","forced","standard","signalsByName","getSignalsByNumber","length","SIGRTMAX","signalsA","Array","from","value","getSignalByNumber","Object","assign","signal","findSignalByNumber","undefined","find","constants","signalA","signalsByNumber"],"mappings":"2HAAA;;AAEA;AACA;;;;AAIA,KAAMA,CAAAA,gBAAgB,CAAG,UAAW;AAClC,KAAMC,CAAAA,OAAO,CAAG,yBAAhB;AACA,MAAOA,CAAAA,OAAO,CAACC,MAAR,CAAeC,eAAf,CAAgC,EAAhC,CAAP;AACD,CAHD;;AAKA,KAAMA,CAAAA,eAAe,CAAG;AACtBC,gBADsB;AAEtB,CAAEC,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFsB;AAGtB;AACA,MAAO;AACL,GAAGP,gBADE;AAEL,CAACC,IAAD,EAAQ,CAAEA,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFH,CAAP;;AAID,CARD;;AAUO,KAAMC,CAAAA,aAAa,CAAGZ,gBAAgB,EAAtC,C;;;;;AAKP,KAAMa,CAAAA,kBAAkB,CAAG,UAAW;AACpC,KAAMZ,CAAAA,OAAO,CAAG,yBAAhB;AACA,KAAMa,CAAAA,MAAM,CAAGC,mBAAW,CAA1B;AACA,KAAMC,CAAAA,QAAQ,CAAGC,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuB,CAACK,KAAD,CAAQb,MAAR;AACtCc,iBAAiB,CAACd,MAAD,CAASL,OAAT,CADF,CAAjB;;AAGA,MAAOoB,CAAAA,MAAM,CAACC,MAAP,CAAc,EAAd,CAAkB,GAAGN,QAArB,CAAP;AACD,CAPD;;AASA,KAAMI,CAAAA,iBAAiB,CAAG,SAASd,MAAT,CAAiBL,OAAjB,CAA0B;AAClD,KAAMsB,CAAAA,MAAM,CAAGC,kBAAkB,CAAClB,MAAD,CAASL,OAAT,CAAjC;;AAEA,GAAIsB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAO,EAAP;AACD;;AAED,KAAM,CAAEpB,IAAF,CAAQE,WAAR,CAAqBC,SAArB,CAAgCC,MAAhC,CAAwCC,MAAxC,CAAgDC,QAAhD,EAA6DY,MAAnE;AACA,MAAO;AACL,CAACjB,MAAD,EAAU;AACRD,IADQ;AAERC,MAFQ;AAGRC,WAHQ;AAIRC,SAJQ;AAKRC,MALQ;AAMRC,MANQ;AAORC,QAPQ,CADL,CAAP;;;AAWD,CAnBD;;;;AAuBA,KAAMa,CAAAA,kBAAkB,CAAG,SAASlB,MAAT,CAAiBL,OAAjB,CAA0B;AACnD,KAAMsB,CAAAA,MAAM,CAAGtB,OAAO,CAACyB,IAAR,CAAa,CAAC,CAAErB,IAAF,CAAD,GAAcsB,cAAU1B,OAAV,CAAkBI,IAAlB,IAA4BC,MAAvD,CAAf;;AAEA,GAAIiB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAOF,CAAAA,MAAP;AACD;;AAED,MAAOtB,CAAAA,OAAO,CAACyB,IAAR,CAAaE,OAAO,EAAIA,OAAO,CAACtB,MAAR,GAAmBA,MAA3C,CAAP;AACD,CARD;;AAUO,KAAMuB,CAAAA,eAAe,CAAGhB,kBAAkB,EAA1C,C","sourcesContent":["import { constants } from 'os'\n\nimport { getSignals } from './signals.js'\nimport { SIGRTMAX } from './realtime.js'\n\n// Retrieve `signalsByName`, an object mapping signal name to signal properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByName = function() {\n const signals = getSignals()\n return signals.reduce(getSignalByName, {})\n}\n\nconst getSignalByName = function(\n signalByNameMemo,\n { name, number, description, supported, action, forced, standard },\n) {\n return {\n ...signalByNameMemo,\n [name]: { name, number, description, supported, action, forced, standard },\n }\n}\n\nexport const signalsByName = getSignalsByName()\n\n// Retrieve `signalsByNumber`, an object mapping signal number to signal\n// properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByNumber = function() {\n const signals = getSignals()\n const length = SIGRTMAX + 1\n const signalsA = Array.from({ length }, (value, number) =>\n getSignalByNumber(number, signals),\n )\n return Object.assign({}, ...signalsA)\n}\n\nconst getSignalByNumber = function(number, signals) {\n const signal = findSignalByNumber(number, signals)\n\n if (signal === undefined) {\n return {}\n }\n\n const { name, description, supported, action, forced, standard } = signal\n return {\n [number]: {\n name,\n number,\n description,\n supported,\n action,\n forced,\n standard,\n },\n }\n}\n\n// Several signals might end up 
sharing the same number because of OS-specific\n// numbers, in which case those prevail.\nconst findSignalByNumber = function(number, signals) {\n const signal = signals.find(({ name }) => constants.signals[name] === number)\n\n if (signal !== undefined) {\n return signal\n }\n\n return signals.find(signalA => signalA.number === number)\n}\n\nexport const signalsByNumber = getSignalsByNumber()\n"],"file":"src/main.js"} \ No newline at end of file diff --git a/node_modules/human-signals/build/src/realtime.js b/node_modules/human-signals/build/src/realtime.js new file mode 100644 index 0000000..f665516 --- /dev/null +++ b/node_modules/human-signals/build/src/realtime.js @@ -0,0 +1,19 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGRTMAX=exports.getRealtimeSignals=void 0; +const getRealtimeSignals=function(){ +const length=SIGRTMAX-SIGRTMIN+1; +return Array.from({length},getRealtimeSignal); +};exports.getRealtimeSignals=getRealtimeSignals; + +const getRealtimeSignal=function(value,index){ +return{ +name:`SIGRT${index+1}`, +number:SIGRTMIN+index, +action:"terminate", +description:"Application-specific signal (realtime)", +standard:"posix"}; + +}; + +const SIGRTMIN=34; +const SIGRTMAX=64;exports.SIGRTMAX=SIGRTMAX; +//# sourceMappingURL=realtime.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/realtime.js.map b/node_modules/human-signals/build/src/realtime.js.map new file mode 100644 index 0000000..808bbd1 --- /dev/null +++ b/node_modules/human-signals/build/src/realtime.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../src/realtime.js"],"names":["getRealtimeSignals","length","SIGRTMAX","SIGRTMIN","Array","from","getRealtimeSignal","value","index","name","number","action","description","standard"],"mappings":";AACO,KAAMA,CAAAA,kBAAkB,CAAG,UAAW;AAC3C,KAAMC,CAAAA,MAAM,CAAGC,QAAQ,CAAGC,QAAX,CAAsB,CAArC;AACA,MAAOC,CAAAA,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuBK,iBAAvB,CAAP;AACD,CAHM,C;;AAKP,KAAMA,CAAAA,iBAAiB,CAAG,SAASC,KAAT,CAAgBC,KAAhB,CAAuB;AAC/C,MAAO;AACLC,IAAI,CAAG,QAAOD,KAAK,CAAG,CAAE,EADnB;AAELE,MAAM,CAAEP,QAAQ,CAAGK,KAFd;AAGLG,MAAM,CAAE,WAHH;AAILC,WAAW,CAAE,wCAJR;AAKLC,QAAQ,CAAE,OALL,CAAP;;AAOD,CARD;;AAUA,KAAMV,CAAAA,QAAQ,CAAG,EAAjB;AACO,KAAMD,CAAAA,QAAQ,CAAG,EAAjB,C","sourcesContent":["// List of realtime signals with information about them\nexport const getRealtimeSignals = function() {\n const length = SIGRTMAX - SIGRTMIN + 1\n return Array.from({ length }, getRealtimeSignal)\n}\n\nconst getRealtimeSignal = function(value, index) {\n return {\n name: `SIGRT${index + 1}`,\n number: SIGRTMIN + index,\n action: 'terminate',\n description: 'Application-specific signal (realtime)',\n standard: 'posix',\n }\n}\n\nconst SIGRTMIN = 34\nexport const SIGRTMAX = 64\n"],"file":"src/realtime.js"} \ No newline at end of file diff --git a/node_modules/human-signals/build/src/signals.js b/node_modules/human-signals/build/src/signals.js new file mode 100644 index 0000000..ab3b387 --- /dev/null +++ b/node_modules/human-signals/build/src/signals.js @@ -0,0 +1,35 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.getSignals=void 0;var _os=require("os"); + +var _core=require("./core.js"); +var _realtime=require("./realtime.js"); + + + +const getSignals=function(){ +const realtimeSignals=(0,_realtime.getRealtimeSignals)(); +const signals=[..._core.SIGNALS,...realtimeSignals].map(normalizeSignal); +return signals; +};exports.getSignals=getSignals; + + + + + + + +const normalizeSignal=function({ +name, 
+number:defaultNumber, +description, +action, +forced=false, +standard}) +{ +const{ +signals:{[name]:constantSignal}}= +_os.constants; +const supported=constantSignal!==undefined; +const number=supported?constantSignal:defaultNumber; +return{name,number,description,supported,action,forced,standard}; +}; +//# sourceMappingURL=signals.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/signals.js.map b/node_modules/human-signals/build/src/signals.js.map new file mode 100644 index 0000000..2a6b919 --- /dev/null +++ b/node_modules/human-signals/build/src/signals.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../src/signals.js"],"names":["getSignals","realtimeSignals","signals","SIGNALS","map","normalizeSignal","name","number","defaultNumber","description","action","forced","standard","constantSignal","constants","supported","undefined"],"mappings":"gGAAA;;AAEA;AACA;;;;AAIO,KAAMA,CAAAA,UAAU,CAAG,UAAW;AACnC,KAAMC,CAAAA,eAAe,CAAG,kCAAxB;AACA,KAAMC,CAAAA,OAAO,CAAG,CAAC,GAAGC,aAAJ,CAAa,GAAGF,eAAhB,EAAiCG,GAAjC,CAAqCC,eAArC,CAAhB;AACA,MAAOH,CAAAA,OAAP;AACD,CAJM,C;;;;;;;;AAYP,KAAMG,CAAAA,eAAe,CAAG,SAAS;AAC/BC,IAD+B;AAE/BC,MAAM,CAAEC,aAFuB;AAG/BC,WAH+B;AAI/BC,MAJ+B;AAK/BC,MAAM,CAAG,KALsB;AAM/BC,QAN+B,CAAT;AAOrB;AACD,KAAM;AACJV,OAAO,CAAE,CAAE,CAACI,IAAD,EAAQO,cAAV,CADL;AAEFC,aAFJ;AAGA,KAAMC,CAAAA,SAAS,CAAGF,cAAc,GAAKG,SAArC;AACA,KAAMT,CAAAA,MAAM,CAAGQ,SAAS,CAAGF,cAAH,CAAoBL,aAA5C;AACA,MAAO,CAAEF,IAAF,CAAQC,MAAR,CAAgBE,WAAhB,CAA6BM,SAA7B,CAAwCL,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAAP;AACD,CAdD","sourcesContent":["import { constants } from 'os'\n\nimport { SIGNALS } from './core.js'\nimport { getRealtimeSignals } from './realtime.js'\n\n// Retrieve list of know signals (including realtime) with information about\n// them\nexport const getSignals = function() {\n const realtimeSignals = getRealtimeSignals()\n const signals = [...SIGNALS, ...realtimeSignals].map(normalizeSignal)\n return signals\n}\n\n// Normalize signal:\n// - `number`: signal numbers are OS-specific. This is taken into account by\n// `os.constants.signals`. However we provide a default `number` since some\n// signals are not defined for some OS.\n// - `forced`: set default to `false`\n// - `supported`: set value\nconst normalizeSignal = function({\n name,\n number: defaultNumber,\n description,\n action,\n forced = false,\n standard,\n}) {\n const {\n signals: { [name]: constantSignal },\n } = constants\n const supported = constantSignal !== undefined\n const number = supported ? 
constantSignal : defaultNumber\n return { name, number, description, supported, action, forced, standard }\n}\n"],"file":"src/signals.js"} \ No newline at end of file diff --git a/node_modules/human-signals/package.json b/node_modules/human-signals/package.json new file mode 100644 index 0000000..fd1d027 --- /dev/null +++ b/node_modules/human-signals/package.json @@ -0,0 +1,64 @@ +{ + "name": "human-signals", + "version": "2.1.0", + "main": "build/src/main.js", + "files": [ + "build/src", + "!~" + ], + "scripts": { + "test": "gulp test" + }, + "husky": { + "hooks": { + "pre-push": "gulp check --full" + } + }, + "description": "Human-friendly process signals", + "keywords": [ + "signal", + "signals", + "handlers", + "error-handling", + "errors", + "interrupts", + "sigterm", + "sigint", + "irq", + "process", + "exit", + "exit-code", + "status", + "operating-system", + "es6", + "javascript", + "linux", + "macos", + "windows", + "nodejs" + ], + "license": "Apache-2.0", + "homepage": "https://git.io/JeluP", + "repository": "ehmicky/human-signals", + "bugs": { + "url": "https://github.com/ehmicky/human-signals/issues" + }, + "author": "ehmicky (https://github.com/ehmicky)", + "directories": { + "lib": "src", + "test": "test" + }, + "types": "build/src/main.d.ts", + "dependencies": {}, + "devDependencies": { + "@ehmicky/dev-tasks": "^0.31.9", + "ajv": "^6.12.0", + "ava": "^3.5.0", + "gulp": "^4.0.2", + "husky": "^4.2.3", + "test-each": "^2.0.0" + }, + "engines": { + "node": ">=10.17.0" + } +} diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE new file mode 100644 index 0000000..842218c --- /dev/null +++ b/node_modules/is-extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md new file mode 100644 index 0000000..0416af5 --- /dev/null +++ b/node_modules/is-extglob/README.md @@ -0,0 +1,107 @@ +# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) + +> Returns true if a string has an extglob. 
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extglob +``` + +## Usage + +```js +var isExtglob = require('is-extglob'); +``` + +**True** + +```js +isExtglob('?(abc)'); +isExtglob('@(abc)'); +isExtglob('!(abc)'); +isExtglob('*(abc)'); +isExtglob('+(abc)'); +``` + +**False** + +Escaped extglobs: + +```js +isExtglob('\\?(abc)'); +isExtglob('\\@(abc)'); +isExtglob('\\!(abc)'); +isExtglob('\\*(abc)'); +isExtglob('\\+(abc)'); +``` + +Everything else... + +```js +isExtglob('foo.js'); +isExtglob('!foo.js'); +isExtglob('*.js'); +isExtglob('**/abc.js'); +isExtglob('abc/*.js'); +isExtglob('abc/(aaa|bbb).js'); +isExtglob('abc/[a-z].js'); +isExtglob('abc/{a,b}.js'); +isExtglob('abc/?.js'); +isExtglob('abc.js'); +isExtglob('abc/def/ghi.js'); +``` + +## History + +**v2.0** + +Adds support for escaping. Escaped exglobs no longer return true. + +## About + +### Related projects + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js new file mode 100644 index 0000000..c1d986f --- /dev/null +++ b/node_modules/is-extglob/index.js @@ -0,0 +1,20 @@ +/*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. 
+ */ + +module.exports = function isExtglob(str) { + if (typeof str !== 'string' || str === '') { + return false; + } + + var match; + while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { + if (match[2]) return true; + str = str.slice(match.index + match[0].length); + } + + return false; +}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json new file mode 100644 index 0000000..7a90836 --- /dev/null +++ b/node_modules/is-extglob/package.json @@ -0,0 +1,69 @@ +{ + "name": "is-extglob", + "description": "Returns true if a string has an extglob.", + "version": "2.1.1", + "homepage": "https://github.com/jonschlinkert/is-extglob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extglob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-glob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE new file mode 100644 index 0000000..3f2eca1 --- /dev/null +++ b/node_modules/is-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
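For reference, a minimal usage sketch of the `is-extglob` check added above — illustrative only, not part of the vendored files, and assuming the package resolves as `is-extglob`:

```js
// Illustrative sketch — mirrors the behavior documented in the is-extglob README above.
var isExtglob = require('is-extglob');

// An unescaped extglob segment matches the second capture group of the scanner's
// regex, so the check returns true immediately.
console.log(isExtglob('foo/@(bar|baz).js')); //=> true

// An escaped extglob is consumed by the `(\\).` branch instead; the loop skips
// past it and, finding nothing else to match, returns false.
console.log(isExtglob('foo/\\@(bar|baz).js')); //=> false
```

The second call returns `false` because the escape sequence is consumed before the extglob characters are examined — the v2.0 behavior noted in the README's history section.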
diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md new file mode 100644 index 0000000..740724b --- /dev/null +++ b/node_modules/is-glob/README.md @@ -0,0 +1,206 @@ +# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) + +> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-glob +``` + +You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). + +## Usage + +```js +var isGlob = require('is-glob'); +``` + +### Default behavior + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js'); +isGlob('*.js'); +isGlob('**/abc.js'); +isGlob('abc/*.js'); +isGlob('abc/(aaa|bbb).js'); +isGlob('abc/[a-z].js'); +isGlob('abc/{a,b}.js'); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js'); +isGlob('abc/!(a).js'); +isGlob('abc/+(a).js'); +isGlob('abc/*(a).js'); +isGlob('abc/?(a).js'); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('abc/\\@(a).js'); +isGlob('abc/\\!(a).js'); +isGlob('abc/\\+(a).js'); +isGlob('abc/\\*(a).js'); +isGlob('abc/\\?(a).js'); +isGlob('\\!foo.js'); +isGlob('\\*.js'); +isGlob('\\*\\*/abc.js'); +isGlob('abc/\\*.js'); +isGlob('abc/\\(aaa|bbb).js'); +isGlob('abc/\\[a-z].js'); +isGlob('abc/\\{a,b}.js'); +//=> false +``` + +Patterns that do not have glob patterns return `false`: + +```js +isGlob('abc.js'); +isGlob('abc/def/ghi.js'); +isGlob('foo.js'); +isGlob('abc/@.js'); +isGlob('abc/+.js'); +isGlob('abc/?.js'); +isGlob(); +isGlob(null); +//=> false +``` + +Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): + +```js +isGlob(['**/*.js']); +isGlob(['foo.js']); +//=> false +``` + +### Option strict + +When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that +some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. 
+ +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js', {strict: false}); +isGlob('*.js', {strict: false}); +isGlob('**/abc.js', {strict: false}); +isGlob('abc/*.js', {strict: false}); +isGlob('abc/(aaa|bbb).js', {strict: false}); +isGlob('abc/[a-z].js', {strict: false}); +isGlob('abc/{a,b}.js', {strict: false}); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js', {strict: false}); +isGlob('abc/!(a).js', {strict: false}); +isGlob('abc/+(a).js', {strict: false}); +isGlob('abc/*(a).js', {strict: false}); +isGlob('abc/?(a).js', {strict: false}); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('\\!foo.js', {strict: false}); +isGlob('\\*.js', {strict: false}); +isGlob('\\*\\*/abc.js', {strict: false}); +isGlob('abc/\\*.js', {strict: false}); +isGlob('abc/\\(aaa|bbb).js', {strict: false}); +isGlob('abc/\\[a-z].js', {strict: false}); +isGlob('abc/\\{a,b}.js', {strict: false}); +//=> false +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") +* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") +* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") +* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 47 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [doowb](https://github.com/doowb) | +| 1 | [phated](https://github.com/phated) | +| 1 | [danhper](https://github.com/danhper) | +| 1 | [paulmillr](https://github.com/paulmillr) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js new file mode 100644 index 0000000..620f563 --- /dev/null +++ b/node_modules/is-glob/index.js @@ -0,0 +1,150 @@ +/*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +var isExtglob = require('is-extglob'); +var chars = { '{': '}', '(': ')', '[': ']'}; +var strictCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + var pipeIndex = -2; + var closeSquareIndex = -2; + var closeCurlyIndex = -2; + var closeParenIndex = -2; + var backSlashIndex = -2; + while (index < str.length) { + if (str[index] === '*') { + return true; + } + + if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { + return true; + } + + if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { + if (closeSquareIndex < index) { + closeSquareIndex = str.indexOf(']', index); + } + if (closeSquareIndex > index) { + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + } + } + + if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { + closeCurlyIndex = str.indexOf('}', index); + if (closeCurlyIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { + return true; + } + } + } + + if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { + closeParenIndex = str.indexOf(')', index); + if (closeParenIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + + if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { + if (pipeIndex < index) { + pipeIndex = str.indexOf('|', index); + } + if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { + closeParenIndex = str.indexOf(')', pipeIndex); + if (closeParenIndex > pipeIndex) { + backSlashIndex = str.indexOf('\\', pipeIndex); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +var relaxedCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + while (index < str.length) { + if (/[*?{}()[\]]/.test(str[index])) { + return true; + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +module.exports = function isGlob(str, options) { + if (typeof str !== 'string' || str === '') { + return false; + } + + if (isExtglob(str)) { + return true; + } + + var check = strictCheck; + + // optionally relax check + if (options && options.strict === false) { + check = relaxedCheck; + } + + return check(str); +}; diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json new file mode 100644 index 0000000..858af03 --- /dev/null +++ b/node_modules/is-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "is-glob", + "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", + "version": "4.0.3", + "homepage": "https://github.com/micromatch/is-glob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Daniel Perez (https://tuvistavie.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/is-glob", + "bugs": { + "url": "https://github.com/micromatch/is-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha && node benchmark.js" + }, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "layout": "default", + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assemble", + "base", + "update", + "verb" + ] + }, + "reflinks": [ + "assemble", + "bach", + "base", + "composer", + "gulp", + "has-glob", + "is-valid-glob", + "micromatch", + "npm", + "scaffold", + "verb", + "vinyl" + ] + } +} diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/is-number/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md new file mode 100644 index 0000000..eb8149e --- /dev/null +++ b/node_modules/is-number/README.md @@ -0,0 +1,187 @@ +# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) + +> Returns true if the value is a finite number. 
+ +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-number +``` + +## Why is this needed? + +In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: + +```js +console.log(+[]); //=> 0 +console.log(+''); //=> 0 +console.log(+' '); //=> 0 +console.log(typeof NaN); //=> 'number' +``` + +This library offers a performant way to smooth out edge cases like these. + +## Usage + +```js +const isNumber = require('is-number'); +``` + +See the [tests](./test.js) for more examples. + +### true + +```js +isNumber(5e3); // true +isNumber(0xff); // true +isNumber(-1.1); // true +isNumber(0); // true +isNumber(1); // true +isNumber(1.1); // true +isNumber(10); // true +isNumber(10.10); // true +isNumber(100); // true +isNumber('-1.1'); // true +isNumber('0'); // true +isNumber('012'); // true +isNumber('0xff'); // true +isNumber('1'); // true +isNumber('1.1'); // true +isNumber('10'); // true +isNumber('10.10'); // true +isNumber('100'); // true +isNumber('5e3'); // true +isNumber(parseInt('012')); // true +isNumber(parseFloat('012')); // true +``` + +### False + +Everything else is false, as you would expect: + +```js +isNumber(Infinity); // false +isNumber(NaN); // false +isNumber(null); // false +isNumber(undefined); // false +isNumber(''); // false +isNumber(' '); // false +isNumber('foo'); // false +isNumber([1]); // false +isNumber([]); // false +isNumber(function () {}); // false +isNumber({}); // false +``` + +## Release history + +### 7.0.0 + +* Refactor. Now uses `.isFinite` if it exists. +* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. + +### 6.0.0 + +* Optimizations, thanks to @benaadams. + +### 5.0.0 + +**Breaking changes** + +* removed support for `instanceof Number` and `instanceof String` + +## Benchmarks + +As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. + +``` +# all +v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) +v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) +parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) +fastest is 'v7.0' + +# string +v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) +v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) +parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) +fastest is 'parseFloat,v7.0' + +# number +v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) +v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) +parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) +fastest is 'v6.0' +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 49 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [charlike-old](https://github.com/charlike-old) | +| 1 | [benaadams](https://github.com/benaadams) | +| 1 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js new file mode 100644 index 0000000..27f19b7 --- /dev/null +++ b/node_modules/is-number/index.js @@ -0,0 +1,18 @@ +/*! + * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function(num) { + if (typeof num === 'number') { + return num - num === 0; + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); + } + return false; +}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json new file mode 100644 index 0000000..3715072 --- /dev/null +++ b/node_modules/is-number/package.json @@ -0,0 +1,82 @@ +{ + "name": "is-number", + "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", + "version": "7.0.0", + "homepage": "https://github.com/jonschlinkert/is-number", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Olsten Larck (https://i.am.charlike.online)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "jonschlinkert/is-number", + "bugs": { + "url": "https://github.com/jonschlinkert/is-number/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.12.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi": "^0.3.1", + "benchmark": "^2.1.4", + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "cast", + "check", + "coerce", + "coercion", + "finite", + "integer", + "is", + "isnan", + "is-nan", + "is-num", + "is-number", + "isnumber", + "isfinite", + "istype", + "kind", + "math", + "nan", + "num", + "number", + "numeric", + "parseFloat", + "parseInt", + "test", + "type", + "typeof", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "is-plain-object", + "is-primitive", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/is-stream/index.d.ts b/node_modules/is-stream/index.d.ts new file mode 100644 index 0000000..eee2e83 --- /dev/null +++ b/node_modules/is-stream/index.d.ts @@ -0,0 +1,79 @@ +import * as stream from 'stream'; + +declare const isStream: { + /** + @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream(fs.createReadStream('unicorn.png')); + //=> true + + isStream({}); + //=> false + ``` + */ + (stream: unknown): stream is stream.Stream; + + /** + @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.writable(fs.createWriteStrem('unicorn.txt')); + //=> true + ``` + */ + writable(stream: unknown): stream is stream.Writable; + + /** + @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.readable(fs.createReadStream('unicorn.png')); + //=> true + ``` + */ + readable(stream: unknown): stream is stream.Readable; + + /** + @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + + @example + ``` + import {Duplex} from 'stream'; + import isStream = require('is-stream'); + + isStream.duplex(new Duplex()); + //=> true + ``` + */ + duplex(stream: unknown): stream is stream.Duplex; + + /** + @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). 
+ + @example + ``` + import * as fs from 'fs'; + import Stringify = require('streaming-json-stringify'); + import isStream = require('is-stream'); + + isStream.transform(Stringify()); + //=> true + ``` + */ + transform(input: unknown): input is stream.Transform; +}; + +export = isStream; diff --git a/node_modules/is-stream/index.js b/node_modules/is-stream/index.js new file mode 100644 index 0000000..2e43434 --- /dev/null +++ b/node_modules/is-stream/index.js @@ -0,0 +1,28 @@ +'use strict'; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; diff --git a/node_modules/is-stream/license b/node_modules/is-stream/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/is-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
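For orientation, a small sketch of the duck-typing checks implemented in `is-stream/index.js` above — illustrative only, not part of the vendored files:

```js
// Illustrative sketch of the checks in is-stream/index.js above.
const {Duplex, PassThrough} = require('stream');
const isStream = require('is-stream');

console.log(isStream(new PassThrough()));           //=> true  (non-null object with a .pipe function)
console.log(isStream.transform(new PassThrough())); //=> true  (PassThrough implements _transform)
console.log(isStream.duplex(new Duplex()));         //=> true  (passes both the writable and readable checks)
console.log(isStream({pipe() {}}));                 //=> true  (the top-level check only looks for .pipe)
console.log(isStream(null));                        //=> false
```

Because the top-level check only looks for a `.pipe` function, any object exposing one is reported as a stream; the `writable`, `readable`, `duplex`, and `transform` helpers add the stricter `_write`, `_read`, and `_transform` checks.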
diff --git a/node_modules/is-stream/package.json b/node_modules/is-stream/package.json new file mode 100644 index 0000000..c3b5673 --- /dev/null +++ b/node_modules/is-stream/package.json @@ -0,0 +1,42 @@ +{ + "name": "is-stream", + "version": "2.0.1", + "description": "Check if something is a Node.js stream", + "license": "MIT", + "repository": "sindresorhus/is-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "stream", + "type", + "streams", + "writable", + "readable", + "duplex", + "transform", + "check", + "detect", + "is" + ], + "devDependencies": { + "@types/node": "^11.13.6", + "ava": "^1.4.1", + "tempy": "^0.3.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-stream/readme.md b/node_modules/is-stream/readme.md new file mode 100644 index 0000000..19308e7 --- /dev/null +++ b/node_modules/is-stream/readme.md @@ -0,0 +1,60 @@ +# is-stream + +> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) + +## Install + +``` +$ npm install is-stream +``` + +## Usage + +```js +const fs = require('fs'); +const isStream = require('is-stream'); + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` + +## API + +### isStream(stream) + +Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + +#### isStream.writable(stream) + +Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + +#### isStream.readable(stream) + +Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + +#### isStream.duplex(stream) + +Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + +#### isStream.transform(stream) + +Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + +## Related + +- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream + +--- + +
+
+Get professional support for this package with a Tidelift subscription.
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
+
diff --git a/node_modules/isexe/.npmignore b/node_modules/isexe/.npmignore new file mode 100644 index 0000000..c1cb757 --- /dev/null +++ b/node_modules/isexe/.npmignore @@ -0,0 +1,2 @@ +.nyc_output/ +coverage/ diff --git a/node_modules/isexe/LICENSE b/node_modules/isexe/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/isexe/README.md b/node_modules/isexe/README.md new file mode 100644 index 0000000..35769e8 --- /dev/null +++ b/node_modules/isexe/README.md @@ -0,0 +1,51 @@ +# isexe + +Minimal module to check if a file is executable, and a normal file. + +Uses `fs.stat` and tests against the `PATHEXT` environment variable on +Windows. + +## USAGE + +```javascript +var isexe = require('isexe') +isexe('some-file-name', function (err, isExe) { + if (err) { + console.error('probably file does not exist or something', err) + } else if (isExe) { + console.error('this thing can be run') + } else { + console.error('cannot be run') + } +}) + +// same thing but synchronous, throws errors +var isExe = isexe.sync('some-file-name') + +// treat errors as just "not executable" +isexe('maybe-missing-file', { ignoreErrors: true }, callback) +var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) +``` + +## API + +### `isexe(path, [options], [callback])` + +Check if the path is executable. If no callback provided, and a +global `Promise` object is available, then a Promise will be returned. + +Will raise whatever errors may be raised by `fs.stat`, unless +`options.ignoreErrors` is set to true. + +### `isexe.sync(path, [options])` + +Same as `isexe` but returns the value and throws any errors raised. + +### Options + +* `ignoreErrors` Treat all errors as "no, this is not executable", but + don't raise them. +* `uid` Number to use as the user id +* `gid` Number to use as the group id +* `pathExt` List of path extensions to use instead of `PATHEXT` + environment variable on Windows. 
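A short sketch of the callback, promise, and `ignoreErrors` usage described in the README above — illustrative only, not part of the vendored files; `/usr/bin/env` is just a placeholder path:

```js
// Sketch of the API documented in the isexe README above.
var isexe = require('isexe');

// Callback style: errors from fs.stat are passed through unless ignoreErrors is set.
isexe('/usr/bin/env', function (err, isExe) {
  if (err) {
    console.error('could not stat the file', err);
  } else {
    console.log('executable?', isExe);
  }
});

// With no callback and a global Promise available, a Promise is returned instead.
isexe('maybe-missing-file', { ignoreErrors: true }).then(function (isExe) {
  console.log('executable?', isExe); // resolves to false if the file is missing, rather than rejecting
});
```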
diff --git a/node_modules/isexe/index.js b/node_modules/isexe/index.js new file mode 100644 index 0000000..553fb32 --- /dev/null +++ b/node_modules/isexe/index.js @@ -0,0 +1,57 @@ +var fs = require('fs') +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = require('./windows.js') +} else { + core = require('./mode.js') +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } + + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } + + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} + +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } +} diff --git a/node_modules/isexe/mode.js b/node_modules/isexe/mode.js new file mode 100644 index 0000000..1995ea4 --- /dev/null +++ b/node_modules/isexe/mode.js @@ -0,0 +1,41 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} + +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} + +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid + + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? + options.gid : process.getgid && process.getgid() + + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g + + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 + + return ret +} diff --git a/node_modules/isexe/package.json b/node_modules/isexe/package.json new file mode 100644 index 0000000..e452689 --- /dev/null +++ b/node_modules/isexe/package.json @@ -0,0 +1,31 @@ +{ + "name": "isexe", + "version": "2.0.0", + "description": "Minimal module to check if a file is executable.", + "main": "index.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.5.0", + "tap": "^10.3.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/isexe.git" + }, + "keywords": [], + "bugs": { + "url": "https://github.com/isaacs/isexe/issues" + }, + "homepage": "https://github.com/isaacs/isexe#readme" +} diff --git a/node_modules/isexe/test/basic.js b/node_modules/isexe/test/basic.js new file mode 100644 index 0000000..d926df6 --- /dev/null +++ b/node_modules/isexe/test/basic.js @@ -0,0 +1,221 @@ +var t = require('tap') +var fs = require('fs') +var path = require('path') +var fixture = path.resolve(__dirname, 'fixtures') +var meow = fixture + '/meow.cat' +var mine = fixture + '/mine.cat' +var ours = fixture + '/ours.cat' +var fail = fixture + '/fail.false' +var noent = fixture + '/enoent.exe' +var mkdirp = require('mkdirp') +var rimraf = require('rimraf') + +var isWindows = process.platform === 'win32' +var hasAccess = typeof fs.access === 'function' +var winSkip = isWindows && 'windows' +var accessSkip = !hasAccess && 'no fs.access function' +var hasPromise = typeof Promise === 'function' +var promiseSkip = !hasPromise && 'no global Promise' + +function reset () { + delete require.cache[require.resolve('../')] + return require('../') +} + +t.test('setup fixtures', function (t) { + rimraf.sync(fixture) + mkdirp.sync(fixture) + fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n') + fs.chmodSync(meow, parseInt('0755', 8)) + fs.writeFileSync(fail, '#!/usr/bin/env false\n') + fs.chmodSync(fail, parseInt('0644', 8)) + fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n') + fs.chmodSync(mine, parseInt('0744', 8)) + fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n') + fs.chmodSync(ours, parseInt('0754', 8)) + t.end() +}) + +t.test('promise', { skip: promiseSkip }, function (t) { + var isexe = reset() + t.test('meow async', function (t) { + isexe(meow).then(function (is) { + t.ok(is) + t.end() + }) + }) + t.test('fail async', function (t) { + isexe(fail).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.test('noent async', function (t) { + isexe(noent).catch(function (er) { + t.ok(er) + t.end() + }) + }) + t.test('noent ignore async', function (t) { + isexe(noent, { ignoreErrors: true }).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.end() +}) + +t.test('no promise', function (t) { + global.Promise = null + var isexe = reset() + t.throws('try to meow a promise', function () { + isexe(meow) + }) + t.end() +}) + +t.test('access', { skip: accessSkip || winSkip }, function (t) { + runTest(t) +}) + +t.test('mode', { skip: winSkip }, function (t) { + delete fs.access + delete fs.accessSync + var isexe = reset() + t.ok(isexe.sync(ours, { uid: 0, gid: 0 })) + t.ok(isexe.sync(mine, { uid: 0, gid: 0 })) + runTest(t) +}) + +t.test('windows', function (t) { + global.TESTING_WINDOWS = true + var pathExt = '.EXE;.CAT;.CMD;.COM' + t.test('pathExt option', function (t) { + runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' }) + }) + t.test('pathExt env', function (t) { + process.env.PATHEXT = pathExt + runTest(t) + }) + t.test('no pathExt', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. + runTest(t, { pathExt: '', skipFail: true }) + }) + t.test('pathext with empty entry', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. 
+ runTest(t, { pathExt: ';' + pathExt, skipFail: true }) + }) + t.end() +}) + +t.test('cleanup', function (t) { + rimraf.sync(fixture) + t.end() +}) + +function runTest (t, options) { + var isexe = reset() + + var optionsIgnore = Object.create(options || {}) + optionsIgnore.ignoreErrors = true + + if (!options || !options.skipFail) { + t.notOk(isexe.sync(fail, options)) + } + t.notOk(isexe.sync(noent, optionsIgnore)) + if (!options) { + t.ok(isexe.sync(meow)) + } else { + t.ok(isexe.sync(meow, options)) + } + + t.ok(isexe.sync(mine, options)) + t.ok(isexe.sync(ours, options)) + t.throws(function () { + isexe.sync(noent, options) + }) + + t.test('meow async', function (t) { + if (!options) { + isexe(meow, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } else { + isexe(meow, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } + }) + + t.test('mine async', function (t) { + isexe(mine, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + t.test('ours async', function (t) { + isexe(ours, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + if (!options || !options.skipFail) { + t.test('fail async', function (t) { + isexe(fail, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + } + + t.test('noent async', function (t) { + isexe(noent, options, function (er, is) { + t.ok(er) + t.notOk(is) + t.end() + }) + }) + + t.test('noent ignore async', function (t) { + isexe(noent, optionsIgnore, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.test('directory is not executable', function (t) { + isexe(__dirname, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.end() +} diff --git a/node_modules/isexe/windows.js b/node_modules/isexe/windows.js new file mode 100644 index 0000000..3499673 --- /dev/null +++ b/node_modules/isexe/windows.js @@ -0,0 +1,42 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT + + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false +} + +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? 
false : checkStat(stat, path, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} diff --git a/node_modules/merge-stream/LICENSE b/node_modules/merge-stream/LICENSE new file mode 100644 index 0000000..94a4c0a --- /dev/null +++ b/node_modules/merge-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Stephen Sugden (stephensugden.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/merge-stream/README.md b/node_modules/merge-stream/README.md new file mode 100644 index 0000000..0d54841 --- /dev/null +++ b/node_modules/merge-stream/README.md @@ -0,0 +1,78 @@ +# merge-stream + +Merge (interleave) a bunch of streams. + +[![build status](https://secure.travis-ci.org/grncdr/merge-stream.svg?branch=master)](http://travis-ci.org/grncdr/merge-stream) + +## Synopsis + +```javascript +var stream1 = new Stream(); +var stream2 = new Stream(); + +var merged = mergeStream(stream1, stream2); + +var stream3 = new Stream(); +merged.add(stream3); +merged.isEmpty(); +//=> false +``` + +## Description + +This is adapted from [event-stream](https://github.com/dominictarr/event-stream) separated into a new module, using Streams3. + +## API + +### `mergeStream` + +Type: `function` + +Merges an arbitrary number of streams. Returns a merged stream. + +#### `merged.add` + +A method to dynamically add more sources to the stream. The argument supplied to `add` can be either a source or an array of sources. + +#### `merged.isEmpty` + +A method that tells you if the merged stream is empty. + +When a stream is "empty" (aka. no sources were added), it could not be returned to a gulp task. + +So, we could do something like this: + +```js +stream = require('merge-stream')(); +// Something like a loop to add some streams to the merge stream +// stream.add(streamA); +// stream.add(streamB); +return stream.isEmpty() ? 
null : stream; +``` + +## Gulp example + +An example use case for **merge-stream** is to combine parts of a task in a project's **gulpfile.js** like this: + +```js +const gulp = require('gulp'); +const htmlValidator = require('gulp-w3c-html-validator'); +const jsHint = require('gulp-jshint'); +const mergeStream = require('merge-stream'); + +function lint() { + return mergeStream( + gulp.src('src/*.html') + .pipe(htmlValidator()) + .pipe(htmlValidator.reporter()), + gulp.src('src/*.js') + .pipe(jsHint()) + .pipe(jsHint.reporter()) + ); +} +gulp.task('lint', lint); +``` + +## License + +MIT diff --git a/node_modules/merge-stream/index.js b/node_modules/merge-stream/index.js new file mode 100644 index 0000000..b1a9e1a --- /dev/null +++ b/node_modules/merge-stream/index.js @@ -0,0 +1,41 @@ +'use strict'; + +const { PassThrough } = require('stream'); + +module.exports = function (/*streams...*/) { + var sources = [] + var output = new PassThrough({objectMode: true}) + + output.setMaxListeners(0) + + output.add = add + output.isEmpty = isEmpty + + output.on('unpipe', remove) + + Array.prototype.slice.call(arguments).forEach(add) + + return output + + function add (source) { + if (Array.isArray(source)) { + source.forEach(add) + return this + } + + sources.push(source); + source.once('end', remove.bind(null, source)) + source.once('error', output.emit.bind(output, 'error')) + source.pipe(output, {end: false}) + return this + } + + function isEmpty () { + return sources.length == 0; + } + + function remove (source) { + sources = sources.filter(function (it) { return it !== source }) + if (!sources.length && output.readable) { output.end() } + } +} diff --git a/node_modules/merge-stream/package.json b/node_modules/merge-stream/package.json new file mode 100644 index 0000000..1a4c54c --- /dev/null +++ b/node_modules/merge-stream/package.json @@ -0,0 +1,19 @@ +{ + "name": "merge-stream", + "version": "2.0.0", + "description": "Create a stream that emits events from multiple other streams", + "files": [ + "index.js" + ], + "scripts": { + "test": "istanbul cover test.js && istanbul check-cover --statements 100 --branches 100" + }, + "repository": "grncdr/merge-stream", + "author": "Stephen Sugden ", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "from2": "^2.0.3", + "istanbul": "^0.4.5" + } +} diff --git a/node_modules/merge2/LICENSE b/node_modules/merge2/LICENSE new file mode 100644 index 0000000..31dd9c7 --- /dev/null +++ b/node_modules/merge2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2020 Teambition + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/merge2/README.md b/node_modules/merge2/README.md new file mode 100644 index 0000000..27f8eb9 --- /dev/null +++ b/node_modules/merge2/README.md @@ -0,0 +1,144 @@ +# merge2 + +Merge multiple streams into one stream in sequence or parallel. + +[![NPM version][npm-image]][npm-url] +[![Build Status][travis-image]][travis-url] +[![Downloads][downloads-image]][downloads-url] + +## Install + +Install with [npm](https://npmjs.org/package/merge2) + +```sh +npm install merge2 +``` + +## Usage + +```js +const gulp = require('gulp') +const merge2 = require('merge2') +const concat = require('gulp-concat') +const minifyHtml = require('gulp-minify-html') +const ngtemplate = require('gulp-ngtemplate') + +gulp.task('app-js', function () { + return merge2( + gulp.src('static/src/tpl/*.html') + .pipe(minifyHtml({empty: true})) + .pipe(ngtemplate({ + module: 'genTemplates', + standalone: true + }) + ), gulp.src([ + 'static/src/js/app.js', + 'static/src/js/locale_zh-cn.js', + 'static/src/js/router.js', + 'static/src/js/tools.js', + 'static/src/js/services.js', + 'static/src/js/filters.js', + 'static/src/js/directives.js', + 'static/src/js/controllers.js' + ]) + ) + .pipe(concat('app.js')) + .pipe(gulp.dest('static/dist/js/')) +}) +``` + +```js +const stream = merge2([stream1, stream2], stream3, {end: false}) +//... +stream.add(stream4, stream5) +//.. +stream.end() +``` + +```js +// equal to merge2([stream1, stream2], stream3) +const stream = merge2() +stream.add([stream1, stream2]) +stream.add(stream3) +``` + +```js +// merge order: +// 1. merge `stream1`; +// 2. merge `stream2` and `stream3` in parallel after `stream1` merged; +// 3. merge 'stream4' after `stream2` and `stream3` merged; +const stream = merge2(stream1, [stream2, stream3], stream4) + +// merge order: +// 1. merge `stream5` and `stream6` in parallel after `stream4` merged; +// 2. merge 'stream7' after `stream5` and `stream6` merged; +stream.add([stream5, stream6], stream7) +``` + +```js +// nest merge +// equal to merge2(stream1, stream2, stream6, stream3, [stream4, stream5]); +const streamA = merge2(stream1, stream2) +const streamB = merge2(stream3, [stream4, stream5]) +const stream = merge2(streamA, streamB) +streamA.add(stream6) +``` + +## API + +```js +const merge2 = require('merge2') +``` + +### merge2() + +### merge2(options) + +### merge2(stream1, stream2, ..., streamN) + +### merge2(stream1, stream2, ..., streamN, options) + +### merge2(stream1, [stream2, stream3, ...], streamN, options) + +return a duplex stream (mergedStream). streams in array will be merged in parallel. + +### mergedStream.add(stream) + +### mergedStream.add(stream1, [stream2, stream3, ...], ...) + +return the mergedStream. + +### mergedStream.on('queueDrain', function() {}) + +It will emit 'queueDrain' when all streams merged. If you set `end === false` in options, this event give you a notice that should add more streams to merge or end the mergedStream. + +#### stream + +*option* +Type: `Readable` or `Duplex` or `Transform` stream. + +#### options + +*option* +Type: `Object`. + +* **end** - `Boolean` - if `end === false` then mergedStream will not be auto ended, you should end by yourself. 
**Default:** `undefined` + +* **pipeError** - `Boolean` - if `pipeError === true` then mergedStream will emit `error` event from source streams. **Default:** `undefined` + +* **objectMode** - `Boolean` . **Default:** `true` + +`objectMode` and other options(`highWaterMark`, `defaultEncoding` ...) is same as Node.js `Stream`. + +## License + +MIT © [Teambition](https://www.teambition.com) + +[npm-url]: https://npmjs.org/package/merge2 +[npm-image]: http://img.shields.io/npm/v/merge2.svg + +[travis-url]: https://travis-ci.org/teambition/merge2 +[travis-image]: http://img.shields.io/travis/teambition/merge2.svg + +[downloads-url]: https://npmjs.org/package/merge2 +[downloads-image]: http://img.shields.io/npm/dm/merge2.svg?style=flat-square diff --git a/node_modules/merge2/index.js b/node_modules/merge2/index.js new file mode 100644 index 0000000..78a61ed --- /dev/null +++ b/node_modules/merge2/index.js @@ -0,0 +1,144 @@ +'use strict' +/* + * merge2 + * https://github.com/teambition/merge2 + * + * Copyright (c) 2014-2020 Teambition + * Licensed under the MIT license. + */ +const Stream = require('stream') +const PassThrough = Stream.PassThrough +const slice = Array.prototype.slice + +module.exports = merge2 + +function merge2 () { + const streamsQueue = [] + const args = slice.call(arguments) + let merging = false + let options = args[args.length - 1] + + if (options && !Array.isArray(options) && options.pipe == null) { + args.pop() + } else { + options = {} + } + + const doEnd = options.end !== false + const doPipeError = options.pipeError === true + if (options.objectMode == null) { + options.objectMode = true + } + if (options.highWaterMark == null) { + options.highWaterMark = 64 * 1024 + } + const mergedStream = PassThrough(options) + + function addStream () { + for (let i = 0, len = arguments.length; i < len; i++) { + streamsQueue.push(pauseStreams(arguments[i], options)) + } + mergeStream() + return this + } + + function mergeStream () { + if (merging) { + return + } + merging = true + + let streams = streamsQueue.shift() + if (!streams) { + process.nextTick(endStream) + return + } + if (!Array.isArray(streams)) { + streams = [streams] + } + + let pipesCount = streams.length + 1 + + function next () { + if (--pipesCount > 0) { + return + } + merging = false + mergeStream() + } + + function pipe (stream) { + function onend () { + stream.removeListener('merge2UnpipeEnd', onend) + stream.removeListener('end', onend) + if (doPipeError) { + stream.removeListener('error', onerror) + } + next() + } + function onerror (err) { + mergedStream.emit('error', err) + } + // skip ended stream + if (stream._readableState.endEmitted) { + return next() + } + + stream.on('merge2UnpipeEnd', onend) + stream.on('end', onend) + + if (doPipeError) { + stream.on('error', onerror) + } + + stream.pipe(mergedStream, { end: false }) + // compatible for old stream + stream.resume() + } + + for (let i = 0; i < streams.length; i++) { + pipe(streams[i]) + } + + next() + } + + function endStream () { + merging = false + // emit 'queueDrain' when all streams merged. + mergedStream.emit('queueDrain') + if (doEnd) { + mergedStream.end() + } + } + + mergedStream.setMaxListeners(0) + mergedStream.add = addStream + mergedStream.on('unpipe', function (stream) { + stream.emit('merge2UnpipeEnd') + }) + + if (args.length) { + addStream.apply(null, args) + } + return mergedStream +} + +// check and pause streams for pipe. 
+function pauseStreams (streams, options) { + if (!Array.isArray(streams)) { + // Backwards-compat with old-style streams + if (!streams._readableState && streams.pipe) { + streams = streams.pipe(PassThrough(options)) + } + if (!streams._readableState || !streams.pause || !streams.pipe) { + throw new Error('Only readable stream can be merged.') + } + streams.pause() + } else { + for (let i = 0, len = streams.length; i < len; i++) { + streams[i] = pauseStreams(streams[i], options) + } + } + return streams +} diff --git a/node_modules/merge2/package.json b/node_modules/merge2/package.json new file mode 100644 index 0000000..7777307 --- /dev/null +++ b/node_modules/merge2/package.json @@ -0,0 +1,43 @@ +{ + "name": "merge2", + "description": "Merge multiple streams into one stream in sequence or parallel.", + "authors": [ + "Yan Qing " + ], + "license": "MIT", + "version": "1.4.1", + "main": "./index.js", + "repository": { + "type": "git", + "url": "git@github.com:teambition/merge2.git" + }, + "homepage": "https://github.com/teambition/merge2", + "keywords": [ + "merge2", + "multiple", + "sequence", + "parallel", + "merge", + "stream", + "merge stream", + "sync" + ], + "engines": { + "node": ">= 8" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^14.3.4", + "through2": "^3.0.1", + "thunks": "^4.9.6", + "tman": "^1.10.0", + "to-through": "^2.0.0" + }, + "scripts": { + "test": "standard && tman" + }, + "files": [ + "README.md", + "index.js" + ] +} diff --git a/node_modules/metacommon/.travis.yml b/node_modules/metacommon/.travis.yml new file mode 100644 index 0000000..d56cc68 --- /dev/null +++ b/node_modules/metacommon/.travis.yml @@ -0,0 +1,5 @@ +language: c +services: docker +os: linux +script: + - npm run docker diff --git a/node_modules/metacommon/README.md b/node_modules/metacommon/README.md new file mode 100644 index 0000000..b644db1 --- /dev/null +++ b/node_modules/metacommon/README.md @@ -0,0 +1,2 @@ +# metacommon [![npm version](https://badge.fury.io/js/metacommon.svg)](https://badge.fury.io/js/metacommon) +C++ Header-only repository with macros and very much metaprogramming diff --git a/node_modules/metacommon/cmaki.yml b/node_modules/metacommon/cmaki.yml new file mode 100644 index 0000000..1415403 --- /dev/null +++ b/node_modules/metacommon/cmaki.yml @@ -0,0 +1,14 @@ +- metacommon: + <<: *thirdparty_defaults + post_install: + - ./*.h include/metacommon/ + source: https://github.com/makiolo/metacommon.git + build: + | + #!/bin/bash + CMAKI_INSTALL=$SELFHOME npm install + targets: + # header only + - dummy: + info: + <<: *library_dynamic diff --git a/node_modules/metacommon/common.h b/node_modules/metacommon/common.h new file mode 100644 index 0000000..f439a2d --- /dev/null +++ b/node_modules/metacommon/common.h @@ -0,0 +1,426 @@ +#ifndef _META_COMMON_ +#define _META_COMMON_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifndef STATIC_MULTITHREAD +#if defined(__clang__) +#define STATIC_MULTITHREAD static +#elif defined(__GNUC__) || defined(__GNUG__) +#define STATIC_MULTITHREAD static __thread +#elif defined(_MSC_VER) +#define STATIC_MULTITHREAD __declspec(thread) static +#endif +#endif + +#if __cplusplus < 201402L && 0 + +namespace std { + +template +std::unique_ptr make_unique(Args&&... 
args) +{ + return std::unique_ptr(new T(std::forward(args)...)); +} + +} + +#endif + +namespace ctti { + +// http://stackoverflow.com/a/15863804 + +// helper function +constexpr unsigned c_strlen( char const* str, unsigned count = 0 ) +{ + return ('\0' == str[0]) ? count : c_strlen(str+1, count+1); +} + +// helper "function" struct +template < char t_c, char... tt_c > +struct rec_print +{ + static void print() + { + std::cout << t_c; + rec_print < tt_c... > :: print (); + } +}; + +template < char t_c > +struct rec_print < t_c > +{ + static void print() + { + std::cout << t_c; + } +}; + +// helper "function" struct +template < char t_c, char... tt_c > +struct rec_get +{ + static void get(std::stringstream& ss) + { + ss << t_c; + rec_get < tt_c... > :: get (ss); + } +}; + +template < char t_c > +struct rec_get < t_c > +{ + static void get(std::stringstream& ss) + { + ss << t_c; + } +}; + +template < char t_c, char... tt_c > +struct rec_hash +{ + static constexpr size_t hash(size_t seed) + { + return rec_hash ::hash(seed * 33 ^ static_cast(t_c)); + } +}; + +template < char t_c > +struct rec_hash < t_c > +{ + static constexpr size_t hash(size_t seed) + { + return seed * 33 ^ static_cast(t_c); + } +}; + +// destination "template string" type +template < char... tt_c > +struct str_typed_string +{ + static void print() + { + rec_print < tt_c... > :: print(); + std::cout << std::endl; + } + + static std::string get() + { + std::stringstream ss; + rec_get :: get(ss); + return ss.str(); + } + + static constexpr size_t hash() + { + return rec_hash ::hash(5381); + } +}; + +// struct to str_type a `char const*` to an `str_typed_string` type +template < typename T_StrProvider, unsigned t_len, char... tt_c > +struct str_type_impl +{ + using result = typename str_type_impl < T_StrProvider, t_len-1, + T_StrProvider::KEY()[t_len-1], + tt_c... > :: result; +}; + +template < typename T_StrProvider, char... tt_c > +struct str_type_impl < T_StrProvider, 0, tt_c... > +{ + using result = str_typed_string < tt_c... >; +}; + +// syntactical sugar +template < typename T_StrProvider > +using str_type = typename str_type_impl < T_StrProvider, c_strlen(T_StrProvider::KEY()) > :: result; + +} // end namespace + +namespace mc { + +/* + +template +class get_type +{ + template + using type = typename std::tuple_element >::type; +}; + +template +void __foreach_tuple(Function&& func, T&& elem) +{ + func(elem); +} + +template +void __foreach_tuple(Function&& func, T&& elem, Args&& ... args) +{ + // static_assert(std::is_same::type<0> >::value, ""); + func(elem); + __foreach_tuple(std::forward(func), std::forward(args)...); +} + +template +void _foreach_tuple(const std::tuple& t, Function&& func, std::index_sequence) +{ + __foreach_tuple(std::forward(func), std::get(t)...); +} + +template +void foreach_tuple(const std::tuple& t, Function&& func) +{ + _foreach_tuple(t, std::forward(func), std::make_index_sequence < + std::tuple_size< std::tuple >::value + >()); +} + +template +void foreach_args(Function&& func, Args&& ... 
args) +{ + foreach_tuple(std::make_tuple(std::forward(args)...), std::forward(func)); +} + +template +auto _vector_to_tuple(const std::vector& v, std::index_sequence) +{ + return std::make_tuple(v[N]...); +} + +template +auto vector_to_tuple(const std::vector& v) +{ + assert(v.size() >= N); + return _vector_to_tuple(v, std::make_index_sequence()); +} + +template +std::vector tuple_to_vector(const std::tuple& t) +{ + std::vector v; + foreach_tuple(t, [&v](const auto& d) { + v.emplace_back(d); + }); + return v; +} + +// http://aherrmann.github.io/programming/2016/02/28/unpacking-tuples-in-cpp14/ +template +constexpr auto index_apply_impl(F&& f, std::index_sequence) { + return f(std::integral_constant {}...); +} + +template +constexpr auto index_apply(F&& f) { + return index_apply_impl(std::forward(f), std::make_index_sequence{}); +} + +template +constexpr auto _head(Tuple t) { + return index_apply<1>([&](auto... Is) { + return std::make_tuple(std::get(t)...); + }); +} + +*/ + +template +constexpr auto head(Tuple t) +{ + return std::get<0>(_head(t)); +} + +/* + +template +constexpr auto tail(Tuple t) { + return index_apply{}-1u>([&](auto... Is) { + return std::make_tuple(std::get(t)...); + }); +} + +template +constexpr auto reverse(Tuple t) { + return index_apply{}>( + [&](auto... Is) { + return std::make_tuple( + std::get{} - 1 - Is>(t)...); + }); +} + +// http://open-std.org/jtc1/sc22/wg21/docs/papers/2014/n3829.pdf +template +constexpr auto apply(Function&& f, Tuple&& t) { + return index_apply{}>( + [&](auto... Is) { + return std::forward(f)( std::get(std::forward(t))... ); + }); +} + +*/ + +// http://talesofcpp.fusionfenix.com/post-14/true-story-moving-past-bind +template +auto bind(F&& f, Args&&... args) { + return [ + f = std::forward(f) + , args = std::make_tuple(std::forward(args)...) 
+ ]() mutable -> decltype(auto) { + return apply(std::move(f), std::move(args)); + }; +} + +} // end namespace mc + +// method macros +#define DEFINE_KEY(__CLASS__) \ + constexpr static char const* KEY() { return #__CLASS__; } \ + virtual const std::string& getKEY() const { static std::string key = #__CLASS__; return key; } \ + +// method non-macros (yes, exists optional macro :D) +#define DEFINE_HASH(__CLASS__) \ + namespace std { \ + template <> \ + struct hash<__CLASS__> \ + { size_t operator()() const { static size_t h = std::hash()(#__CLASS__); return h; } }; } \ + +#define DEFINE_HASH_CUSTOM(__CLASS__, __TYPE__, __VALUE__) \ + namespace std { \ + template <> \ + struct hash<__CLASS__> \ + { size_t operator()() const { static size_t h = std::hash<__TYPE__>()(__VALUE__); return h; } }; } \ + +template +class has_key +{ + typedef char(&yes)[2]; + + template struct Exists; + + template + static yes CheckMember(Exists*); + template + static char CheckMember(...); + +public: + static const bool value = (sizeof(CheckMember(0)) == sizeof(yes)); +}; + +template +class has_instance +{ + typedef char(&yes)[2]; + + template struct Exists; + + template + static yes CheckMember(Exists*); + template + static char CheckMember(...); + +public: + static const bool value = (sizeof(CheckMember(0)) == sizeof(yes)); +}; + +template +struct int_sequence +{ + +}; + +template +struct make_int_sequence : make_int_sequence +{ + +}; + +template +struct make_int_sequence<0, Is...> : int_sequence +{ + +}; + +template +struct placeholder_template +{ + +}; + +namespace std +{ + +template +struct is_placeholder> : integral_constant +{ + +}; + +} + +template +struct seq +{ +}; + +template +struct gens : gens +{ +}; + +template +struct gens<0, Is...> : seq +{ +}; + +namespace dp14 +{ + +template +class hash +{ +public: + template + size_t operator()(Args&&... args) const + { + size_t h = 0; + _hash_forwarding(h, std::forward(args)...); + return h; + } + +protected: + template + void _combine_hash(size_t& seed, U&& x) const + { + seed ^= std::hash()(std::forward(x)) + 0x9e3779b9 + (seed << 6) + (seed >> 2); + } + + template + void _hash_forwarding(size_t& h, U&& parm, Args&&... 
args) const + { + _combine_hash(h, std::forward(parm)); + _hash_forwarding(h, std::forward(args)...); + } + + template + void _hash_forwarding(size_t& h, U&& parm) const + { + _combine_hash(h, std::forward(parm)); + } +}; + +} + +#endif + diff --git a/node_modules/metacommon/compile.sh b/node_modules/metacommon/compile.sh new file mode 100644 index 0000000..cd0db0b --- /dev/null +++ b/node_modules/metacommon/compile.sh @@ -0,0 +1,3 @@ +#!/bin/bash +echo ok + diff --git a/node_modules/metacommon/package.json b/node_modules/metacommon/package.json new file mode 100644 index 0000000..3aa120f --- /dev/null +++ b/node_modules/metacommon/package.json @@ -0,0 +1,32 @@ +{ + "name": "metacommon", + "version": "1.0.1", + "description": "C++ Header-only repository with macros and very much metaprogramming", + "repository": { + "type": "git", + "url": "git+https://github.com/makiolo/metacommon.git" + }, + "scripts": { + "ci": "curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh | bash", + "docker": "curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/docker.sh | bash", + "clean": "cmaki clean", + "setup": "echo ok", + "compile": "echo ok", + "install": "echo ok", + "test": "echo ok", + "upload": "cmaki upload" + }, + "keywords": [ + "c++", + "metaprogramming" + ], + "author": "Ricardo Marmolejo García", + "license": "MIT", + "bugs": { + "url": "https://github.com/makiolo/metacommon/issues" + }, + "devDependencies": { + "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" + }, + "homepage": "https://github.com/makiolo/metacommon#readme" +} diff --git a/node_modules/metacommon/setup.sh b/node_modules/metacommon/setup.sh new file mode 100644 index 0000000..cd0db0b --- /dev/null +++ b/node_modules/metacommon/setup.sh @@ -0,0 +1,3 @@ +#!/bin/bash +echo ok + diff --git a/node_modules/micromatch/LICENSE b/node_modules/micromatch/LICENSE new file mode 100755 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/micromatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/micromatch/README.md b/node_modules/micromatch/README.md new file mode 100644 index 0000000..d72a059 --- /dev/null +++ b/node_modules/micromatch/README.md @@ -0,0 +1,1024 @@ +# micromatch [![NPM version](https://img.shields.io/npm/v/micromatch.svg?style=flat)](https://www.npmjs.com/package/micromatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![NPM total downloads](https://img.shields.io/npm/dt/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![Tests](https://github.com/micromatch/micromatch/actions/workflows/test.yml/badge.svg)](https://github.com/micromatch/micromatch/actions/workflows/test.yml) + +> Glob matching for javascript/node.js. A replacement and faster alternative to minimatch and multimatch. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Table of Contents + +
+Details + + * [Install](#install) +- [Sponsors](#sponsors) + * [Gold Sponsors](#gold-sponsors) + * [Quickstart](#quickstart) + * [Why use micromatch?](#why-use-micromatch) + + [Matching features](#matching-features) + * [Switching to micromatch](#switching-to-micromatch) + + [From minimatch](#from-minimatch) + + [From multimatch](#from-multimatch) + * [API](#api) + * [Options](#options) + * [Options Examples](#options-examples) + + [options.basename](#optionsbasename) + + [options.bash](#optionsbash) + + [options.expandRange](#optionsexpandrange) + + [options.format](#optionsformat) + + [options.ignore](#optionsignore) + + [options.matchBase](#optionsmatchbase) + + [options.noextglob](#optionsnoextglob) + + [options.nonegate](#optionsnonegate) + + [options.noglobstar](#optionsnoglobstar) + + [options.nonull](#optionsnonull) + + [options.nullglob](#optionsnullglob) + + [options.onIgnore](#optionsonignore) + + [options.onMatch](#optionsonmatch) + + [options.onResult](#optionsonresult) + + [options.posixSlashes](#optionsposixslashes) + + [options.unescape](#optionsunescape) + * [Extended globbing](#extended-globbing) + + [Extglobs](#extglobs) + + [Braces](#braces) + + [Regex character classes](#regex-character-classes) + + [Regex groups](#regex-groups) + + [POSIX bracket expressions](#posix-bracket-expressions) + * [Notes](#notes) + + [Bash 4.3 parity](#bash-43-parity) + + [Backslashes](#backslashes) + * [Benchmarks](#benchmarks) + + [Running benchmarks](#running-benchmarks) + + [Latest results](#latest-results) + * [Contributing](#contributing) + * [About](#about) + +
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save micromatch +``` + +
+ +# Sponsors + +[Become a Sponsor](https://github.com/sponsors/jonschlinkert) to add your logo to this README, or any of [my other projects](https://github.com/jonschlinkert?tab=repositories&q=&type=&language=&sort=stargazers) + +
+
+## Quickstart
+
+```js
+const micromatch = require('micromatch');
+// micromatch(list, patterns[, options]);
+```
+
+The [main export](#micromatch) takes a list of strings and one or more glob patterns:
+
+```js
+console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['f*', 'b*'])) //=> ['foo', 'bar', 'baz']
+console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['*', '!b*'])) //=> ['foo', 'qux']
+```
+
+Use [.isMatch()](#ismatch) for boolean matching:
+
+```js
+console.log(micromatch.isMatch('foo', 'f*')) //=> true
+console.log(micromatch.isMatch('foo', ['b*', 'f*'])) //=> true
+```
+
+[Switching](#switching-to-micromatch) from minimatch and multimatch is easy!
+
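+As a rough sketch of how the main export can be combined with Node's `fs` module to filter real directory entries (the `src/` directory and the example file names here are assumptions, not part of the upstream docs), something like this should work:
+
+```js
+const fs = require('fs');
+const micromatch = require('micromatch');
+
+// Read the entries of a (hypothetical) local src/ directory and keep only
+// .js and .json files, excluding test files, using one call to the main
+// export with a brace pattern plus a negation pattern.
+const entries = fs.readdirSync('src');
+const wanted = micromatch(entries, ['*.{js,json}', '!*.test.js']);
+
+console.log(wanted);
+//=> e.g. ['index.js', 'package.json'], depending on the directory contents
+```
+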
+ +## Why use micromatch? + +> micromatch is a [replacement](#switching-to-micromatch) for minimatch and multimatch + +* Supports all of the same matching features as [minimatch](https://github.com/isaacs/minimatch) and [multimatch](https://github.com/sindresorhus/multimatch) +* More complete support for the Bash 4.3 specification than minimatch and multimatch. Micromatch passes _all of the spec tests_ from bash, including some that bash still fails. +* **Fast & Performant** - Loads in about 5ms and performs [fast matches](#benchmarks). +* **Glob matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories +* **[Advanced globbing](#extended-globbing)** - Supports [extglobs](#extglobs), [braces](#braces-1), and [POSIX brackets](#posix-bracket-expressions), and support for escaping special characters with `\` or quotes. +* **Accurate** - Covers more scenarios [than minimatch](https://github.com/yarnpkg/yarn/pull/3339) +* **Well tested** - More than 5,000 [test assertions](./test) +* **Windows support** - More reliable windows support than minimatch and multimatch. +* **[Safe](https://github.com/micromatch/braces#braces-is-safe)** - Micromatch is not subject to DoS with brace patterns like minimatch and multimatch. + +### Matching features + +* Support for multiple glob patterns (no need for wrappers like multimatch) +* Wildcards (`**`, `*.js`) +* Negation (`'!a/*.js'`, `'*!(b).js'`) +* [extglobs](#extglobs) (`+(x|y)`, `!(a|b)`) +* [POSIX character classes](#posix-bracket-expressions) (`[[:alpha:][:digit:]]`) +* [brace expansion](https://github.com/micromatch/braces) (`foo/{1..5}.md`, `bar/{a,b,c}.js`) +* regex character classes (`foo-[1-5].js`) +* regex logical "or" (`foo/(abc|xyz).js`) + +You can mix and match these features to create whatever patterns you need! + +## Switching to micromatch + +_(There is one notable difference between micromatch and minimatch in regards to how backslashes are handled. See [the notes about backslashes](#backslashes) for more information.)_ + +### From minimatch + +Use [micromatch.isMatch()](#ismatch) instead of `minimatch()`: + +```js +console.log(micromatch.isMatch('foo', 'b*')); //=> false +``` + +Use [micromatch.match()](#match) instead of `minimatch.match()`: + +```js +console.log(micromatch.match(['foo', 'bar'], 'b*')); //=> 'bar' +``` + +### From multimatch + +Same signature: + +```js +console.log(micromatch(['foo', 'bar', 'baz'], ['f*', '*z'])); //=> ['foo', 'baz'] +``` + +## API + +**Params** + +* `list` **{String|Array}**: List of strings to match. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) +* `returns` **{Array}**: Returns an array of matches + +**Example** + +```js +const mm = require('micromatch'); +// mm(list, patterns[, options]); + +console.log(mm(['a.js', 'a.txt'], ['*.js'])); +//=> [ 'a.js' ] +``` + +### [.matcher](index.js#L109) + +Returns a matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. + +**Params** + +* `pattern` **{String}**: Glob pattern +* `options` **{Object}** +* `returns` **{Function}**: Returns a matcher function. 
+ +**Example** + +```js +const mm = require('micromatch'); +// mm.matcher(pattern[, options]); + +const isMatch = mm.matcher('*.!(*a)'); +console.log(isMatch('a.a')); //=> false +console.log(isMatch('a.b')); //=> true +``` + +### [.isMatch](index.js#L128) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* `str` **{String}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `[options]` **{Object}**: See available [options](#options). +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const mm = require('micromatch'); +// mm.isMatch(string, patterns[, options]); + +console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true +console.log(mm.isMatch('a.a', 'b.*')); //=> false +``` + +### [.not](index.js#L153) + +Returns a list of strings that _**do not match any**_ of the given `patterns`. + +**Params** + +* `list` **{Array}**: Array of strings to match. +* `patterns` **{String|Array}**: One or more glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. + +**Example** + +```js +const mm = require('micromatch'); +// mm.not(list, patterns[, options]); + +console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); +//=> ['b.b', 'c.c'] +``` + +### [.contains](index.js#L193) + +Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. + +**Params** + +* `str` **{String}**: The string to match. +* `patterns` **{String|Array}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any of the patterns matches any part of `str`. + +**Example** + +```js +var mm = require('micromatch'); +// mm.contains(string, pattern[, options]); + +console.log(mm.contains('aa/bb/cc', '*b')); +//=> true +console.log(mm.contains('aa/bb/cc', '*d')); +//=> false +``` + +### [.matchKeys](index.js#L235) + +Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. + +**Params** + +* `object` **{Object}**: The object with keys to filter. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Object}**: Returns an object with only keys that match the given patterns. + +**Example** + +```js +const mm = require('micromatch'); +// mm.matchKeys(object, patterns[, options]); + +const obj = { aa: 'a', ab: 'b', ac: 'c' }; +console.log(mm.matchKeys(obj, '*b')); +//=> { ab: 'b' } +``` + +### [.some](index.js#L264) + +Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
+* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any `patterns` matches any of the strings in `list` + +**Example** + +```js +const mm = require('micromatch'); +// mm.some(list, patterns[, options]); + +console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// true +console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.every](index.js#L300) + +Returns true if every string in the given `list` matches any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if all `patterns` matches all of the strings in `list` + +**Example** + +```js +const mm = require('micromatch'); +// mm.every(list, patterns[, options]); + +console.log(mm.every('foo.js', ['foo.js'])); +// true +console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); +// true +console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// false +console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.all](index.js#L339) + +Returns true if **all** of the given `patterns` match the specified string. + +**Params** + +* `str` **{String|Array}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const mm = require('micromatch'); +// mm.all(string, patterns[, options]); + +console.log(mm.all('foo.js', ['foo.js'])); +// true + +console.log(mm.all('foo.js', ['*.js', '!foo.js'])); +// false + +console.log(mm.all('foo.js', ['*.js', 'foo.js'])); +// true + +console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); +// true +``` + +### [.capture](index.js#L366) + +Returns an array of matches captured by `pattern` in `string, or`null` if the pattern did not match. + +**Params** + +* `glob` **{String}**: Glob pattern to use for matching. +* `input` **{String}**: String to match +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array|null}**: Returns an array of captures if the input matches the glob pattern, otherwise `null`. + +**Example** + +```js +const mm = require('micromatch'); +// mm.capture(pattern, string[, options]); + +console.log(mm.capture('test/*.js', 'test/foo.js')); +//=> ['foo'] +console.log(mm.capture('test/*.js', 'foo/bar.css')); +//=> null +``` + +### [.makeRe](index.js#L392) + +Create a regular expression from the given glob `pattern`. + +**Params** + +* `pattern` **{String}**: A glob pattern to convert to regex. +* `options` **{Object}** +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +const mm = require('micromatch'); +// mm.makeRe(pattern[, options]); + +console.log(mm.makeRe('*.js')); +//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ +``` + +### [.scan](index.js#L408) + +Scan a glob pattern to separate the pattern into segments. Used by the [split](#split) method. 
+ +**Params** + +* `pattern` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with + +**Example** + +```js +const mm = require('micromatch'); +const state = mm.scan(pattern[, options]); +``` + +### [.parse](index.js#L424) + +Parse a glob pattern to create the source string for a regular expression. + +**Params** + +* `glob` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with useful properties and output to be used as regex source string. + +**Example** + +```js +const mm = require('micromatch'); +const state = mm.parse(pattern[, options]); +``` + +### [.braces](index.js#L451) + +Process the given brace `pattern`. + +**Params** + +* `pattern` **{String}**: String with brace pattern to process. +* `options` **{Object}**: Any [options](#options) to change how expansion is performed. See the [braces](https://github.com/micromatch/braces) library for all available options. +* `returns` **{Array}** + +**Example** + +```js +const { braces } = require('micromatch'); +console.log(braces('foo/{a,b,c}/bar')); +//=> [ 'foo/(a|b|c)/bar' ] + +console.log(braces('foo/{a,b,c}/bar', { expand: true })); +//=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] +``` + +## Options + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | +| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | +| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | +| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | +| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | +| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | +| `dot` | `boolean` | `false` | Match dotfiles. Otherwise dotfiles are ignored unless a `.` is explicitly defined in the pattern. | +| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. This option is overridden by the `expandBrace` option. | +| `failglob` | `boolean` | `false` | Similar to the `failglob` behavior in Bash, throws an error when no matches are found. Based on the bash option of the same name. | +| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | +| `flags` | `boolean` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | +| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | +| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. 
| +| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | +| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | +| `lookbehinds` | `boolean` | `true` | Support regex positive and negative lookbehinds. Note that you must be using Node 8.1.10 or higher to enable regex lookbehinds. | +| `matchBase` | `boolean` | `false` | Alias for `basename` | +| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | +| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | +| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | +| `nocase` | `boolean` | `false` | Perform case-insensitive matching. Equivalent to the regex `i` flag. Note that this option is ignored when the `flags` option is defined. | +| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. | +| `noext` | `boolean` | `false` | Alias for `noextglob` | +| `noextglob` | `boolean` | `false` | Disable support for matching with [extglobs](#extglobs) (like `+(a\|b)`) | +| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | +| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | +| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | +| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | +| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | +| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | +| `posix` | `boolean` | `false` | Support [POSIX character classes](#posix-bracket-expressions) ("posix brackets"). | +| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | +| `prepend` | `string` | `undefined` | String to prepend to the generated regex used for matching. | +| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | +| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | +| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | +| `unescape` | `boolean` | `undefined` | Remove preceding backslashes from escaped glob characters before creating the regular expression to perform matches. | +| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatitibility. | + +## Options Examples + +### options.basename + +Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`. 
+ +**Type**: `Boolean` + +**Default**: `false` + +**Example** + +```js +micromatch(['a/b.js', 'a/c.md'], '*.js'); +//=> [] + +micromatch(['a/b.js', 'a/c.md'], '*.js', { basename: true }); +//=> ['a/b.js'] +``` + +### options.bash + +Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as any other star. + +**Type**: `Boolean` + +**Default**: `true` + +**Example** + +```js +const files = ['abc', 'ajz']; +console.log(micromatch(files, '[a-c]*')); +//=> ['abc', 'ajz'] + +console.log(micromatch(files, '[a-c]*', { bash: false })); +``` + +### options.expandRange + +**Type**: `function` + +**Default**: `undefined` + +Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. + +**Example** + +The following example shows how to create a glob that matches a numeric folder name between `01` and `25`, with leading zeros. + +```js +const fill = require('fill-range'); +const regex = micromatch.makeRe('foo/{01..25}/bar', { + expandRange(a, b) { + return `(${fill(a, b, { toRegex: true })})`; + } +}); + +console.log(regex) +//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ + +console.log(regex.test('foo/00/bar')) // false +console.log(regex.test('foo/01/bar')) // true +console.log(regex.test('foo/10/bar')) // true +console.log(regex.test('foo/22/bar')) // true +console.log(regex.test('foo/25/bar')) // true +console.log(regex.test('foo/26/bar')) // false +``` + +### options.format + +**Type**: `function` + +**Default**: `undefined` + +Custom function for formatting strings before they're matched. + +**Example** + +```js +// strip leading './' from strings +const format = str => str.replace(/^\.\//, ''); +const isMatch = picomatch('foo/*.js', { format }); +console.log(isMatch('./foo/bar.js')) //=> true +``` + +### options.ignore + +String or array of glob patterns to match files to ignore. + +**Type**: `String|Array` + +**Default**: `undefined` + +```js +const isMatch = micromatch.matcher('*', { ignore: 'f*' }); +console.log(isMatch('foo')) //=> false +console.log(isMatch('bar')) //=> true +console.log(isMatch('baz')) //=> true +``` + +### options.matchBase + +Alias for [options.basename](#options-basename). + +### options.noextglob + +Disable extglob support, so that [extglobs](#extglobs) are regarded as literal characters. + +**Type**: `Boolean` + +**Default**: `undefined` + +**Examples** + +```js +console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)')); +//=> ['a/b', 'a/!(z)'] + +console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)', { noextglob: true })); +//=> ['a/!(z)'] (matches only as literal characters) +``` + +### options.nonegate + +Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.noglobstar + +Disable matching with globstars (`**`). + +**Type**: `Boolean` + +**Default**: `undefined` + +```js +micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**'); +//=> ['a/b', 'a/b/c', 'a/b/c/d'] + +micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true}); +//=> ['a/b'] +``` + +### options.nonull + +Alias for [options.nullglob](#options-nullglob). 
+ +### options.nullglob + +If `true`, when no matches are found the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nonull`. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.onIgnore + +```js +const onIgnore = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); + // { glob: '*', regex: /^(?:(?!\.)(?=.)[^\/]*?\/?)$/, input: 'foo', output: 'foo' } +}; + +const isMatch = micromatch.matcher('*', { onIgnore, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +### options.onMatch + +```js +const onMatch = ({ glob, regex, input, output }) => { + console.log({ input, output }); + // { input: 'some\\path', output: 'some/path' } + // { input: 'some\\path', output: 'some/path' } + // { input: 'some\\path', output: 'some/path' } +}; + +const isMatch = micromatch.matcher('**', { onMatch, posixSlashes: true }); +isMatch('some\\path'); +isMatch('some\\path'); +isMatch('some\\path'); +``` + +### options.onResult + +```js +const onResult = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = micromatch('*', { onResult, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +### options.posixSlashes + +Convert path separators on returned files to posix/unix-style forward slashes. Aliased as `unixify` for backwards compatibility. + +**Type**: `Boolean` + +**Default**: `true` on windows, `false` everywhere else. + +**Example** + +```js +console.log(micromatch.match(['a\\b\\c'], 'a/**')); +//=> ['a/b/c'] + +console.log(micromatch.match(['a\\b\\c'], { posixSlashes: false })); +//=> ['a\\b\\c'] +``` + +### options.unescape + +Remove backslashes from escaped glob characters before creating the regular expression to perform matches. + +**Type**: `Boolean` + +**Default**: `undefined` + +**Example** + +In this example we want to match a literal `*`: + +```js +console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c')); +//=> ['a\\*c'] + +console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c', { unescape: true })); +//=> ['a*c'] +``` + +
+
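+Two of the options above, `nonegate` and `nullglob`, are documented without examples. As a rough sketch of the expected behavior (inferred from the option descriptions and from the `nullglob` handling in `index.js`), consider:
+
+```js
+const micromatch = require('micromatch');
+
+// nonegate: a leading "!" is treated as a literal character to match
+// instead of negating the pattern.
+console.log(micromatch.isMatch('foo.js', '!*.js'));
+//=> false (by default the "!" negates "*.js")
+console.log(micromatch.isMatch('!foo.js', '!*.js', { nonegate: true }));
+//=> true (the "!" is matched literally)
+
+// nullglob: when nothing matches, the (arrayified) pattern is returned
+// instead of an empty array.
+console.log(micromatch(['a.md', 'b.md'], '*.js'));
+//=> []
+console.log(micromatch(['a.md', 'b.md'], '*.js', { nullglob: true }));
+//=> ['*.js']
+```
+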
+ +## Extended globbing + +Micromatch supports the following extended globbing features. + +### Extglobs + +Extended globbing, as described by the bash man page: + +| **pattern** | **regex equivalent** | **description** | +| --- | --- | --- | +| `?(pattern)` | `(pattern)?` | Matches zero or one occurrence of the given patterns | +| `*(pattern)` | `(pattern)*` | Matches zero or more occurrences of the given patterns | +| `+(pattern)` | `(pattern)+` | Matches one or more occurrences of the given patterns | +| `@(pattern)` | `(pattern)` * | Matches one of the given patterns | +| `!(pattern)` | N/A (equivalent regex is much more complicated) | Matches anything except one of the given patterns | + +* Note that `@` isn't a regex character. + +### Braces + +Brace patterns can be used to match specific ranges or sets of characters. + +**Example** + +The pattern `{f,b}*/{1..3}/{b,q}*` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Visit [braces](https://github.com/micromatch/braces) to see the full range of features and options related to brace expansion, or to create brace matching or expansion related issues. + +### Regex character classes + +Given the list: `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: + +* `[ac].js`: matches both `a` and `c`, returning `['a.js', 'c.js']` +* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']` +* `a/[A-Z].js`: matches and uppercase letter, returning `['a/E.md']` + +Learn about [regex character classes](http://www.regular-expressions.info/charclass.html). + +### Regex groups + +Given `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: + +* `(a|c).js`: would match either `a` or `c`, returning `['a.js', 'c.js']` +* `(b|d).js`: would match either `b` or `d`, returning `['b.js', 'd.js']` +* `(b|[A-Z]).js`: would match either `b` or an uppercase letter, returning `['b.js', 'E.js']` + +As with regex, parens can be nested, so patterns like `((a|b)|c)/b` will work. Although brace expansion might be friendlier to use, depending on preference. + +### POSIX bracket expressions + +POSIX brackets are intended to be more user-friendly than regex character classes. This of course is in the eye of the beholder. + +**Example** + +```js +console.log(micromatch.isMatch('a1', '[[:alpha:][:digit:]]')) //=> true +console.log(micromatch.isMatch('a1', '[[:alpha:][:alpha:]]')) //=> false +``` + +*** + +## Notes + +### Bash 4.3 parity + +Whenever possible matching behavior is based on behavior Bash 4.3, which is mostly consistent with minimatch. + +However, it's suprising how many edge cases and rabbit holes there are with glob matching, and since there is no real glob specification, and micromatch is more accurate than both Bash and minimatch, there are cases where best-guesses were made for behavior. In a few cases where Bash had no answers, we used wildmatch (used by git) as a fallback. + +### Backslashes + +There is an important, notable difference between minimatch and micromatch _in regards to how backslashes are handled_ in glob patterns. + +* Micromatch exclusively and explicitly reserves backslashes for escaping characters in a glob pattern, even on windows, which is consistent with bash behavior. _More importantly, unescaping globs can result in unsafe regular expressions_. +* Minimatch converts all backslashes to forward slashes, which means you can't use backslashes to escape any characters in your glob patterns. 
+ +We made this decision for micromatch for a couple of reasons: + +* Consistency with bash conventions. +* Glob patterns are not filepaths. They are a type of [regular language](https://en.wikipedia.org/wiki/Regular_language) that is converted to a JavaScript regular expression. Thus, when forward slashes are defined in a glob pattern, the resulting regular expression will match windows or POSIX path separators just fine. + +**A note about joining paths to globs** + +Note that when you pass something like `path.join('foo', '*')` to micromatch, you are creating a filepath and expecting it to still work as a glob pattern. This causes problems on windows, since the `path.sep` is `\\`. + +In other words, since `\\` is reserved as an escape character in globs, on windows `path.join('foo', '*')` would result in `foo\\*`, which tells micromatch to match `*` as a literal character. This is the same behavior as bash. + +To solve this, you might be inspired to do something like `'foo\\*'.replace(/\\/g, '/')`, but this causes another, potentially much more serious, problem. + +## Benchmarks + +### Running benchmarks + +Install dependencies for running benchmarks: + +```sh +$ cd bench && npm install +``` + +Run the benchmarks: + +```sh +$ npm run bench +``` + +### Latest results + +As of August 23, 2024 (longer bars are better): + +```sh +# .makeRe star + micromatch x 2,232,802 ops/sec ±2.34% (89 runs sampled)) + minimatch x 781,018 ops/sec ±6.74% (92 runs sampled)) + +# .makeRe star; dot=true + micromatch x 1,863,453 ops/sec ±0.74% (93 runs sampled) + minimatch x 723,105 ops/sec ±0.75% (93 runs sampled) + +# .makeRe globstar + micromatch x 1,624,179 ops/sec ±2.22% (91 runs sampled) + minimatch x 1,117,230 ops/sec ±2.78% (86 runs sampled)) + +# .makeRe globstars + micromatch x 1,658,642 ops/sec ±0.86% (92 runs sampled) + minimatch x 741,224 ops/sec ±1.24% (89 runs sampled)) + +# .makeRe with leading star + micromatch x 1,525,014 ops/sec ±1.63% (90 runs sampled) + minimatch x 561,074 ops/sec ±3.07% (89 runs sampled) + +# .makeRe - braces + micromatch x 172,478 ops/sec ±2.37% (78 runs sampled) + minimatch x 96,087 ops/sec ±2.34% (88 runs sampled))) + +# .makeRe braces - range (expanded) + micromatch x 26,973 ops/sec ±0.84% (89 runs sampled) + minimatch x 3,023 ops/sec ±0.99% (90 runs sampled)) + +# .makeRe braces - range (compiled) + micromatch x 152,892 ops/sec ±1.67% (83 runs sampled) + minimatch x 992 ops/sec ±3.50% (89 runs sampled)d)) + +# .makeRe braces - nested ranges (expanded) + micromatch x 15,816 ops/sec ±13.05% (80 runs sampled) + minimatch x 2,953 ops/sec ±1.64% (91 runs sampled) + +# .makeRe braces - nested ranges (compiled) + micromatch x 110,881 ops/sec ±1.85% (82 runs sampled) + minimatch x 1,008 ops/sec ±1.51% (91 runs sampled) + +# .makeRe braces - set (compiled) + micromatch x 134,930 ops/sec ±3.54% (63 runs sampled)) + minimatch x 43,242 ops/sec ±0.60% (93 runs sampled) + +# .makeRe braces - nested sets (compiled) + micromatch x 94,455 ops/sec ±1.74% (69 runs sampled)) + minimatch x 27,720 ops/sec ±1.84% (93 runs sampled)) +``` + +## Contributing + +All contributions are welcome! Please read [the contributing guide](.github/contributing.md) to get started. + +**Bug reports** + +Please create an issue if you encounter a bug or matching behavior that doesn't seem correct. 
If you find a matching-related issue, please:
+
+* [research existing issues first](../../issues) (open and closed)
+* visit the [GNU Bash documentation](https://www.gnu.org/software/bash/manual/) to see how Bash deals with the pattern
+* visit the [minimatch](https://github.com/isaacs/minimatch) documentation to cross-check expected behavior in node.js
+* if all else fails, since there is no real specification for globs we will probably need to discuss expected behavior and decide how to resolve it, which means any detail you can provide to help with this discussion would be greatly appreciated.
+
+**Platform issues**
+
+It's important to us that micromatch work consistently on all platforms. If you encounter any platform-specific matching or path-related issues, please let us know (pull requests are also greatly appreciated).
+
+## About
+
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [braces](https://www.npmjs.com/package/braces): Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support… [more](https://github.com/micromatch/braces) | [homepage](https://github.com/micromatch/braces "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.") +* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/micromatch/expand-brackets "Expand POSIX bracket expressions (character classes) in glob patterns.") +* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [nanomatch](https://www.npmjs.com/package/nanomatch): Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash… [more](https://github.com/micromatch/nanomatch) | [homepage](https://github.com/micromatch/nanomatch "Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 523 | [jonschlinkert](https://github.com/jonschlinkert) | +| 12 | [es128](https://github.com/es128) | +| 9 | [danez](https://github.com/danez) | +| 8 | [doowb](https://github.com/doowb) | +| 6 | [paulmillr](https://github.com/paulmillr) | +| 5 | [mrmlnc](https://github.com/mrmlnc) | +| 3 | [DrPizza](https://github.com/DrPizza) | +| 2 | [Tvrqvoise](https://github.com/Tvrqvoise) | +| 2 | [antonyk](https://github.com/antonyk) | +| 2 | [MartinKolarik](https://github.com/MartinKolarik) | +| 2 | [Glazy](https://github.com/Glazy) | +| 2 | [mceIdo](https://github.com/mceIdo) | +| 2 | [TrySound](https://github.com/TrySound) | +| 1 | [yvele](https://github.com/yvele) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | +| 1 | [simlu](https://github.com/simlu) | +| 1 | [curbengh](https://github.com/curbengh) | +| 1 | [fidian](https://github.com/fidian) | +| 1 | [tomByrer](https://github.com/tomByrer) | +| 1 | [ZoomerTedJackson](https://github.com/ZoomerTedJackson) | +| 1 | [styfle](https://github.com/styfle) | +| 1 | [sebdeckers](https://github.com/sebdeckers) | +| 1 | [muescha](https://github.com/muescha) | +| 1 | [juszczykjakub](https://github.com/juszczykjakub) | +| 1 | [joyceerhl](https://github.com/joyceerhl) | +| 1 | [donatj](https://github.com/donatj) | +| 1 | [frangio](https://github.com/frangio) | +| 1 | [UltCombo](https://github.com/UltCombo) | +| 1 | [DianeLooney](https://github.com/DianeLooney) | +| 1 | [devongovett](https://github.com/devongovett) | +| 1 | [Cslove](https://github.com/Cslove) | +| 1 | [amilajack](https://github.com/amilajack) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2024, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on August 23, 2024._ \ No newline at end of file diff --git a/node_modules/micromatch/index.js b/node_modules/micromatch/index.js new file mode 100644 index 0000000..cb9d9ef --- /dev/null +++ b/node_modules/micromatch/index.js @@ -0,0 +1,474 @@ +'use strict'; + +const util = require('util'); +const braces = require('braces'); +const picomatch = require('picomatch'); +const utils = require('picomatch/lib/utils'); + +const isEmptyString = v => v === '' || v === './'; +const hasBraces = v => { + const index = v.indexOf('{'); + return index > -1 && v.indexOf('}', index) > -1; +}; + +/** + * Returns an array of strings that match one or more glob patterns. + * + * ```js + * const mm = require('micromatch'); + * // mm(list, patterns[, options]); + * + * console.log(mm(['a.js', 'a.txt'], ['*.js'])); + * //=> [ 'a.js' ] + * ``` + * @param {String|Array} `list` List of strings to match. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
+ * @param {Object} `options` See available [options](#options) + * @return {Array} Returns an array of matches + * @summary false + * @api public + */ + +const micromatch = (list, patterns, options) => { + patterns = [].concat(patterns); + list = [].concat(list); + + let omit = new Set(); + let keep = new Set(); + let items = new Set(); + let negatives = 0; + + let onResult = state => { + items.add(state.output); + if (options && options.onResult) { + options.onResult(state); + } + }; + + for (let i = 0; i < patterns.length; i++) { + let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); + let negated = isMatch.state.negated || isMatch.state.negatedExtglob; + if (negated) negatives++; + + for (let item of list) { + let matched = isMatch(item, true); + + let match = negated ? !matched.isMatch : matched.isMatch; + if (!match) continue; + + if (negated) { + omit.add(matched.output); + } else { + omit.delete(matched.output); + keep.add(matched.output); + } + } + } + + let result = negatives === patterns.length ? [...items] : [...keep]; + let matches = result.filter(item => !omit.has(item)); + + if (options && matches.length === 0) { + if (options.failglob === true) { + throw new Error(`No matches found for "${patterns.join(', ')}"`); + } + + if (options.nonull === true || options.nullglob === true) { + return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; + } + } + + return matches; +}; + +/** + * Backwards compatibility + */ + +micromatch.match = micromatch; + +/** + * Returns a matcher function from the given glob `pattern` and `options`. + * The returned function takes a string to match as its only argument and returns + * true if the string is a match. + * + * ```js + * const mm = require('micromatch'); + * // mm.matcher(pattern[, options]); + * + * const isMatch = mm.matcher('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @param {String} `pattern` Glob pattern + * @param {Object} `options` + * @return {Function} Returns a matcher function. + * @api public + */ + +micromatch.matcher = (pattern, options) => picomatch(pattern, options); + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const mm = require('micromatch'); + * // mm.isMatch(string, patterns[, options]); + * + * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(mm.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `[options]` See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Backwards compatibility + */ + +micromatch.any = micromatch.isMatch; + +/** + * Returns a list of strings that _**do not match any**_ of the given `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.not(list, patterns[, options]); + * + * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); + * //=> ['b.b', 'c.c'] + * ``` + * @param {Array} `list` Array of strings to match. + * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of strings that **do not match** the given patterns. + * @api public + */ + +micromatch.not = (list, patterns, options = {}) => { + patterns = [].concat(patterns).map(String); + let result = new Set(); + let items = []; + + let onResult = state => { + if (options.onResult) options.onResult(state); + items.push(state.output); + }; + + let matches = new Set(micromatch(list, patterns, { ...options, onResult })); + + for (let item of items) { + if (!matches.has(item)) { + result.add(item); + } + } + return [...result]; +}; + +/** + * Returns true if the given `string` contains the given pattern. Similar + * to [.isMatch](#isMatch) but the pattern can match any part of the string. + * + * ```js + * var mm = require('micromatch'); + * // mm.contains(string, pattern[, options]); + * + * console.log(mm.contains('aa/bb/cc', '*b')); + * //=> true + * console.log(mm.contains('aa/bb/cc', '*d')); + * //=> false + * ``` + * @param {String} `str` The string to match. + * @param {String|Array} `patterns` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any of the patterns matches any part of `str`. + * @api public + */ + +micromatch.contains = (str, pattern, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + if (Array.isArray(pattern)) { + return pattern.some(p => micromatch.contains(str, p, options)); + } + + if (typeof pattern === 'string') { + if (isEmptyString(str) || isEmptyString(pattern)) { + return false; + } + + if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { + return true; + } + } + + return micromatch.isMatch(str, pattern, { ...options, contains: true }); +}; + +/** + * Filter the keys of the given object with the given `glob` pattern + * and `options`. Does not attempt to match nested keys. If you need this feature, + * use [glob-object][] instead. + * + * ```js + * const mm = require('micromatch'); + * // mm.matchKeys(object, patterns[, options]); + * + * const obj = { aa: 'a', ab: 'b', ac: 'c' }; + * console.log(mm.matchKeys(obj, '*b')); + * //=> { ab: 'b' } + * ``` + * @param {Object} `object` The object with keys to filter. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Object} Returns an object with only keys that match the given patterns. + * @api public + */ + +micromatch.matchKeys = (obj, patterns, options) => { + if (!utils.isObject(obj)) { + throw new TypeError('Expected the first argument to be an object'); + } + let keys = micromatch(Object.keys(obj), patterns, options); + let res = {}; + for (let key of keys) res[key] = obj[key]; + return res; +}; + +/** + * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.some(list, patterns[, options]); + * + * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // true + * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` + * @api public + */ + +micromatch.some = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (items.some(item => isMatch(item))) { + return true; + } + } + return false; +}; + +/** + * Returns true if every string in the given `list` matches + * any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.every(list, patterns[, options]); + * + * console.log(mm.every('foo.js', ['foo.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // false + * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` + * @api public + */ + +micromatch.every = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (!items.every(item => isMatch(item))) { + return false; + } + } + return true; +}; + +/** + * Returns true if **all** of the given `patterns` match + * the specified string. + * + * ```js + * const mm = require('micromatch'); + * // mm.all(string, patterns[, options]); + * + * console.log(mm.all('foo.js', ['foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); + * // false + * + * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); + * // true + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.all = (str, patterns, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + return [].concat(patterns).every(p => picomatch(p, options)(str)); +}; + +/** + * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. + * + * ```js + * const mm = require('micromatch'); + * // mm.capture(pattern, string[, options]); + * + * console.log(mm.capture('test/*.js', 'test/foo.js')); + * //=> ['foo'] + * console.log(mm.capture('test/*.js', 'foo/bar.css')); + * //=> null + * ``` + * @param {String} `glob` Glob pattern to use for matching. + * @param {String} `input` String to match + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
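+ * Capture groups that did not participate in the match are returned as empty strings rather than `undefined`.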
+ * @api public + */ + +micromatch.capture = (glob, input, options) => { + let posix = utils.isWindows(options); + let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); + let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); + + if (match) { + return match.slice(1).map(v => v === void 0 ? '' : v); + } +}; + +/** + * Create a regular expression from the given glob `pattern`. + * + * ```js + * const mm = require('micromatch'); + * // mm.makeRe(pattern[, options]); + * + * console.log(mm.makeRe('*.js')); + * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ + * ``` + * @param {String} `pattern` A glob pattern to convert to regex. + * @param {Object} `options` + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +micromatch.makeRe = (...args) => picomatch.makeRe(...args); + +/** + * Scan a glob pattern to separate the pattern into segments. Used + * by the [split](#split) method. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.scan(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +micromatch.scan = (...args) => picomatch.scan(...args); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.parse(pattern[, options]); + * ``` + * @param {String} `glob` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as regex source string. + * @api public + */ + +micromatch.parse = (patterns, options) => { + let res = []; + for (let pattern of [].concat(patterns || [])) { + for (let str of braces(String(pattern), options)) { + res.push(picomatch.parse(str, options)); + } + } + return res; +}; + +/** + * Process the given brace `pattern`. + * + * ```js + * const { braces } = require('micromatch'); + * console.log(braces('foo/{a,b,c}/bar')); + * //=> [ 'foo/(a|b|c)/bar' ] + * + * console.log(braces('foo/{a,b,c}/bar', { expand: true })); + * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] + * ``` + * @param {String} `pattern` String with brace pattern to process. + * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. + * @return {Array} + * @api public + */ + +micromatch.braces = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + if ((options && options.nobrace === true) || !hasBraces(pattern)) { + return [pattern]; + } + return braces(pattern, options); +}; + +/** + * Expand braces + */ + +micromatch.braceExpand = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + return micromatch.braces(pattern, { ...options, expand: true }); +}; + +/** + * Expose micromatch + */ + +// exposed for tests +micromatch.hasBraces = hasBraces; +module.exports = micromatch; diff --git a/node_modules/micromatch/package.json b/node_modules/micromatch/package.json new file mode 100644 index 0000000..d5558bb --- /dev/null +++ b/node_modules/micromatch/package.json @@ -0,0 +1,119 @@ +{ + "name": "micromatch", + "description": "Glob matching for javascript/node.js. 
A replacement and faster alternative to minimatch and multimatch.", + "version": "4.0.8", + "homepage": "https://github.com/micromatch/micromatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "(https://github.com/DianeLooney)", + "Amila Welihinda (amilajack.com)", + "Bogdan Chadkin (https://github.com/TrySound)", + "Brian Woodward (https://twitter.com/doowb)", + "Devon Govett (http://badassjs.com)", + "Elan Shanker (https://github.com/es128)", + "Fabrício Matté (https://ultcombo.js.org)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Martin Kolárik (https://kolarik.sk)", + "Olsten Larck (https://i.am.charlike.online)", + "Paul Miller (paulmillr.com)", + "Tom Byrer (https://github.com/tomByrer)", + "Tyler Akins (http://rumkin.com)", + "Peter Bright (https://github.com/drpizza)", + "Kuba Juszczyk (https://github.com/ku8ar)" + ], + "repository": "micromatch/micromatch", + "bugs": { + "url": "https://github.com/micromatch/micromatch/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8.6" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "devDependencies": { + "fill-range": "^7.0.1", + "gulp-format-md": "^2.0.0", + "minimatch": "^5.0.1", + "mocha": "^9.2.2", + "time-require": "github:jonschlinkert/time-require" + }, + "keywords": [ + "bash", + "bracket", + "character-class", + "expand", + "expansion", + "expression", + "extglob", + "extglobs", + "file", + "files", + "filter", + "find", + "glob", + "globbing", + "globs", + "globstar", + "lookahead", + "lookaround", + "lookbehind", + "match", + "matcher", + "matches", + "matching", + "micromatch", + "minimatch", + "multimatch", + "negate", + "negation", + "path", + "pattern", + "patterns", + "posix", + "regex", + "regexp", + "regular", + "shell", + "star", + "wildcard" + ], + "verb": { + "toc": "collapsible", + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "nanomatch" + ] + }, + "reflinks": [ + "extglob", + "fill-range", + "glob-object", + "minimatch", + "multimatch" + ] + } +} diff --git a/node_modules/mimic-fn/index.d.ts b/node_modules/mimic-fn/index.d.ts new file mode 100644 index 0000000..b4047d5 --- /dev/null +++ b/node_modules/mimic-fn/index.d.ts @@ -0,0 +1,54 @@ +declare const mimicFn: { + /** + Make a function mimic another one. It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. + + @param to - Mimicking function. + @param from - Function to mimic. + @returns The modified `to` function. 
+ + @example + ``` + import mimicFn = require('mimic-fn'); + + function foo() {} + foo.unicorn = '🦄'; + + function wrapper() { + return foo(); + } + + console.log(wrapper.name); + //=> 'wrapper' + + mimicFn(wrapper, foo); + + console.log(wrapper.name); + //=> 'foo' + + console.log(wrapper.unicorn); + //=> '🦄' + ``` + */ + < + ArgumentsType extends unknown[], + ReturnType, + FunctionType extends (...arguments: ArgumentsType) => ReturnType + >( + to: (...arguments: ArgumentsType) => ReturnType, + from: FunctionType + ): FunctionType; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function mimicFn< + // ArgumentsType extends unknown[], + // ReturnType, + // FunctionType extends (...arguments: ArgumentsType) => ReturnType + // >( + // to: (...arguments: ArgumentsType) => ReturnType, + // from: FunctionType + // ): FunctionType; + // export = mimicFn; + default: typeof mimicFn; +}; + +export = mimicFn; diff --git a/node_modules/mimic-fn/index.js b/node_modules/mimic-fn/index.js new file mode 100644 index 0000000..1a59705 --- /dev/null +++ b/node_modules/mimic-fn/index.js @@ -0,0 +1,13 @@ +'use strict'; + +const mimicFn = (to, from) => { + for (const prop of Reflect.ownKeys(from)) { + Object.defineProperty(to, prop, Object.getOwnPropertyDescriptor(from, prop)); + } + + return to; +}; + +module.exports = mimicFn; +// TODO: Remove this for the next major release +module.exports.default = mimicFn; diff --git a/node_modules/mimic-fn/license b/node_modules/mimic-fn/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/mimic-fn/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
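The `mimic-fn` implementation above copies every own property descriptor from `from` onto `to`, so symbol-keyed properties, non-enumerable properties and getters carry over intact, not just plain enumerable values. A minimal sketch of that behaviour, assuming Node.js with CommonJS; the `original`/`wrapper` names and the `version` getter are illustrative only, not part of the package:

```js
const mimicFn = require('mimic-fn');

const kTag = Symbol('tag');

function original() {}
original[kTag] = 'symbol-keyed value';
Object.defineProperty(original, 'version', {
	get: () => '1.2.3',   // accessor property
	enumerable: false,    // would be skipped by Object.assign
	configurable: true
});

function wrapper() {
	return original();
}

mimicFn(wrapper, original);

console.log(wrapper.name);    //=> 'original'
console.log(wrapper[kTag]);   //=> 'symbol-keyed value'
console.log(wrapper.version); //=> '1.2.3' (the getter itself is copied)
```

Because descriptors are copied with `Object.defineProperty`, copied data properties are a snapshot taken at call time, while copied getters keep computing their value on each access.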
diff --git a/node_modules/mimic-fn/package.json b/node_modules/mimic-fn/package.json new file mode 100644 index 0000000..199d2c7 --- /dev/null +++ b/node_modules/mimic-fn/package.json @@ -0,0 +1,42 @@ +{ + "name": "mimic-fn", + "version": "2.1.0", + "description": "Make a function mimic another one", + "license": "MIT", + "repository": "sindresorhus/mimic-fn", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "function", + "mimic", + "imitate", + "rename", + "copy", + "inherit", + "properties", + "name", + "func", + "fn", + "set", + "infer", + "change" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/mimic-fn/readme.md b/node_modules/mimic-fn/readme.md new file mode 100644 index 0000000..0ef8a13 --- /dev/null +++ b/node_modules/mimic-fn/readme.md @@ -0,0 +1,69 @@ +# mimic-fn [![Build Status](https://travis-ci.org/sindresorhus/mimic-fn.svg?branch=master)](https://travis-ci.org/sindresorhus/mimic-fn) + +> Make a function mimic another one + +Useful when you wrap a function in another function and like to preserve the original name and other properties. + + +## Install + +``` +$ npm install mimic-fn +``` + + +## Usage + +```js +const mimicFn = require('mimic-fn'); + +function foo() {} +foo.unicorn = '🦄'; + +function wrapper() { + return foo(); +} + +console.log(wrapper.name); +//=> 'wrapper' + +mimicFn(wrapper, foo); + +console.log(wrapper.name); +//=> 'foo' + +console.log(wrapper.unicorn); +//=> '🦄' +``` + + +## API + +It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. + +### mimicFn(to, from) + +Modifies the `to` function and returns it. + +#### to + +Type: `Function` + +Mimicking function. + +#### from + +Type: `Function` + +Function to mimic. 
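To connect the API section above to a concrete case: a wrapper defined by hand reports its own `name` and an arity of 0, which confuses logging and `Function#length`-based introspection; calling `mimicFn` afterwards makes it look like the wrapped function. A short hypothetical sketch (the `add`/`logged` names are illustrative only):

```js
const mimicFn = require('mimic-fn');

function add(a, b) {
	return a + b;
}

// A logging wrapper: on its own it reports name 'logged' and length 0.
const logged = (...args) => {
	console.log('add called with', args);
	return add(...args);
};

console.log(logged.name, logged.length); //=> logged 0

mimicFn(logged, add);

console.log(logged.name, logged.length); //=> add 2
console.log(logged(2, 3));               //=> logs the call, then 5
```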
+ + +## Related + +- [rename-fn](https://github.com/sindresorhus/rename-fn) - Rename a function +- [keep-func-props](https://github.com/ehmicky/keep-func-props) - Wrap a function without changing its name, length and other properties + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/.travis.yml b/node_modules/npm-mas-mas/.travis.yml new file mode 100644 index 0000000..5c04817 --- /dev/null +++ b/node_modules/npm-mas-mas/.travis.yml @@ -0,0 +1,15 @@ +language: c +services: docker +os: linux +env: + - PACKAGE=python TARGET=linux64 MODE=Debug + - PACKAGE=dune-freetype TARGET=linux64 MODE=Debug + - PACKAGE=haxx-libcurl TARGET=linux64 MODE=Debug + - PACKAGE=fmod TARGET=linux64 MODE=Debug + - PACKAGE=intel-tbb TARGET=linux64 MODE=Debug + - PACKAGE=cryptopp TARGET=linux64 MODE=Debug + - PACKAGE=ois TARGET=linux64 MODE=Debug + - PACKAGE=bullet2 TARGET=linux64 MODE=Debug +script: + - PACKAGE=$PACKAGE make $TARGET + diff --git a/node_modules/npm-mas-mas/LICENSE b/node_modules/npm-mas-mas/LICENSE new file mode 100644 index 0000000..9e0fb24 --- /dev/null +++ b/node_modules/npm-mas-mas/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/Makefile b/node_modules/npm-mas-mas/Makefile new file mode 100644 index 0000000..bfa5464 --- /dev/null +++ b/node_modules/npm-mas-mas/Makefile @@ -0,0 +1,21 @@ +PACKAGE ?= . +MODE ?= Debug + +all: clean build + +build: + (cd cmaki_identifier && npm install --unsafe-perm) + (cd cmaki_generator && ./build ${PACKAGE} -d) + +clean: + (cd cmaki_identifier && rm -Rf bin rm -Rf artifacts) + +linux64: + docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} linux64 make + +windows64: + docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} windows64 make + +android64: + docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} android64 make + diff --git a/node_modules/npm-mas-mas/README b/node_modules/npm-mas-mas/README new file mode 100644 index 0000000..75a7863 --- /dev/null +++ b/node_modules/npm-mas-mas/README @@ -0,0 +1,57 @@ +# fusion of projects into one product + +- cmaki +- cmaki_scripts +- cmaki_identifier +- cmaki_docker +- cmaki_generator +- servfactor + +# environment variables +- Artifact server: +- NPP_SERVER = http://.... + +- Build mode: +- NPP_MODE = Debug, Release ....
+ +- Installation directory: +- NPP_INSTALL + +- Use cached artifacts or always compile: +- NPP_CACHE=TRUE/FALSE + + + + + + +refactor cmake +----------------------------------- + +cmaki_library ---------> npp_shared +cmaki_static_library --> npp_static +cmaki_executable ------> npp_executable +cmaki_test ------------> npp_test +cmaki_google_test -----> npp_google_test +cmaki_python_test -----> npp_python_test + + + +Usage commands +-------------- +npm install +npm test + +npm run create # create package +npm run upload # upload package + + +windows environment +------------------ +visual studio 2019 +mini conda +npm +cmake +pip install conan +chocolatey +choco install tortoisegit diff --git a/node_modules/npm-mas-mas/cmaki/.travis.yml b/node_modules/npm-mas-mas/cmaki/.travis.yml new file mode 100644 index 0000000..44de95c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/.travis.yml @@ -0,0 +1,5 @@ +language: c +services: docker +os: linux +script: + - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki/GitUtils.cmake b/node_modules/npm-mas-mas/cmaki/GitUtils.cmake new file mode 100644 index 0000000..4bfc61e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/GitUtils.cmake @@ -0,0 +1,157 @@ +cmake_minimum_required(VERSION 2.8.7) + +include("${CMAKE_CURRENT_LIST_DIR}/Utils.cmake") +include(CMakeParseArguments) + +find_package(Git) +if(NOT GIT_FOUND) + message(FATAL_ERROR "git not found!") +endif() + + +# clone a git repo into a directory at configure time +# this can be useful for including cmake-library projects that contain *.cmake files +# the function will automatically init git submodules too +# +# ATTENTION: CMakeLists-files in the cloned repo will NOT be built automatically +# +# why not use ExternalProject_Add you ask? because we need to run this at configure time +# +# USAGE: +# git_clone( +# PROJECT_NAME +# GIT_URL +# [GIT_TAG|GIT_BRANCH|GIT_COMMIT ] +# [DIRECTORY ] +# [QUIET] +# ) +# +# +# ARGUMENTS: +# PROJECT_NAME +# name of the project that will be used in output variables.
+# must be the same as the git directory/repo name +# +# GIT_URL +# url to the git repo +# +# GIT_TAG|GIT_BRANCH|GIT_COMMIT +# optional +# the tag/branch/commit to checkout +# default is master +# +# DIRECTORY +# optional +# the directory the project will be cloned into +# default is the build directory, similar to ExternalProject (${CMAKE_BINARY_DIR}) +# +# QUIET +# optional +# don't print status messages +# +# +# OUTPUT VARIABLES: +# _SOURCE_DIR +# top level source directory of the cloned project +# +# +# EXAMPLE: +# git_clone( +# PROJECT_NAME testProj +# GIT_URL https://github.com/test/test.git +# GIT_COMMIT a1b2c3 +# DIRECTORY ${CMAKE_BINARY_DIR} +# QUIET +# ) +# +# include(${testProj_SOURCE_DIR}/cmake/myFancyLib.cmake) + +function(cmaki_git_clone) + + cmake_parse_arguments( + PARGS # prefix of output variables + "QUIET" # list of names of the boolean arguments (only defined ones will be true) + "PROJECT_NAME;GIT_URL;GIT_TAG;GIT_BRANCH;GIT_COMMIT;DIRECTORY" # list of names of mono-valued arguments + "" # list of names of multi-valued arguments (output variables are lists) + ${ARGN} # arguments of the function to parse, here we take the all original ones + ) # remaining unparsed arguments can be found in PARGS_UNPARSED_ARGUMENTS + + if(NOT PARGS_PROJECT_NAME) + message(FATAL_ERROR "You must provide a project name") + endif() + + if(NOT PARGS_GIT_URL) + message(FATAL_ERROR "You must provide a git url") + endif() + + if(NOT PARGS_DIRECTORY) + set(PARGS_DIRECTORY ${CMAKE_BINARY_DIR}) + endif() + + set(${PARGS_PROJECT_NAME}_SOURCE_DIR + ${PARGS_DIRECTORY}/${PARGS_PROJECT_NAME} + CACHE INTERNAL "" FORCE) # makes var visible everywhere because PARENT_SCOPE wouldn't include this scope + + set(SOURCE_DIR ${PARGS_PROJECT_NAME}_SOURCE_DIR) + + # check that only one of GIT_TAG xor GIT_BRANCH xor GIT_COMMIT was passed + at_most_one(at_most_one_tag ${PARGS_GIT_TAG} ${PARGS_GIT_BRANCH} ${PARGS_GIT_COMMIT}) + + if(NOT at_most_one_tag) + message(FATAL_ERROR "you can only provide one of GIT_TAG, GIT_BRANCH or GIT_COMMIT") + endif() + + if(NOT PARGS_QUIET) + message(STATUS "downloading/updating ${PARGS_PROJECT_NAME}") + endif() + + # first clone the repo + if(EXISTS ${${SOURCE_DIR}}) + if(NOT PARGS_QUIET) + message(STATUS "${PARGS_PROJECT_NAME} directory found, pulling...") + endif() + + execute_process( + COMMAND ${GIT_EXECUTABLE} pull origin master + COMMAND ${GIT_EXECUTABLE} submodule update --remote + WORKING_DIRECTORY ${${SOURCE_DIR}} + OUTPUT_VARIABLE git_output) + else() + if(NOT PARGS_QUIET) + message(STATUS "${PARGS_PROJECT_NAME} directory not found, cloning...") + endif() + + execute_process( + COMMAND ${GIT_EXECUTABLE} clone ${PARGS_GIT_URL} --recursive + WORKING_DIRECTORY ${PARGS_DIRECTORY} + OUTPUT_VARIABLE git_output) + endif() + + if(NOT PARGS_QUIET) + message("${git_output}") + endif() + + # now checkout the right commit + if(PARGS_GIT_TAG) + execute_process( + COMMAND ${GIT_EXECUTABLE} fetch --all --tags --prune + COMMAND ${GIT_EXECUTABLE} checkout tags/${PARGS_GIT_TAG} -b tag_${PARGS_GIT_TAG} + WORKING_DIRECTORY ${${SOURCE_DIR}} + OUTPUT_VARIABLE git_output) + elseif(PARGS_GIT_BRANCH OR PARGS_GIT_COMMIT) + execute_process( + COMMAND ${GIT_EXECUTABLE} checkout ${PARGS_GIT_BRANCH} + WORKING_DIRECTORY ${${SOURCE_DIR}} + OUTPUT_VARIABLE git_output) + else() + message(STATUS "no tag specified, defaulting to master") + execute_process( + COMMAND ${GIT_EXECUTABLE} checkout master + WORKING_DIRECTORY ${${SOURCE_DIR}} + OUTPUT_VARIABLE git_output) + endif() + + if(NOT PARGS_QUIET) + 
message("${git_output}") + endif() +endfunction() diff --git a/node_modules/npm-mas-mas/cmaki/LICENSE b/node_modules/npm-mas-mas/cmaki/LICENSE new file mode 100644 index 0000000..7e79e4d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/npm-mas-mas/cmaki/README.md b/node_modules/npm-mas-mas/cmaki/README.md new file mode 100644 index 0000000..9d7b1b0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/README.md @@ -0,0 +1,4 @@ +# :construction: I am under construction [![npm version](https://badge.fury.io/js/cmaki.svg)](https://badge.fury.io/js/cmaki) +Don't use it [![Build Status](https://travis-ci.org/makiolo/cmaki.svg?branch=master)](https://travis-ci.org/makiolo/cmaki) +# quick +bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/bootstrap.sh) diff --git a/node_modules/npm-mas-mas/cmaki/Utils.cmake b/node_modules/npm-mas-mas/cmaki/Utils.cmake new file mode 100644 index 0000000..a76708c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/Utils.cmake @@ -0,0 +1,32 @@ +# returns true if only a single one of its arguments is true +function(xor result) + set(true_args_count 0) + + foreach(foo ${ARGN}) + if(foo) + math(EXPR true_args_count "${true_args_count}+1") + endif() + endforeach() + + if(NOT (${true_args_count} EQUAL 1)) + set(${result} FALSE PARENT_SCOPE) + else() + set(${result} TRUE PARENT_SCOPE) + endif() +endfunction() + +function(at_most_one result) + set(true_args_count 0) + + foreach(foo ${ARGN}) + if(foo) + math(EXPR true_args_count "${true_args_count}+1") + endif() + endforeach() + + if(${true_args_count} GREATER 1) + set(${result} FALSE PARENT_SCOPE) + else() + set(${result} TRUE PARENT_SCOPE) + endif() +endfunction() diff --git a/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh b/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh new file mode 100755 index 0000000..faeadbd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" + +if [[ "$WINEARCH" = "win32" ]]; then + wine $CMAKI_INSTALL/cmaki_identifier.exe +else + $CMAKI_EMULATOR $CMAKI_INSTALL/cmaki_identifier +fi + diff --git a/node_modules/npm-mas-mas/cmaki/cmaki.cmake 
b/node_modules/npm-mas-mas/cmaki/cmaki.cmake new file mode 100644 index 0000000..74b034f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/cmaki.cmake @@ -0,0 +1,529 @@ +if(NOT DEFINED CMAKE_MODULE_PATH) + set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}) +endif() + +IF(NOT DEFINED CMAKI_PATH) + set(CMAKI_PATH ${CMAKE_CURRENT_LIST_DIR}) +ENDIF() + +include("${CMAKE_CURRENT_LIST_DIR}/facts/facts.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/GitUtils.cmake") + +option(FIRST_ERROR "stop on first compilation error" FALSE) + +macro(cmaki_setup) + enable_modern_cpp() + enable_testing() + SET(CMAKE_BUILD_TYPE_INIT Release) + set(CMAKE_CXX_STANDARD 14) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS ON) + IF(WITH_CONAN) + # Conan + message("-- Using conan dir: ${CMAKE_BINARY_DIR}") + include("${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") + conan_basic_setup() + ENDIF() +endmacro() + +macro (mark_as_internal _var) + set(${_var} ${${_var}} CACHE INTERNAL "hide this!" FORCE) +endmacro(mark_as_internal _var) + +macro (option_combobox _var options default_option comment) + set(${_var} "${default_option}" CACHE STRING "${comment}") + set(${_var}Values "${options}" CACHE INTERNAL "hide this!" FORCE) + set_property(CACHE ${_var} PROPERTY STRINGS ${${_var}Values}) +endmacro() + +function(cmaki_install_file FROM) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_install_file_into FROM TO) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_install_file_and_rename FROM NEWNAME) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) + endforeach() +endfunction() + +function(cmaki_install_file_into_and_rename FROM TO NEWNAME) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) + endforeach() +endfunction() + +function(cmaki_install_files FROM) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + FILE(GLOB files ${FROM}) + INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_install_files_into FROM TO) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + FILE(GLOB files ${FROM}) + INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +macro(cmaki_install_inside_dir _DESTINE) + file(GLOB DEPLOY_FILES_AND_DIRS "${_DESTINE}/*") + foreach(ITEM ${DEPLOY_FILES_AND_DIRS}) + IF( IS_DIRECTORY "${ITEM}" ) + LIST( APPEND DIRS_TO_DEPLOY "${ITEM}" ) + ELSE() + IF(ITEM STREQUAL "${_DESTINE}/CMakeLists.txt") + MESSAGE("skipped file: ${_DESTINE}/CMakeLists.txt") + ELSE() + LIST(APPEND FILES_TO_DEPLOY "${ITEM}") + ENDIF() + ENDIF() + endforeach() + INSTALL(FILES ${FILES_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) + INSTALL(DIRECTORY ${DIRS_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) +endmacro() + +macro(cmaki_install_dir _DESTINE) + INSTALL(DIRECTORY ${_DESTINE} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) +endmacro() + 
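+# cmaki_parse_parameters (below) is the shared argument parser used by the cmaki_simple_* helpers: the first argument becomes the target name (_MAIN_NAME) and the remaining arguments are sorted into DEPENDS / SOURCES / TESTS / PCH / PTHREADS / INCLUDES / DESTINATION buckets (_DEPENDS, _SOURCES, _TESTS, _PCH, _INCLUDES, _SUFFIX_DESTINATION). +# Hypothetical call, for illustration only: cmaki_simple_executable(myapp SOURCES main.cpp DEPENDS ${CMAKI_LIBRARIES} PTHREADS)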
+macro(cmaki_parse_parameters) + set(PARAMETERS ${ARGV}) + list(GET PARAMETERS 0 _MAIN_NAME) + list(REMOVE_AT PARAMETERS 0) + SET(HAVE_TESTS FALSE) + SET(HAVE_PCH FALSE) + SET(HAVE_PTHREADS FALSE) + set(_DEPENDS) + set(_SOURCES) + set(_TESTS) + set(_PCH) + set(_INCLUDES) + set(_SUFFIX_DESTINATION) + set(NOW_IN SOURCES) + while(PARAMETERS) + list(GET PARAMETERS 0 PARM) + if(PARM STREQUAL DEPENDS) + set(NOW_IN DEPENDS) + elseif(PARM STREQUAL SOURCES) + set(NOW_IN SOURCES) + elseif(PARM STREQUAL TESTS) + set(NOW_IN TESTS) + elseif(PARM STREQUAL PCH) + set(NOW_IN PCH) + elseif(PARM STREQUAL PTHREADS) + if(NOT WIN32) + # no enabled in windows + set(HAVE_PTHREADS TRUE) + endif() + elseif(PARM STREQUAL INCLUDES) + set(NOW_IN INCLUDES) + elseif(PARM STREQUAL DESTINATION) + set(NOW_IN DESTINATION) + else() + if(NOW_IN STREQUAL DEPENDS) + set(_DEPENDS ${_DEPENDS} ${PARM}) + elseif(NOW_IN STREQUAL SOURCES) + set(_SOURCES ${_SOURCES} ${PARM}) + elseif(NOW_IN STREQUAL TESTS) + set(_TESTS ${_TESTS} ${PARM}) + SET(HAVE_TESTS TRUE) + elseif(NOW_IN STREQUAL PCH) + set(_PCH ${PARM}) + SET(HAVE_PCH TRUE) + elseif(NOW_IN STREQUAL INCLUDES) + set(_INCLUDES ${_INCLUDES} ${PARM}) + elseif(NOW_IN STREQUAL DESTINATION) + set(_SUFFIX_DESTINATION ${PARM}) + else() + message(FATAL_ERROR "Unknown argument ${PARM}.") + endif() + endif() + list(REMOVE_AT PARAMETERS 0) + endwhile() +endmacro() + +function(cmaki_simple_executable) + cmaki_parse_parameters(${ARGV}) + set(_EXECUTABLE_NAME ${_MAIN_NAME}) + MESSAGE("++ executable ${_EXECUTABLE_NAME}") + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_EXECUTABLE_NAME}) + foreach(INCLUDE_DIR ${_INCLUDES}) + target_include_directories(${_EXECUTABLE_NAME} ${INCLUDE_DIR}) + endforeach() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + if(WIN32) + ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) + else() + ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) + endif() + target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_EXECUTABLE_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_EXECUTABLE_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() + generate_clang() + +endfunction() + +function(cmaki_simple_library) + cmaki_parse_parameters(${ARGV}) + set(_LIBRARY_NAME ${_MAIN_NAME}) + MESSAGE("++ library ${_LIBRARY_NAME}") + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_LIBRARY_NAME}) + foreach(INCLUDE_DIR ${_INCLUDES}) + target_include_directories(${_LIBRARY_NAME} ${INCLUDE_DIR}) + endforeach() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) + target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_LIBRARY_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_LIBRARY_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + 
CONFIGURATIONS ${BUILD_TYPE}) + endforeach() + generate_clang() + +endfunction() + +function(cmaki_simple_test) + cmaki_parse_parameters(${ARGV}) + set(_TEST_NAME ${_MAIN_NAME}) + common_flags() + common_linking(${_TEST_NAME}) + MESSAGE("++ test ${_TEST_NAME}") + foreach(INCLUDE_DIR ${_INCLUDES}) + target_include_directories(${_TEST_NAME} ${INCLUDE_DIR}) + endforeach() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_executable(${_TEST_NAME} ${_SOURCES}) + target_link_libraries(${_TEST_NAME} ${_DEPENDS}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_TEST_NAME} -lpthread) + endif() + endif() + common_linking(${_TEST_NAME}) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_TEST_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + if(WIN32) + add_test( + NAME ${_TEST_NAME}__ + COMMAND ${_TEST_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + else() + + if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) + find_program(VALGRIND "valgrind") + if(VALGRIND) + add_test( + NAME ${_TEST_NAME}_memcheck + COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_cachegrind + COMMAND "${VALGRIND}" --tool=cachegrind $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_helgrind + COMMAND "${VALGRIND}" --tool=helgrind $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_callgrind + COMMAND "${VALGRIND}" --tool=callgrind $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_drd + COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + else() + message(FATAL_ERROR "no valgrind detected") + endif() + else() + add_test( + NAME ${_TEST_NAME}_test + COMMAND bash cmaki_emulator.sh $ + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + CONFIGURATIONS ${BUILD_TYPE}) + endif() + endif() + endforeach() + generate_vcxproj_user(${_TEST_NAME}) + generate_clang() + +endfunction() + +macro(common_linking) + + set(PARAMETERS ${ARGV}) + list(GET PARAMETERS 0 TARGET) + # if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) + # target_link_libraries(${TARGET} -lubsan) + # endif() + +endmacro() + +macro(common_flags) + + if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) + add_definitions(/wd4251) + add_definitions(/wd4275) + add_definitions(/wd4239) + add_definitions(/wd4316) + add_definitions(/wd4127) + add_definitions(/wd4245) + add_definitions(/wd4458) + add_definitions(/wd4146) + add_definitions(/wd4244) + add_definitions(/wd4189) + add_definitions(/wd4100) + add_definitions(/wd4706) + add_definitions(/WX /W4) + add_definitions(-Zm200) + endif() + + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") + 
endif() + +endmacro() + +macro(enable_modern_cpp) + + if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) + add_definitions(/EHsc) + add_definitions(/D_SCL_SECURE_NO_WARNINGS) + else() + # add_definitions(-fno-rtti -fno-exceptions ) + # activate all warnings and convert in errors + # add_definitions(-Weffc++) + # add_definitions(-pedantic -pedantic-errors) + + # Python: need disabling: initialization discards ‘const’ qualifier from pointer target type + # add_definitions(-Werror) + + add_definitions(-Wall -Wextra -Waggregate-return -Wcast-align -Wcast-qual -Wconversion) + add_definitions(-Wdisabled-optimization -Wformat=2 -Wformat-nonliteral -Wformat-security -Wformat-y2k) + add_definitions(-Wimport -Winit-self -Winline -Winvalid-pch -Wlong-long -Wmissing-field-initializers -Wmissing-format-attribute) + add_definitions(-Wpointer-arith -Wredundant-decls -Wshadow) + add_definitions(-Wstack-protector -Wunreachable-code -Wunused) + add_definitions(-Wunused-parameter -Wvariadic-macros -Wwrite-strings) + add_definitions(-Wswitch-default -Wswitch-enum) + # only gcc + # convert error in warnings + add_definitions(-Wno-error=shadow) + add_definitions(-Wno-error=long-long) + add_definitions(-Wno-error=aggregate-return) + add_definitions(-Wno-error=unused-variable) + add_definitions(-Wno-error=unused-parameter) + add_definitions(-Wno-error=deprecated-declarations) + add_definitions(-Wno-error=missing-include-dirs) + add_definitions(-Wno-error=packed) + add_definitions(-Wno-error=switch-default) + add_definitions(-Wno-error=float-equal) + add_definitions(-Wno-error=invalid-pch) + add_definitions(-Wno-error=cast-qual) + add_definitions(-Wno-error=conversion) + add_definitions(-Wno-error=switch-enum) + add_definitions(-Wno-error=redundant-decls) + add_definitions(-Wno-error=stack-protector) + add_definitions(-Wno-error=extra) + add_definitions(-Wno-error=unused-result) + add_definitions(-Wno-error=sign-compare) + + # raknet + add_definitions(-Wno-error=address) + add_definitions(-Wno-error=cast-qual) + add_definitions(-Wno-error=missing-field-initializers) + add_definitions(-Wno-error=write-strings) + add_definitions(-Wno-error=format-nonliteral) + + # sdl2 + add_definitions(-Wno-error=sign-conversion) + + # TODO: remove + add_definitions(-Wno-error=reorder) + + # if not have openmp + add_definitions(-Wno-error=unknown-pragmas) + + if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + add_definitions(-Wno-error=suggest-attribute=format) + add_definitions(-Wno-error=suggest-attribute=noreturn) + add_definitions(-Wno-aggregate-return) + add_definitions(-Wno-long-long) + add_definitions(-Wno-shadow) + add_definitions(-Wno-strict-aliasing) + add_definitions(-Wno-error=inline) + add_definitions(-Wno-error=maybe-uninitialized) + add_definitions(-Wno-error=unused-but-set-variable) + add_definitions(-Wno-error=unused-local-typedefs) + # add_definitions(-Wno-error=float-conversion) + else() + add_definitions(-Wstrict-aliasing=2) + add_definitions(-Wno-error=format-nonliteral) + add_definitions(-Wno-error=cast-align) + add_definitions(-Wno-error=deprecated-register) + add_definitions(-Wno-error=mismatched-tags) + add_definitions(-Wno-error=overloaded-virtual) + add_definitions(-Wno-error=unused-private-field) + add_definitions(-Wno-error=unreachable-code) + # add_definitions(-Wno-error=discarded-qualifiers) + endif() + + # In Linux default now is not export symbols + # add_definitions(-fvisibility=hidden) + + # stop in first error + if(FIRST_ERROR) + add_definitions(-Wfatal-errors) + endif() + + endif() + + if (NOT DEFINED EXTRA_DEF) 
+ if(NOT WIN32 OR MINGW OR MSYS) + include(CheckCXXCompilerFlag) + CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) + CHECK_CXX_COMPILER_FLAG("-std=c++1y" COMPILER_SUPPORTS_CXX1Y) + CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) + CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) + + if(COMPILER_SUPPORTS_CXX14) + set(CMAKE_CXX_STANDARD 14) + message("-- C++14 Enabled") + elseif(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_STANDARD 11) + message("-- C++11 Enabled") + elseif(COMPILER_SUPPORTS_CXX0X) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") + message("-- C++0x Enabled") + else() + message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") + endif() + endif() + else() + add_definitions(${EXTRA_DEF}) + endif() + + # TODO: need different combinations of artifacts (coverage=off / coverage=on, etc ...) + # if ((DEFINED COVERAGE) AND (COVERAGE STREQUAL "TRUE")) + # https://github.com/google/sanitizers/wiki/AddressSanitizerAsDso + # flags + if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 --coverage") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-elide-constructors") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-inline") + endif() + + # linker flags + if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) + SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage") + endif() + # endif() + +endmacro() + +macro(generate_vcxproj_user _EXECUTABLE_NAME) + IF(MSVC) + set(project_vcxproj_user "${CMAKE_CURRENT_BINARY_DIR}/${_EXECUTABLE_NAME}.vcxproj.user") + if (NOT EXISTS ${project_vcxproj_user}) + FILE(WRITE "${project_vcxproj_user}" + "\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n") + endif() + ENDIF() +endmacro() + +macro(generate_clang) + # Generate .clang_complete for full completation in vim + clang_complete + set(extra_parameters "") + get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES) + foreach(dir ${dirs}) + set(extra_parameters ${extra_parameters} -I${dir}) + endforeach() + get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY COMPILE_DEFINITIONS) + foreach(dir ${dirs}) + set(extra_parameters ${extra_parameters} -D${dir}) + endforeach() + STRING(REGEX REPLACE ";" "\n" extra_parameters "${extra_parameters}") + FILE(WRITE "${CMAKE_CURRENT_SOURCE_DIR}/.clang_complete" "${extra_parameters}\n") +endmacro() diff --git a/node_modules/npm-mas-mas/cmaki/facts/facts.cmake b/node_modules/npm-mas-mas/cmaki/facts/facts.cmake new file mode 100644 index 0000000..b5409fd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/facts/facts.cmake @@ -0,0 +1,735 @@ +cmake_minimum_required(VERSION 2.8) +cmake_policy(SET CMP0011 NEW) +cmake_policy(SET CMP0045 OLD) + +find_program(PYTHON_EXECUTABLE NAMES python3.6 python3.5 python3 python) + +IF(NOT DEFINED CMAKI_PWD) + set(CMAKI_PWD $ENV{CMAKI_PWD}) +ENDIF() + +IF(NOT DEFINED CMAKI_INSTALL) + set(CMAKI_INSTALL $ENV{CMAKI_INSTALL}) +ENDIF() + +IF(NOT DEFINED NPP_ARTIFACTS_PATH) + set(NPP_ARTIFACTS_PATH ${CMAKI_PWD}/artifacts) +ENDIF() + +IF(NOT DEFINED CMAKE_PREFIX_PATH) + set(CMAKE_PREFIX_PATH ${NPP_ARTIFACTS_PATH}/cmaki_find_package) +ENDIF() + +IF(NOT DEFINED NPP_GENERATOR_PATH) + 
set(NPP_GENERATOR_PATH ${CMAKI_PATH}/../cmaki_generator) +ENDIF() + +IF(NOT DEFINED NPP_PACKAGE_JSON_FILE) + set(NPP_PACKAGE_JSON_FILE ${CMAKI_PATH}/../../artifacts.json) +ENDIF() + +if(NOT DEFINED CMAKI_IDENTIFIER OR NOT DEFINED CMAKI_PLATFORM) + set(ENV{CMAKI_INFO} ALL) + include(${CMAKI_PWD}/bin/cmaki_identifier.cmake) + set(CMAKI_IDENTIFIER "${PLATFORM}") + set(CMAKI_PLATFORM "${PLATFORM}") +endif() + +MESSAGE("CMAKI_PWD = ${CMAKI_PWD}") +MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") +MESSAGE("CMAKI_PATH = ${CMAKI_PATH}") +MESSAGE("NPP_ARTIFACTS_PATH = ${NPP_ARTIFACTS_PATH}") +MESSAGE("NPP_GENERATOR_PATH = ${NPP_GENERATOR_PATH}") +MESSAGE("NPP_PACKAGE_JSON_FILE = ${NPP_PACKAGE_JSON_FILE}") +MESSAGE("CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") +MESSAGE("CMAKE_MODULE_PATH = ${CMAKE_MODULE_PATH}") +MESSAGE("CMAKI_IDENTIFIER = ${CMAKI_IDENTIFIER}") +MESSAGE("CMAKI_PLATFORM = ${CMAKI_PLATFORM}") + +function(cmaki_find_package) + + message("-- begin cmaki_find_package") + + set(PARAMETERS ${ARGV}) + list(LENGTH PARAMETERS ARGV_LENGTH) + list(GET PARAMETERS 0 PACKAGE) + set(VERSION_REQUEST "") + set(CALL_RECURSIVE "TRUE") + set(PARM1 "") + if(ARGV_LENGTH GREATER 1) + list(GET PARAMETERS 1 PARM1) + message("-- extra parm1: ${PARM1}") + if(PARM1 STREQUAL "NONRECURSIVE") + message("${PACKAGE} is not recursive") + set(CALL_RECURSIVE "FALSE") + else() + message("${PACKAGE} is recursive") + set(VERSION_REQUEST "${PARM1}") + endif() + endif() + + IF(NOT DEFINED CMAKI_REPOSITORY) + set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") + ENDIF() + + # 2.5. define flags + set(FORCE_GENERATION NOT "$ENV{NPP_CACHE}") + + if(VERSION_REQUEST STREQUAL "") + ## + message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE}") + ## + # 1. obtener la version actual (o ninguno en caso de no tener el artefacto) + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + set(VERSION_REQUEST "${RESULT_VERSION}") + set(EXTRA_VERSION "--version=${VERSION_REQUEST}") + else() + set(VERSION_REQUEST "") + set(EXTRA_VERSION "") + endif() + + else() + # explicit version required from parameters + set(EXTRA_VERSION "--version=${VERSION_REQUEST}") + endif() + + message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION}") + ####################################################### + # 2. 
obtener la mejor version buscando en la cache local y remota + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + list(GET RESULT_VERSION 0 PACKAGE_MODE) + list(GET RESULT_VERSION 1 PACKAGE_NAME) + list(GET RESULT_VERSION 2 VERSION) + message("now PACKAGE_MODE = ${PACKAGE_MODE}") + message("now PACKAGE_NAME = ${PACKAGE_NAME}") + message("now VERSION = ${VERSION}") + if(PACKAGE_MODE STREQUAL "UNSUITABLE") + set(PACKAGE_MODE "EXACT") + set(VERSION ${VERSION_REQUEST}) + message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... (error 1)") + # avoid remote cache, need build + set(FORCE_GENERATION "TRUE") + endif() + else() + set(PACKAGE_MODE "EXACT") + set(VERSION ${VERSION_REQUEST}) + message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... (error 2)") + # avoid remote cache, need build + set(FORCE_GENERATION "TRUE") + endif() + ####################################################### + + # cmaki_find_package of depends + message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE DEPENDS_PACKAGES + OUTPUT_STRIP_TRAILING_WHITESPACE) + + if("${CALL_RECURSIVE}") + foreach(DEP ${DEPENDS_PACKAGES}) + if(PACKAGE STREQUAL "${DEP}") + message("-- skip: ${DEP}") + else() + message("-- cmaki_find_package: ${DEP}") + cmaki_find_package("${DEP}" NONRECURSIVE) + endif() + endforeach() + endif() + + get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) + get_filename_component(package_name_version "${package_dir}" NAME) + + # 3. 
si no tengo los ficheros de cmake, los intento descargar + set(artifacts_dir "${NPP_ARTIFACTS_PATH}") + set(depends_bin_package "${artifacts_dir}/${PACKAGE}-${VERSION}") + set(depends_package "${artifacts_dir}/${PACKAGE}-${VERSION}") + # pido un paquete, en funcion de: + # - paquete + # - version + # - plataforma + # - modo (COMPATIBLE / EXACT) + # Recibo el que mejor se adapta a mis especificaciones + # Otra opcion es enviar todos los ficheros de cmake de todas las versiones + + set(package_cmake_filename "${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz") + set(package_marker "${CMAKE_PREFIX_PATH}/${package_name_version}/${CMAKI_IDENTIFIER}.cmake") + set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") + set(package_generated_file ${artifacts_dir}/${package_filename}) + + set(COPY_SUCCESFUL FALSE) + IF(EXISTS "${package_cmake_abspath}") + message("-- reusing cmake file ${package_cmake_abspath}") + set(COPY_SUCCESFUL TRUE) + else() + if(NOT "${FORCE_GENERATION}") + set(http_package_cmake_filename "${CMAKI_REPOSITORY}/download.php?file=${package_cmake_filename}") + message("-- download file: ${http_package_cmake_filename} in ${package_cmake_abspath}") + cmaki_download_file("${http_package_cmake_filename}" "${package_cmake_abspath}") + if(NOT "${COPY_SUCCESFUL}") + file(REMOVE "${package_binary_filename}") + message("Error downloading ${http_package_cmake_filename}") + endif() + else() + message("WARN: no using cache remote for: ${PACKAGE}") + endif() + endif() + + if(NOT "${COPY_SUCCESFUL}") + message("fail download") + else() + message("reused or downloaded") + endif() + + # si la descarga no ha ido bien O no quieres utilizar cache + if(NOT "${COPY_SUCCESFUL}" OR FORCE_GENERATION STREQUAL "TRUE") + + # 5. compilo y genera el paquete en local + message("Generating artifact ${PACKAGE} ...") + + ### + message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o") + ### + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + RESULT_VARIABLE artifacts_result + ) + if(artifacts_result) + message(FATAL_ERROR "can't create artifact ${PACKAGE}: error ${artifacts_result}") + endif() + + ####################################################### + # 6: obtengo la version del paquete creado + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + list(GET RESULT_VERSION 0 PACKAGE_MODE) + list(GET RESULT_VERSION 1 PACKAGE_NAME) + list(GET RESULT_VERSION 2 VERSION) + message("NEW! PACKAGE_MODE = ${PACKAGE_MODE}") + message("NEW! PACKAGE_NAME = ${PACKAGE_NAME}") + message("NEW! 
VERSION = ${VERSION}") + else() + message(FATAL_ERROR "-- not found ${PACKAGE}.") + endif() + ####################################################### + + set(package_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz) + set(package_cmake_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz) + # refresh name (NEW $VERSION is generated) + set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") + + # 7. descomprimo el artefacto + execute_process( + COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" + WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}" + RESULT_VARIABLE uncompress_result + ) + if(uncompress_result) + message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") + endif() + + # y tambien descomprimo el propio tar gz + # execute_process( + # COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_generated_file}" + # WORKING_DIRECTORY "${artifacts_dir}/" + # RESULT_VARIABLE uncompress_result2 + # ) + # if(uncompress_result2) + # message(FATAL_ERROR "Extracting ${package_generated_file} failed! Error ${uncompress_result2}") + # endif() + + # tengo el cmake pero no esta descomprimido + elseif(EXISTS "${package_cmake_abspath}" AND NOT EXISTS "${package_marker}") + + message("-- only uncompress") + ################ + message("${CMAKE_COMMAND} -E tar zxf ${package_cmake_abspath}") + ################ + + # 10. lo descomprimo + execute_process( + COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" + WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}/" + RESULT_VARIABLE uncompress_result) + if(uncompress_result) + message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") + endif() + + else() + + # tengo cmake, y esta descomprmido + message("-- nothing to do") + message("-- ${package_cmake_abspath}") + message("-- ${package_marker}") + + endif() + + + # 12. hacer find_package tradicional, ahora que tenemos los ficheros de cmake + if(${PACKAGE_MODE} STREQUAL "EXACT") + message("-- using ${PACKAGE} ${VERSION} in EXACT") + find_package(${PACKAGE} ${VERSION} EXACT REQUIRED) + else() + message("-- using ${PACKAGE} ${VERSION} in COMPATIBLE") + find_package(${PACKAGE} ${VERSION} REQUIRED) + endif() + + # generate json + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/save_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} --version=${VERSION} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + message("error saving ${PACKAGE}:${VERSION} in ${artifacts_dir}") + endif() + + # 13 add includes + string(TOUPPER "${PACKAGE}" PACKAGE_UPPER) + foreach(INCLUDE_DIR ${${PACKAGE_UPPER}_INCLUDE_DIRS}) + list(APPEND CMAKI_INCLUDE_DIRS "${INCLUDE_DIR}") + endforeach() + + # 14. add libdirs + foreach(LIB_DIR ${${PACKAGE_UPPER}_LIBRARIES}) + list(APPEND CMAKI_LIBRARIES "${LIB_DIR}") + endforeach() + + # 15. add vers specific + set(${PACKAGE_UPPER}_INCLUDE_DIRS "${${PACKAGE_UPPER}_INCLUDE_DIRS}" PARENT_SCOPE) + set(${PACKAGE_UPPER}_LIBRARIES "${${PACKAGE_UPPER}_LIBRARIES}" PARENT_SCOPE) + + # 16. 
add vars globals + set(CMAKI_INCLUDE_DIRS "${CMAKI_INCLUDE_DIRS}" PARENT_SCOPE) + set(CMAKI_LIBRARIES "${CMAKI_LIBRARIES}" PARENT_SCOPE) + + message("-- end cmaki_find_package") + +endfunction() + +macro(cmaki_package_version_check) + # llamar a check_remote_version + # dando el nombre recibo la version + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE_FIND_NAME} --version=${PACKAGE_FIND_VERSION} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + list(GET RESULT_VERSION 0 RESULT) + list(GET RESULT_VERSION 1 NAME) + list(GET RESULT_VERSION 2 VERSION) + ################################### + set(PACKAGE_VERSION_${RESULT} 1) + set(${NAME}_VERSION ${VERSION}) +endmacro() + +function(cmaki_install_3rdparty) + foreach(CMAKI_3RDPARTY_TARGET ${ARGV}) + foreach(CMAKI_BUILD_TYPE ${CMAKE_CONFIGURATION_TYPES} ${CMAKE_BUILD_TYPE}) + string(TOUPPER "${CMAKI_BUILD_TYPE}" CMAKI_BUILD_TYPE_UPPER) + get_target_property(CMAKI_3RDPARTY_TARGET_TYPE ${CMAKI_3RDPARTY_TARGET} TYPE) + if(${CMAKI_3RDPARTY_TARGET_TYPE} STREQUAL "SHARED_LIBRARY") + get_target_property(CMAKI_3RDPARTY_TARGET_LOCATION ${CMAKI_3RDPARTY_TARGET} IMPORTED_LOCATION_${CMAKI_BUILD_TYPE_UPPER}) + get_target_property(CMAKI_3RDPARTY_TARGET_SONAME ${CMAKI_3RDPARTY_TARGET} IMPORTED_SONAME_${CMAKI_BUILD_TYPE_UPPER}) + get_target_property(CMAKI_3RDPARTY_TARGET_PDB ${CMAKI_3RDPARTY_TARGET} IMPORTED_PDB_${CMAKI_BUILD_TYPE_UPPER}) + if(CMAKI_3RDPARTY_TARGET_SONAME) + get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION_PATH "${CMAKI_3RDPARTY_TARGET_LOCATION}" PATH) + set(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION_PATH}/${CMAKI_3RDPARTY_TARGET_SONAME}") + endif() + get_filename_component(CMAKI_3RDPARTY_TARGET_INSTALLED_NAME "${CMAKI_3RDPARTY_TARGET_LOCATION}" NAME) + get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION}" REALPATH) + install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_LOCATION} + DESTINATION ${CMAKI_BUILD_TYPE} + CONFIGURATIONS ${CMAKI_BUILD_TYPE} + RENAME ${CMAKI_3RDPARTY_TARGET_INSTALLED_NAME}) + if((NOT UNIX) AND EXISTS ${CMAKI_3RDPARTY_TARGET_PDB}) + get_filename_component(CMAKI_3RDPARTY_TARGET_PDB_NAME "${CMAKI_3RDPARTY_TARGET_PDB}" NAME) + install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_PDB} + DESTINATION ${CMAKI_BUILD_TYPE} + CONFIGURATIONS ${CMAKI_BUILD_TYPE} + RENAME ${CMAKI_3RDPARTY_TARGET_PDB_NAME}) + endif() + endif() + endforeach() + endforeach() +endfunction() + +function(cmaki_download_file THE_URL INTO_FILE) + set(COPY_SUCCESFUL FALSE PARENT_SCOPE) + file(DOWNLOAD ${THE_URL} ${INTO_FILE} STATUS RET) + list(GET RET 0 RET_CODE) + if(RET_CODE EQUAL 0) + set(COPY_SUCCESFUL TRUE PARENT_SCOPE) + else() + set(COPY_SUCCESFUL FALSE PARENT_SCOPE) + endif() +endfunction() + +macro(cmaki_download_package) + + message("-- begin cmaki_download_package") + if(NOT DEFINED CMAKI_REPOSITORY) + set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") + endif() + get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) + get_filename_component(package_name_version "${package_dir}" NAME) + set(package_filename "${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz") + set(http_package_filename ${CMAKI_REPOSITORY}/download.php?file=${package_filename}) + set(artifacts_dir "${NPP_ARTIFACTS_PATH}") + get_filename_component(artifacts_dir "${artifacts_dir}" ABSOLUTE) + set(package_binary_filename 
"${artifacts_dir}/${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz") + set(package_uncompressed_dir "${artifacts_dir}/${package_name_version}-binary.tmp") + set(package_marker "${artifacts_dir}/${package_name_version}/${CMAKI_IDENTIFIER}") + set(package_compressed_md5 "${package_dir}/${package_name_version}-${CMAKI_IDENTIFIER}.md5") + set(_MY_DIR "${package_dir}") + set(_DIR "${artifacts_dir}/${package_name_version}") + + if(NOT EXISTS "${package_binary_filename}") + message("download ${package_binary_filename} ...") + if(EXISTS "${package_compressed_md5}") + file(READ "${package_compressed_md5}" md5sum ) + string(REGEX MATCH "[0-9a-fA-F]*" md5sum "${md5sum}") + # TODO: use md5sum (use python for download) + # cmaki_download_file("${http_package_filename}" "${package_binary_filename}" "${md5sum}" ) + message("downloading ${http_package_filename}") + cmaki_download_file("${http_package_filename}" "${package_binary_filename}") + if(NOT "${COPY_SUCCESFUL}") + file(REMOVE "${package_binary_filename}") + message(FATAL_ERROR "Error downloading ${http_package_filename}") + endif() + else() + file(REMOVE_RECURSE "${package_dir}") + file(REMOVE_RECURSE "${_DIR}") + MESSAGE(FATAL_ERROR "Checksum for ${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz not found. Rejecting to download an untrustworthy file.") + endif() + endif() + + if(NOT EXISTS "${package_marker}") + message("Extracting ${package_binary_filename} into ${package_uncompressed_dir}...") + file(MAKE_DIRECTORY "${package_uncompressed_dir}") + execute_process( + COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_binary_filename}" + WORKING_DIRECTORY "${package_uncompressed_dir}" + RESULT_VARIABLE uncompress_result) + if(uncompress_result) + message(FATAL_ERROR "Extracting ${package_binary_filename} failed! 
Error ${uncompress_result}") + endif() + file(COPY "${package_uncompressed_dir}/${package_name_version}" DESTINATION "${artifacts_dir}") + file(REMOVE_RECURSE "${package_uncompressed_dir}") + endif() + message("-- end cmaki_download_package") + +endmacro() + +function(cmaki_executable) + cmaki_parse_parameters(${ARGV}) + set(_EXECUTABLE_NAME ${_MAIN_NAME}) + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_EXECUTABLE_NAME}) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + if(WIN32) + ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) + else() + ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) + endif() + # set_target_properties(${_EXECUTABLE_NAME} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_EXECUTABLE_NAME} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_EXECUTABLE_NAME} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_EXECUTABLE_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_EXECUTABLE_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() + generate_vcxproj_user(${_EXECUTABLE_NAME}) + +endfunction() + +function(cmaki_library) + cmaki_parse_parameters(${ARGV}) + set(_LIBRARY_NAME ${_MAIN_NAME}) + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_LIBRARY_NAME}) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) + # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_LIBRARY_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_LIBRARY_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_static_library) + cmaki_parse_parameters(${ARGV}) + set(_LIBRARY_NAME ${_MAIN_NAME}) + source_group( "Source Files" 
FILES ${_SOURCES} ) + common_flags() + common_linking(${_LIBRARY_NAME}) + add_definitions(-D${_LIBRARY_NAME}_STATIC) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_library(${_LIBRARY_NAME} STATIC ${_SOURCES}) + # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_LIBRARY_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_LIBRARY_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_test) + cmaki_parse_parameters(${ARGV}) + set(_TEST_NAME ${_MAIN_NAME}) + set(_TEST_SUFFIX "_unittest") + common_flags() + common_linking(${_TEST_NAME}${_TEST_SUFFIX}) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_executable(${_TEST_NAME}${_TEST_SUFFIX} ${_SOURCES}) + # set_target_properties(${_TEST_NAME}${_TEST_SUFFIX} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_TEST_NAME}${_TEST_SUFFIX} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) + find_program(VALGRIND "valgrind") + if(VALGRIND) + add_test( + NAME ${_TEST_NAME}_valgrind_memcheck + COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_cachegrind + COMMAND "${VALGRIND}" --tool=cachegrind $ --gmock_verbose=error + 
WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_helgrind + COMMAND "${VALGRIND}" --tool=helgrind $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_callgrind + COMMAND "${VALGRIND}" --tool=callgrind $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_valgrind_drd + COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + else() + message(FATAL_ERROR "no valgrind detected") + endif() + endif() + if(WIN32) + add_test( + NAME ${_TEST_NAME}${_TEST_SUFFIX} + COMMAND $ + WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE}) + else() + add_test( + NAME ${_TEST_NAME}${_TEST_SUFFIX} + COMMAND bash ../cmaki_emulator.sh $ + WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE}) + endif() + endforeach() + generate_vcxproj_user(${_TEST_NAME}) + +endfunction() + +macro(cmaki_google_test) + find_package(GTest REQUIRED) + find_package(GMock REQUIRED) + add_definitions(-DWITH_MAIN) + add_definitions(-DWITH_GMOCK) + set(PARAMETERS ${ARGV}) + list(GET PARAMETERS 0 _MAIN_NAME) + cmaki_test(${ARGV}) +endmacro() + +macro(cmaki_python_library) + # cmaki_find_package(python) + # cmaki_find_package(boost-python) + cmaki_library(${ARGV} PTHREADS) + cmaki_parse_parameters(${ARGV}) + set_target_properties(${_MAIN_NAME} PROPERTIES PREFIX "") + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_MAIN_NAME} + DESTINATION ${BUILD_TYPE}/lib/python3.5/lib-dynload + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endmacro() + +macro(cmaki_boost_python_test) + # cmaki_find_package(python) + # cmaki_find_package(boost-python) + cmaki_google_test(${ARGV} PTHREADS) + cmaki_parse_parameters(${ARGV}) + set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") +endmacro() + +macro(cmaki_python_test) + # cmaki_find_package(python) + cmaki_parse_parameters(${ARGV}) + add_test( NAME ${_MAIN_NAME}_test + COMMAND ./bin/python3 ${_SOURCES} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) + set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "LD_LIBRARY_PATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") +endmacro() + +macro(cmaki_python_install) + # cmaki_find_package(python) + # cmaki_find_package(boost-python) + get_filename_component(PYTHON_DIR ${PYTHON_EXECUTABLE} DIRECTORY) + get_filename_component(PYTHON_PARENT_DIR ${PYTHON_DIR} DIRECTORY) + cmaki_install_inside_dir(${PYTHON_PARENT_DIR}) +endmacro() + +macro(cmaki_find_package_boost) + if(CMAKE_BUILD_TYPE MATCHES Debug) + set(Boost_DEBUG 1) + else() + set(Boost_DEBUG 0) + endif() + find_package(Boost REQUIRED) + include_directories(${Boost_INCLUDE_DIRS}) +endmacro() + diff --git a/node_modules/npm-mas-mas/cmaki/init/.clang-format b/node_modules/npm-mas-mas/cmaki/init/.clang-format new file mode 100644 index 0000000..008e6b0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/init/.clang-format @@ -0,0 +1,66 @@ +--- +Language: Cpp +# BasedOnStyle: WebKit +# indent public: +AccessModifierOffset: -4 +AlignAfterOpenBracket: false +AlignEscapedNewlinesLeft: false +AlignOperands: false +AlignTrailingComments: true +AllowAllParametersOfDeclarationOnNextLine: 
false +AllowShortBlocksOnASingleLine: false +AllowShortCaseLabelsOnASingleLine: false +AllowShortIfStatementsOnASingleLine: false +AllowShortLoopsOnASingleLine: false +AllowShortFunctionsOnASingleLine: All +AlwaysBreakAfterDefinitionReturnType: false +AlwaysBreakTemplateDeclarations: true +AlwaysBreakBeforeMultilineStrings: false +BreakBeforeBinaryOperators: All +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: true +BinPackParameters: true +BinPackArguments: true +ColumnLimit: 100 +ConstructorInitializerAllOnOneLineOrOnePerLine: false +ConstructorInitializerIndentWidth: 4 +DerivePointerAlignment: false +ExperimentalAutoDetectBinPacking: false +IndentCaseLabels: true +IndentWrappedFunctionNames: false +IndentFunctionDeclarationAfterType: false +MaxEmptyLinesToKeep: 2 +KeepEmptyLinesAtTheStartOfBlocks: true +NamespaceIndentation: Inner +ObjCBlockIndentWidth: 4 +ObjCSpaceAfterProperty: true +ObjCSpaceBeforeProtocolList: true +PenaltyBreakBeforeFirstCallParameter: 19 +PenaltyBreakComment: 300 +PenaltyBreakString: 1000 +PenaltyBreakFirstLessLess: 120 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 60 +PointerAlignment: Left +SpacesBeforeTrailingComments: 2 +Cpp11BracedListStyle: true +Standard: Cpp11 +IndentWidth: 4 +TabWidth: 4 +UseTab: Always +BreakBeforeBraces: Allman +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpacesInAngles: false +SpaceInEmptyParentheses: false +SpacesInCStyleCastParentheses: false +SpaceAfterCStyleCast: false +SpacesInContainerLiterals: true +SpaceBeforeAssignmentOperators: true +ContinuationIndentWidth: 4 +CommentPragmas: '^ IWYU pragma:' +ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ] +SpaceBeforeParens: ControlStatements +DisableFormat: false +... + diff --git a/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl new file mode 100644 index 0000000..3ea29e5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl @@ -0,0 +1,120 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BuildName: + BuildStamp: + Name: + Generator: + CompilerName: + OSName: + Hostname: + OSRelease: + OSVersion: + OSPlatform: + Is64Bits: + VendorString: + VendorID: + FamilyID: + ModelID: + ProcessorCacheSize: + NumberOfLogicalCPU: + NumberOfPhysicalCPU: + TotalVirtualMemory: + TotalPhysicalMemory: + LogicalProcessorsPerPhysical: + ProcessorClockFrequency: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/npm-mas-mas/cmaki/junit/README.md b/node_modules/npm-mas-mas/cmaki/junit/README.md new file mode 100644 index 0000000..4f989c6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki/junit/README.md @@ -0,0 +1,3 @@ +# Source +https://bitbucket.org/shackra/ctest-jenkins/ + diff --git a/node_modules/npm-mas-mas/cmaki_docker/.travis.yml b/node_modules/npm-mas-mas/cmaki_docker/.travis.yml new file mode 100644 index 0000000..020ec9d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_docker/.travis.yml @@ -0,0 +1,4 @@ +services: docker +os: linux +script: + - ./build.sh diff --git a/node_modules/npm-mas-mas/cmaki_docker/LICENSE b/node_modules/npm-mas-mas/cmaki_docker/LICENSE new file mode 100644 index 0000000..53546c1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_docker/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and 
associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_docker/README.md b/node_modules/npm-mas-mas/cmaki_docker/README.md new file mode 100644 index 0000000..594568c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_docker/README.md @@ -0,0 +1,11 @@ +# cmaki_docker + +[![Build Status](https://travis-ci.org/makiolo/cmaki_docker.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_docker) + +multiple pusher of docker images. + +``` +for image in (windows-x86, windows-x64, linux-x86, linux-x64, ...) + makiolo/$image = dockcross/$image + github:makiolo/cmaki_scripts/cmaki_depends.sh +done +``` diff --git a/node_modules/npm-mas-mas/cmaki_docker/build.sh b/node_modules/npm-mas-mas/cmaki_docker/build.sh new file mode 100755 index 0000000..26e71f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_docker/build.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +#/bin/bash +prefix=$(pwd)/bin +mkdir -p $prefix + +# iterate in known images +curl https://raw.githubusercontent.com/dockcross/dockcross/master/Makefile -o dockcross-Makefile +for image in $(make -f dockcross-Makefile display_images); do + if [[ $(docker images -q dockcross/$image) != "" ]]; then + docker rmi -f dockcross/$image + echo dockcross/$image removed. + fi +done + +for image in $(make -f dockcross-Makefile display_images); do + + if [[ "$image" == "manylinux-x86" ]]; then + continue + fi + + if [[ "$image" == "manylinux-x64" ]]; then + continue + fi + + echo "copy dockcross/$image to makiolo/$image (with script change)" + cat<Dockerfile +FROM dockcross/$image:latest +ENV DEBIAN_FRONTEND noninteractive +RUN curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/cmaki_depends.sh | bash +EOF + + docker login -u $DOCKER_USER -p $DOCKER_PASSWORD + docker build . 
-t makiolo/$image + docker push makiolo/$image + + # clean + docker rmi -f dockcross/$image + docker rmi -f makiolo/$image +done + diff --git a/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt new file mode 100644 index 0000000..91cc3ac --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt @@ -0,0 +1,95 @@ +project(cmaki_generator) +cmake_minimum_required(VERSION 3.0) + +MESSAGE("-- compiler ${CMAKI_COMPILER}, platform ${CMAKI_PLATFORM}") + +include(cmaki) + +IF(CMAKE_BUILD_TYPE STREQUAL "Debug") + MESSAGE("-- Debug Mode") + SET(GLOBAL_BUILD_MODE "Debug") +ELSEIF(CMAKE_BUILD_TYPE STREQUAL "Release") + MESSAGE("-- Release Mode") + SET(GLOBAL_BUILD_MODE "Release") +ELSEIF(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") + MESSAGE("-- RelWithDebInfo Mode") + SET(GLOBAL_BUILD_MODE "RelWithDebInfo") +ELSE() + MESSAGE("-- Build mode default to Release") + MESSAGE("-- Release Mode") + SET(GLOBAL_BUILD_MODE "Release") +ENDIF() + +IF(NOT PACKAGE) + SET(PACKAGE "packagename_invalid") + MESSAGE(FATAL_ERROR "Invalid package name") +ENDIF() + +IF(NOT PACKAGE_VERSION) + SET(PACKAGE_VERSION "verson_invalid") + MESSAGE(FATAL_ERROR "Invalid version in package") +ENDIF() + +IF(NOT LIBRARY_TYPE) + SET(LIBRARY_TYPE "STATIC") +ENDIF() + +SET(PACKAGE "${PACKAGE}" CACHE STRING "Package to compile") +SET(PACKAGE_VERSION "${PACKAGE_VERSION}" CACHE STRING "Version to compile") + +SET(CMAKE_VERBOSE_MAKEFILE ON) +# Use relative paths on Windows, to reduce path size for command-line limits +if (WIN32) + set(CMAKE_USE_RELATIVE_PATHS true) + set(CMAKE_SUPPRESS_REGENERATION true) +endif() + +IF(NOT DEFINED GTC_INSTALL_PREFIX) + SET(GTC_INSTALL_PREFIX "${NPP_ARTIFACTS_PATH}/${PACKAGE}-${PACKAGE_VERSION}-${CMAKI_PLATFORM}/${PACKAGE}-${PACKAGE_VERSION}") + SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}") + SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder executables") + SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder libs") +ELSE() + SET(GTC_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") + SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") + SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/bin" CACHE PATH "Folder executables") + SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/lib" CACHE PATH "Folder libs") +ENDIF() + +MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") +MESSAGE("GTC_INSTALL_PREFIX = ${GTC_INSTALL_PREFIX}") +MESSAGE("CMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}") +MESSAGE("EXECUTABLE_OUTPUT_PATH = ${EXECUTABLE_OUTPUT_PATH}") +MESSAGE("LIBRARY_OUTPUT_PATH = ${LIBRARY_OUTPUT_PATH}") + +# gnu variables can prepend CMAKE_INSTALL_PREFIX +set(CMAKE_INSTALL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") +set(CMAKE_INSTALL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") +set(CMAKE_INSTALL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") +set(CMAKE_INSTALL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") +set(CMAKE_INSTALL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") +set(CMAKE_INSTALL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") +set(CMAKE_INSTALL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") +set(CMAKE_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") +set(CMAKE_INSTALL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") +set(CMAKE_INSTALL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") +set(CMAKE_INSTALL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") 
+set(CMAKE_INSTALL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") +set(CMAKE_INSTALL_FULL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") +set(CMAKE_INSTALL_FULL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") +set(CMAKE_INSTALL_FULL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") +set(CMAKE_INSTALL_FULL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") +set(CMAKE_INSTALL_FULL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") +set(CMAKE_INSTALL_FULL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") +set(CMAKE_INSTALL_FULL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") +set(CMAKE_INSTALL_FULL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") +set(CMAKE_INSTALL_FULL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_FULL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_FULL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") +set(CMAKE_INSTALL_FULL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") +set(CMAKE_INSTALL_FULL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") +set(CMAKE_INSTALL_FULL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") +LINK_DIRECTORIES(${LIBRARY_OUTPUT_PATH}) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/LICENSE b/node_modules/npm-mas-mas/cmaki_generator/LICENSE new file mode 100644 index 0000000..7e79e4d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
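The cmaki.cmake functions earlier in this patch and the generator CMakeLists.txt above share a single naming convention for artifacts. The sketch below is a minimal illustration of that convention, assuming made-up package, version, platform and path values; the helper name is hypothetical, while the tarball names, download URL and install-prefix layout follow the CMake code above.

```
import os

def artifact_layout(package, version, platform, repository, artifacts_path):
    # Illustrative helper (hypothetical, not part of the patch): reproduces the naming
    # scheme used by cmaki_find_package / cmaki_download_package and the generator CMakeLists.txt.
    name_version = "{}-{}".format(package, version)
    cmake_tarball = "{}-{}-cmake.tar.gz".format(name_version, platform)
    return {
        # binary and cmake tarballs exchanged with the artifact server
        "binary_tarball": "{}-{}.tar.gz".format(name_version, platform),
        "cmake_tarball": cmake_tarball,
        "download_url": "{}/download.php?file={}".format(repository, cmake_tarball),
        # install prefix used while generating the package (GTC_INSTALL_PREFIX plus platform)
        "install_prefix": os.path.join(artifacts_path,
                                       "{}-{}".format(name_version, platform),
                                       name_version, platform),
    }

# Example with made-up values (the platform string is normally produced by cmaki_identifier):
print(artifact_layout("boost-headers", "1.66.0", "linux_64_glibc_2.27-gcc_7",
                      "http://artifacts.myftp.biz", "/work/app/artifacts"))
```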
+ diff --git a/node_modules/npm-mas-mas/cmaki_generator/README.md b/node_modules/npm-mas-mas/cmaki_generator/README.md new file mode 100644 index 0000000..6b5b746 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/README.md @@ -0,0 +1,22 @@ +# cmaki_generator [![npm version](https://badge.fury.io/js/cmaki_generator.svg)](https://badge.fury.io/js/cmaki_generator) + +gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_generator.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_generator) + +# artifacts responsability +- boost-headers +- boost-system +- boost-random +- boost-atomic +- boost-thread +- boost-chrono +- boost-context +- boost-coroutine2 +- boost-signals +- boost-test +- boost-regex +- boost-filesystem +- boost-program-options +- python +- boost-python +- boost-python-debug +- boost-serialization diff --git a/node_modules/npm-mas-mas/cmaki_generator/build b/node_modules/npm-mas-mas/cmaki_generator/build new file mode 100755 index 0000000..c98e1d8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/build @@ -0,0 +1,10 @@ +#!/bin/bash + +directory=$(dirname $0) +if hash cygpath 2>/dev/null; then + directory=$(cygpath -w ${directory}) +fi + +python "${directory}/build.py" "$@" +out=$? +exit ${out} diff --git a/node_modules/npm-mas-mas/cmaki_generator/build.cmd b/node_modules/npm-mas-mas/cmaki_generator/build.cmd new file mode 100644 index 0000000..e0ea6bd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/build.cmd @@ -0,0 +1,11 @@ +@ECHO OFF +SET DIRWORK=%~dp0 + +IF EXIST "%PYTHON%" ( + rem ok +) ELSE ( + set PYTHON=python +) + +SET PATH=%~dp0\bin;%PATH% +"%PYTHON%" %DIRWORK%\build.py %* diff --git a/node_modules/npm-mas-mas/cmaki_generator/build.py b/node_modules/npm-mas-mas/cmaki_generator/build.py new file mode 100644 index 0000000..5d86829 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/build.py @@ -0,0 +1,757 @@ +import os +import os.path +import sys +import fnmatch +import logging +import utils +import argparse +import pipeline +import traceback +import copy +import datetime +# object package +from third_party import ThirdParty +from collections import OrderedDict +from third_party import exceptions_fail_group +from third_party import exceptions_fail_program +from third_party import alias_priority_name +from third_party import alias_priority_name_inverse +from third_party import CMAKELIB_URL +from third_party import is_valid +from third_party import is_blacklisted +from third_party import prepare_cmakefiles +# gtc stages +from purge import purge +from prepare import prepare +from compilation import compilation +from packing import packing +from run_tests import run_tests +from upload import upload +from get_return_code import get_return_code +from third_party import FailThirdParty + +# GLOBAL NO MUTABLES +image_pattern = "image.%Y.%m.%d.%H%M" + +try: + import yaml +except ImportError: + logging.error('[Warning] Not yaml library present') + logging.error('[Warning] PyYAML (python extension) is mandatory') + if utils.is_windows(): + logging.error('You can use pip for install:') + logging.error(' pip intall pyyaml') + sys.exit(1) + +# Global mutable +compiler_replace_maps = {} + +# Global const +yaml_common_references = 'common.yml' +yaml_collapsed_third_parties = '.3p.yml' +yaml_collapsed_final = '.data.yml' + +class Loader(yaml.Loader): + def __init__(self, stream): + self._root = os.path.split(stream.name)[0] + super(Loader, self).__init__(stream) + + def include(self, node): + filename = 
os.path.join(self._root, self.construct_scalar(node)) + with open(filename, 'r') as f: + return yaml.load(f, Loader) + +def amalgamation_yaml(rootdir, yamlfile=None): + Loader.add_constructor('!include', Loader.include) + + # autogeneration .data.yml + yaml_collapsed_final_abspath = os.path.join(rootdir, yaml_collapsed_final) + yaml_common_references_abspath = os.path.join(rootdir, yaml_common_references) + with open(yaml_collapsed_final_abspath, 'wt') as f: + f.write('# autogenerated file, dont edit it !!!---\n') + f.write('---\n') + # inject common.yml + f.write('%sreferences:\n' % (' '*4)) + with open(yaml_common_references_abspath, 'r') as fr: + for line in fr.readlines(): + f.write('%s%s' % (' '*8, line)) + collapse_third_parties(rootdir, yaml_collapsed_third_parties, yamlfile=yamlfile) + if yamlfile is None and not parameters.no_back_yaml: + node_modules_dir = os.path.abspath(os.path.join(rootdir, '..', '..')) + for path in os.listdir(node_modules_dir): + fullpath = os.path.join(os.path.abspath(node_modules_dir), path) + if os.path.isdir(fullpath): + cmaki_file = os.path.join(fullpath, 'cmaki.yml') + if os.path.isfile(cmaki_file): + with open(cmaki_file, 'r') as fr: + with open(yaml_collapsed_third_parties, 'a') as tp_append: + for line in fr.readlines(): + tp_append.write(line) + # inject third_parties.yml + f.write('%sthird_parties:\n' % (' '*4)) + with open(yaml_collapsed_third_parties) as ft: + for line in ft.readlines(): + # sys.stdout.write("searching {}".format(line)) + f.write('%s%s' % (' '*8, line)) + +def search_nodes_by_key(list_nodes, found_key): + nodes = [] + for key, node in list_nodes: + if key == found_key: + nodes.append(node) + return nodes + +def collapse_third_parties(rootdir, filename, yamlfile=None): + p = pipeline.make_pipe() + # begin + if yamlfile is None: + p = pipeline.find(rootdir, 3)(p) + else: + p = pipeline.echo(yamlfile)(p) + # exclusions + p = pipeline.endswith('.yml')(p) + p = pipeline.grep_v('.travis.yml')(p) + p = pipeline.grep_v('shippable.yml')(p) + p = pipeline.grep_v('appveyor.yml')(p) + p = pipeline.grep_v('codecov.yml')(p) + p = pipeline.grep_v('.github')(p) + p = pipeline.grep_v('docker-compose.yml')(p) + p = pipeline.grep_v('circle.yml')(p) + p = pipeline.grep_v('_config.yml')(p) + p = pipeline.grep_v('.circleci-matrix.yml')(p) + p = pipeline.grep_v('.build_')(p) + p = pipeline.grep_v(yaml_collapsed_final)(p) + p = pipeline.grep_v(yaml_common_references)(p) + p = pipeline.grep_v(yaml_collapsed_third_parties)(p) + p = pipeline.grep_v(' - Copy.yml')(p) + p = pipeline.info('---> (yaml found.) 
')(p) + # cat + p = pipeline.cat()(p) + # p = pipeline.info('amalgamated: ')(p) + # write + p = pipeline.write_file(filename)(p) + # end + pipeline.end_pipe()(p) + +def run_purge(solutions): + + # create pipeline + with pipeline.create() as (p, finisher): + + # feed all packages + p = pipeline.feed(packages)(p) + + # clean intermediate folders + p = pipeline.do(purge, True, parameters)(p) + + # close pipe + finisher.send(p) + +def convert_priority_to_integer(priority): + if priority is not None: + error = False + if priority in alias_priority_name_inverse: + priority = alias_priority_name_inverse[priority] + else: + try: + priority_integer = int(priority) + if priority_integer in alias_priority_name: + priority = priority_integer + else: + error = True + except ValueError: + error = True + if error: + logging.error('Invalid priority name: %s' % priority) + sys.exit(1) + return priority + +def show_results(parameters, groups_ordered, rets, unittests): + # show final report + anyFail = 0 + if len(rets) > 0: + logging.info('-' * 80) + logging.info('') + for name in rets: + state = rets[name] + if state != "OK": + anyFail = 1 + + # package with unittests? + if name in unittests: + try: + result_test = unittests[name] + except KeyError: + result_test = 'No unittest found' + + if state != "OK": + logging.info("Compiled %30s - STATUS: %15s" % (name, state)) + else: + # only want know test result if is OK + logging.info("Compiled %30s - STATUS: %15s - TESTS: %s" % (name, state, result_test)) + else: + logging.info("Compiled %30s - STATUS: %15s" % (name, state)) + + logging.info('') + logging.info( '-'* 80) + else: + anyFail = 1 + logging.error('No results generated.') + + # any have exceptions ? + have_exceptions = False + for _, packages in groups_ordered: + for node in packages: + if len(node.exceptions) > 0: + have_exceptions = True + + if have_exceptions: + logging.error("---------- begin summary of exceptions ------------------------") + # show postponed exceptions + for _, packages in groups_ordered: + for node in packages: + if len(node.exceptions) > 0: + # something was wrong + anyFail = 1 + # show exceptions of this package + package = node.get_package_name() + version = node.get_version() + logging.error("package %s (%s) with exceptions" % (package, version)) + i = 0 + for exc_type, exc_value, exc_traceback in node.exceptions: + logging.error("---- Exception #%d / %d ----------" % (i+1, len(node.exceptions))) + traceback.print_exception(exc_type, exc_value, exc_traceback) + logging.error("----------------------------------") + i += 1 + logging.error("---------- end summary of exceptions ------------------------") + return anyFail + +def clean_subset(solutions): + groups = copy.deepcopy(solutions) + # 2/4: remove solutions are subset of other solution + for solution1 in solutions: + for solution2 in solutions: + if solution1 != solution2: + match = True + for node in solution1: + if node not in solution2: + match = False + break + if match and (solution1 in groups): + groups.remove(solution1) + return groups + +def init_parameter_path(value, default): + if value is None: + value = default + else: + # expand variables in no-windows + if not utils.is_windows(): + value = value.replace('~', utils.get_real_home()) + value = os.path.abspath(value) + return value + + +def parse_arguments(): + + parser = argparse.ArgumentParser(prog=""" + +cmaki_generator: + + Can build artifacts in a easy way. Each third-party need a block definition in yaml. 
This block contain all need information necessary for download, build, testing and packing. + +usage:""") + group_main = parser.add_argument_group('basic usage') + group_main.add_argument('packages', metavar='packages', type=str, nargs='*', + help='name (or list names) third party') + group_main.add_argument('--plan', '--dry-run', dest='plan', action='store_true', + help='Show packages plan (like a dry-run)', default=False) + group_main.add_argument('--server', dest='server', help='artifact server', default=None) + group_main.add_argument('--no-back-yaml', dest='no_back_yaml', action='store_true', help='no search back yaml', + default=False) + group_layer = group_main.add_mutually_exclusive_group() + group_layer.add_argument('--layer', dest='priority', + help='filter by layername. Valid values: (minimal|tools|third_party)', default=None) + group_layer.add_argument('--no-layer', dest='no_priority', + help='negation filter by layername. Valid values: (minimal|tools|third_party)', + default=None) + # group_main.add_argument('-t', '--tag', action='append', metavar='tag', type=str, help='NOT IMPLEMMENTED YET: filter tag third party') + group_padawan = parser.add_argument_group('padawan') + group_purge = group_padawan.add_mutually_exclusive_group() + group_purge.add_argument('--no-purge', dest='no_purge', action='store_true', help='remove purge from pipeline', + default=False) + group_purge.add_argument('--only-purge', dest='only_purge', action='store_true', + help='execute only purge in pipeline', default=False) + group_prepare = group_padawan.add_mutually_exclusive_group() + group_prepare.add_argument('--no-prepare', dest='no_prepare', action='store_true', + help='remove prepare from pipeline', default=False) + group_prepare.add_argument('--only-prepare', dest='only_prepare', action='store_true', + help='execute only prepare in pipeline', default=False) + group_compilation = group_padawan.add_mutually_exclusive_group() + group_compilation.add_argument('--no-compilation', dest='no_compilation', action='store_true', + help='remove compilation from pipeline', default=False) + group_compilation.add_argument('--only-compilation', dest='only_compilation', action='store_true', + help='execute only compilation in pipeline', default=False) + group_packing = group_padawan.add_mutually_exclusive_group() + group_packing.add_argument('--no-packing', dest='no_packing', action='store_true', + help='remove packing from pipeline', default=False) + group_packing.add_argument('--only-packing', dest='only_packing', action='store_true', + help='execute only packing in pipeline', default=False) + group_run_tests = group_padawan.add_mutually_exclusive_group() + group_run_tests.add_argument('--no-run-tests', dest='no_run_tests', action='store_true', + help='remove run_tests from pipeline', default=False) + group_run_tests.add_argument('--only-run-tests', dest='only_run_tests', action='store_true', + help='execute only run_tests in pipeline', default=False) + group_upload = group_padawan.add_mutually_exclusive_group() + group_upload.add_argument('--no-upload', dest='no_upload', action='store_true', help='remove upload from pipeline', + default=False) + group_upload.add_argument('--only-upload', dest='only_upload', action='store_true', + help='execute only upload in pipeline', default=False) + # creador de third parties + group_jedi = parser.add_argument_group('jedi') + group_jedi.add_argument('-o', '--only', dest='build_only', action='store_true', + help='build only explicit packages and not your depends') + 
group_jedi.add_argument('-v', '--verbose', action='count', help='verbose mode', default=0) + group_jedi.add_argument('-q', '--quiet', dest='quiet', action='store_true', help='quiet mode', default=False) + group_jedi.add_argument('-d', '--debug', action='store_true', help='Ridiculous debugging (probably not useful)') + group_jedi.add_argument('--purge-if-fail', dest='purge_if_fail', action='store_true', + help='purge even if a package finishes with a failure', default=False) + group_jedi.add_argument('--with-svn', dest='with_svn', help='svn executable', default=None) + group_jedi.add_argument('--fast', dest='fast', action='store_true', default=False, help=argparse.SUPPRESS) + group_jedi.add_argument('--log', dest='log', help='specify the full log path (default is "gtc.log")', + default='gtc.log') + group_jedi.add_argument('--no-packing-cmakefiles', action='store_true', dest='no_packing_cmakefiles', + help='do not pack cmakefiles', default=False) + group_jedi.add_argument('--blacklist', dest='blacklist', + help='third party in quarantine (default is $ROOTDIR + "blacklist.txt")', default=None) + group_jedi.add_argument('--no-blacklist', action='append', dest='no_blacklist', + help='list of packages (comma separated) that cancel the blacklist effect.', default=[]) + group_master_jedi = parser.add_argument_group('master jedi') + group_master_jedi.add_argument('--rootdir', dest='rootdir', + help='input folder with yamls, searched recursively (default is current directory)', + default=None) + group_master_jedi.add_argument('--prefix', dest='prefix', + help='output folder where packages will be generated (default is $ROOTDIR + "artifacts")', + default=None) + group_master_jedi.add_argument('--cmakefiles', dest='cmakefiles', + help='input folder with cmake scripts (default is $PREFIX + "cmakelib")', + default=None) + group_master_jedi.add_argument('--third-party-dir', dest='third_party_dir', + help='output folder for cmakefiles (default is $CMAKEFILES + "3rdparty")', + default=None) + group_master_jedi.add_argument('--depends', dest='depends', help='json file for saving versions', default=None) + group_master_jedi.add_argument('--yaml', dest='yaml', help='single file with the third party to compile', default=None) + parameters = parser.parse_args() + ''' + TODO: + refactor: + prefix = DEPENDS_PATH (cmake3p) (artifacts) + cmakefiles = CMAKI_PATH, CMAKE_MODULE_PATH (cmaki, cmaki_find_package) + third-party-dir = CMAKE_PREFIX_PATH (the artifacts/cmaki_find_package directory) (3rdparty) + rootdir = ARTIFACTS_PATH, the base directory where build.py lives (cmaki_generator) (generation scripts); it could also be CMAKI_PWD + CMAKI_INSTALL: where cmaki_identifier is expected to be installed + ''' + + cmaki_pwd = os.environ.get('CMAKI_PWD', os.getcwd()) + cmaki_install = os.environ.get('CMAKI_INSTALL', os.path.join(cmaki_pwd, 'bin')) + + ''' + axioms: + - cmaki_pwd + - cmaki_install + - cmaki + + rules: + - rootdir = cmaki/../cmaki_generator + - prefix = cmaki_pwd/artifacts + - third-party-dir = prefix/cmaki_find_package + - depends = cmaki_pwd/depends.json + - blacklist = rootdir/blacklist.txt + ''' + + + parameters.rootdir = init_parameter_path(parameters.rootdir, os.getcwd()) + parameters.prefix = init_parameter_path(parameters.prefix, os.path.join(cmaki_pwd, 'artifacts')) + parameters.third_party_dir = init_parameter_path(parameters.third_party_dir, os.path.join(parameters.prefix, 'cmaki_find_package')) + parameters.cmakefiles = init_parameter_path(parameters.cmakefiles, os.path.join(parameters.rootdir, '..', 'cmaki')) + 
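# Note (descriptive comment, not in the original script): when none of the path flags are given, the defaults above resolve to + # rootdir -> the current working directory + # prefix -> $CMAKI_PWD/artifacts + # third_party_dir -> $prefix/cmaki_find_package + # cmakefiles -> $rootdir/../cmaki + 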
parameters.blacklist = init_parameter_path(parameters.blacklist, os.path.join(parameters.rootdir, 'blacklist.txt')) + parameters.depends = init_parameter_path(parameters.depends, os.path.join(cmaki_pwd, 'depends.json')) + + # convert priority to int + parameters.priority = convert_priority_to_integer(parameters.priority) + parameters.no_priority = convert_priority_to_integer(parameters.no_priority) + if parameters.only_purge: + parameters.no_purge = False + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_prepare: + parameters.no_purge = True + parameters.no_prepare = False + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_compilation: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = False + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_packing: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = False + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_run_tests: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = False + parameters.no_upload = True + elif parameters.only_upload: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = False + + if parameters.server is None: + if 'NPP_SERVER' not in os.environ: + logging.warning('Using artifacts server by default. 
If you need, can explicit define environment var NPP_SERVER') + os.environ['NPP_SERVER'] = 'http://artifacts.myftp.biz' + parameters.server = os.environ['NPP_SERVER'] + + + if 'NPP_CACHE' not in os.environ: + logging.warning('Using enablibing npm++ cache by default.') + os.environ['NPP_CACHE'] = 'TRUE' + + return parameters + + +if __name__ == '__main__': + + parameters = parse_arguments() + + # prepare logging + if parameters.debug: + utils.setup_logging(logging.DEBUG, parameters.log) + else: + utils.setup_logging(logging.INFO, parameters.log) + + if parameters.verbose: + logging.info('parameters = {}'.format(parameters)) + + if not parameters.quiet: + logging.info('---- MODE: {}'.format( os.environ['MODE'] )) + logging.info('---- CMAKI_PWD: {}'.format( os.environ['CMAKI_PWD'] )) + logging.info('---- CMAKI_INSTALL: {}'.format( os.environ['CMAKI_INSTALL'] )) + logging.info('---- rootdir: {}'.format(parameters.rootdir)) + logging.info('---- prefix: {}'.format(parameters.prefix)) + logging.info('---- cmakefiles: {}'.format(parameters.cmakefiles)) + logging.info('---- third_party_dir: {}'.format(parameters.third_party_dir)) + logging.info('---- blacklist: {}'.format(parameters.blacklist)) + logging.info('---- depends: {}'.format(parameters.depends)) + + + + # fetch remotes yaml + # i = 0 + # for package in parameters.packages: + # if package.startswith('github://'): + # repo = package[len('github://'):] + # utils.trymkdir('github') + # yml_file = os.path.join('github', '{}.yml'.format(repo.replace('/', '_'))) + # if os.path.isfile(yml_file): + # utils.tryremove(yml_file) + # try: + # download_from_url('https://raw.githubusercontent.com/{}/master/cmaki.yml'.format(repo), yml_file) + # except urllib2.HTTPError: + # logging.error('not found cmaki.yml in {}'.format(package)) + # sys.exit(1) + # parameters.packages[i] = repo.split('/')[1] + # i += 1 + + prepare_cmakefiles(parameters.cmakefiles) + + # generate amalgaimation yaml + amalgamation_yaml(parameters.rootdir, parameters.yaml) + + # load yaml to python + with open(yaml_collapsed_final, 'rt') as fy: + third_parties_data_yaml = yaml.load(fy, Loader) + + # generate list of tuples (key, parameters) + count = 0 + third_parties_data = [] + for third in third_parties_data_yaml['third_parties']: + for key in third: + parms = third[key] + third_parties_data.append( (key, parms) ) + count += 1 + + logging.info('Found {} packages.'.format(count)) + logging.info('Package requested: {}'.format(parameters.packages)) + + if count == 1 and (len(parameters.packages) == 0): + parameters.packages = [ third_parties_data[0][0] ] + + # create nodes and choose selected by filter and mask + nodes = [] + selected = [] + for key, parms in third_parties_data: + node = ThirdParty(parameters, key, parms) + # define variables for unused projects + package = node.get_package_name() + + # fill compiler_replace_maps + node.apply_replace_maps(compiler_replace_maps) + + if (node.is_valid() + and (parameters.priority is None or (parameters.priority == node.get_priority())) + and (parameters.no_priority is None or (parameters.no_priority != node.get_priority()))): + nodes.append( (key, node) ) + if (parameters.packages == ['.'] or parameters.packages == ['*']): + selected.append( (key, node) ) + elif ((parameters.packages == ['all']) and (not node.get_exclude_from_all())): + selected.append( (key, node) ) + else: + for exp in parameters.packages: + if fnmatch.fnmatch(key.lower(), exp.lower()): + selected.append( (key, node) ) + + logging.info('Selected {} 
packages.'.format(len(selected))) + + # create relations + for key, parms in third_parties_data: + try: + depends = parms['depends'] + mask = parms['mask'] + # depends valid + valid = is_valid(key, mask) + # depends blacklisted + blacklisted = is_blacklisted(parameters.blacklist, parameters.no_blacklist, key) + if (depends is not None) and valid and (not blacklisted): + for depend in depends: + nodes_key = search_nodes_by_key(nodes, key) + nodes_depend = search_nodes_by_key(nodes, depend) + for nk in nodes_key: + for nd in nodes_depend: + nk.needs(nd) + except KeyError: + # no need create relations + pass + + + # 1/7: Generate solutions in each node + solutions = [] + for key, select_node in selected: + resolved = [] + if not parameters.build_only: + select_node.resolver(resolved, []) + solutions.append( resolved ) + else: + solutions.append( [select_node] ) + + + # 2/7: clean subset + groups = clean_subset(solutions) + + + # 3/7: merge solutions with same root + sols3 = {} + for packages in groups: + first = packages[0] + if first not in sols3: + sols3[first] = [] + chunk = sols3[first] + for node in packages: + if node != first: + if node not in chunk: + chunk.append(node) + + + # 4/7: write final plan + groups = [] + for key, value in sols3.items(): + newsolution = [key] + for node in value: + newsolution.append(node) + groups.append(newsolution) + + + # 5/7: clean subset + groups = clean_subset(groups) + + # 6/7: sort groups + groups_ordered = [] + for packages in groups: + priority_total = 0 + for node in packages: + priority_total += node.get_priority() + priority_group = (priority_total / len(packages)) + groups_ordered.append( (priority_group, packages) ) + groups_ordered.sort(key=lambda tup: tup[0], reverse=False) + + # 7/7: validate groups + for priority_total, packages in groups_ordered: + if len(packages) > 0: + priority_initial = packages[0].get_priority() + for node in packages: + if priority_initial != node.get_priority(): + logging.error('[ERROR] You are mixing packages of different layers.') + logging.error('Invalid priority (%d) in package %s, expected %d:' % (node.get_priority(), node.get_package_name(), priority_initial)) + logging.error('Any in group have bad depends:') + for node in packages: + sys.stdout.write('%s, ' % node.get_package_name()) + sys.stdout.write('\n') + sys.exit(1) + + # show groups in --plan + if len(groups_ordered) > 0: + priority_prev = groups_ordered[0][0] + i = 0 + for priority_total, packages in groups_ordered: + if parameters.quiet: + j = 0 + for node in packages: + sys.stdout.write("%s" % node.get_package_name()) + if ((len(packages)-1) != j): + sys.stdout.write(";") + j += 1 + sys.stdout.write('\n') + else: + if (priority_total > priority_prev) or (i == 0): + if priority_total in alias_priority_name: + layer_name = alias_priority_name[priority_total] + else: + layer_name = '%d' % priority_total + sys.stdout.write('\nLayer: %s\n\n' % layer_name) + sys.stdout.write("\t[") + j = 0 + for node in packages: + sys.stdout.write("%s" % node.get_package_name()) + if ((len(packages)-1) != j): + sys.stdout.write(", ") + j += 1 + sys.stdout.write("]") + sys.stdout.write('\n') + + priority_prev = priority_total + i += 1 + sys.stdout.write('\n') + sys.stdout.flush() + else: + logging.warning('No results.') + # with --plan flag is like use --dry-run + if parameters.plan: + sys.exit(0) + + try: + rets = OrderedDict() + unittests = OrderedDict() + skipping_if_priority_gt = 999 + announce_once = False + # + # pipeline: prepare, compile, packing, run_tests + # 
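+ # Note (descriptive comment): when a group fails with FailThirdParty, skipping_if_priority_gt is lowered to that group's priority, + # so every remaining group with a higher priority value is only reported and skipped instead of being built.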
+ for priority_group, packages in groups_ordered: + + if priority_group > skipping_if_priority_gt: + if not announce_once: + logging.error("ignoring group because some previous group are failing:") + logging.warning('\tgroup is formed by:') + announce_once = True + else: + logging.warning('') + for node in packages: + logging.warning(' -- %s' % node.get_package_name()) + continue + + if len(packages) > 1: + logging.info('--- Start group ---') + for node in packages: + logging.info('- %s' % node.get_package_name()) + # prepare include scripts + node.generate_scripts_headers(compiler_replace_maps) + + try: + if not parameters.no_purge: + run_purge(packages) + + # create pipeline + p = pipeline.make_pipe() + + # feed third parties + p = pipeline.feed(packages)(p) + + if not parameters.no_prepare: + # download sources + p = pipeline.do(prepare, False, parameters, compiler_replace_maps)(p) + + if not parameters.no_compilation: + # ./configure && make (configuration and compilation) + p = pipeline.do(compilation, False, parameters, compiler_replace_maps)(p) + + if not parameters.no_packing: + # packing (generate .tar.gz) + p = pipeline.do(packing, False, parameters, compiler_replace_maps)(p) + + if not parameters.no_run_tests: + # execute unittests and save results in "unittests" + p = pipeline.do(run_tests, False, parameters, compiler_replace_maps, unittests)(p) + + if not parameters.no_upload: + # upload artifacts + p = pipeline.do(upload, False, parameters, compiler_replace_maps)(p) + + # save results in "rets" + p = get_return_code(parameters, rets)(p) + + # close pipe + pipeline.end_pipe()(p) + + except FailThirdParty as e: + skipping_if_priority_gt = priority_group + logging.error("stopping full group.") + + except exceptions_fail_group: + logging.warning('Fatal exception in group:') + for node in packages: + logging.warning('-- %s' % node.get_package_name()) + + finally: + # only purge when you are executing a full group + if (not parameters.build_only) and (not parameters.no_purge): + if parameters.purge_if_fail: + run_purge(packages) + else: + # purge only if all packages are ok + ret = 0 + for node in packages: + ret += node.ret + + if ret == 0: + run_purge(packages) + else: + if len(packages) > 1: + logging.warning('Any in group is failing. No purge next group:') + for node in packages: + logging.warning(' %s' % node.get_package_name()) + else: + logging.warning('No purge %s because finished with fail' % node.get_package_name()) + + except exceptions_fail_program: + logging.warning('Force explicit exit ...') + finally: + ret = show_results(parameters, groups_ordered, rets, unittests) + sys.exit(ret) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py b/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py new file mode 100644 index 0000000..4ab073a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py @@ -0,0 +1,233 @@ +import os +import sys +import logging +import argparse +from requests import get # to make GET request +from io import StringIO +import csv +import utils +import functools + +version_separator = '.' 
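+# Version strings are dot-separated and are padded with zeros up to
+# version_count_max components by version_to_tuple() below,
+# e.g. "1.2" is treated as (1, 2, 0, 0).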
+version_count_max = 4 + + +# def read_remote_csv(url): +# fp = urllib.request.urlopen(url) +# mybytes = fp.read() +# content = mybytes.decode("utf8") +# fp.close() +# return content + + +def read_remote_csv(url): + response = get(url) + response = response.content.decode("utf8") + return response + + +def version_to_tuple(version_str): + try: + if (version_str is not None) and (len(version_str) > 0): + count = len(version_str.split(version_separator)) + list_data = [int(x) for x in version_str.split(version_separator)] + zeros = [0 for x in range(version_count_max - count)] + list_data.extend(zeros) + return tuple(list_data) + else: + return None + except ValueError: + return None + + +class package(object): + def __init__(self, name, version, local): + self._name = name + self._version = version_to_tuple(version) + self._local = local + + def __repr__(self): + if self._version is not None: + list_version = list(self._version) + list_version = [str(x) for x in list_version] + join_version = version_separator.join(list_version) + else: + join_version = "last" + return "%s;%s" % (self._name, join_version) + + def __eq__(self, other): + return (self._name == other._name) or (self._name == '.') or (other._name == '.') + + def __ne__(self, other): + return not self.__eq__(other) + + def is_same_version(self, other): + return self._version == other._version + + def get_name(self): + return self._name + + def get_version(self): + return self._version + + def is_local(self): + return self._local + + +def sort_versions(local_swap): + if not local_swap: + one = 1 + else: + one = -1 + + def cmp(a, b): + if a.get_version() < b.get_version(): + return 1 + elif a.get_version() > b.get_version(): + return -1 + else: + if a.is_local() and not b.is_local(): + return -one + elif a.is_local() and b.is_local(): + return one + elif not a.is_local() and b.is_local(): + return one + else: + return one + return cmp + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--artifacts', dest='artifacts', help='3rdparty path with cmakefiles', default=None) + parser.add_argument('--server', dest='server', help='artifact server', default=None) + """ + Existe un valor especial de name ".". 
Sirve para hacer un listado de todos los artefactos + """ + parser.add_argument('--name', required=True, dest='name', help='name package', default=None) + """ + La version fijada tiene la siguiente prioridad: + - Version fijada mediante parametros + - Version fijada mediante fichero de dependencias + - Version ultima + """ + parser.add_argument('--version', dest='version', help='version package fixed', default=None) + # TODO: packagename-1.0.0.0-windows_32-msvc_2015-debug + # --platform deberia filtrar artefactos compatibles con "MI PLATAFORMA" + parser.add_argument('--platform', dest='platform', help='platform specified', default=None) + # --compiler deberia filtrar artefactos compatibles con "MI COMPILADOR" + parameters = parser.parse_args() + + package_request = package(parameters.name, parameters.version, True) + packages_found = [] + + if parameters.artifacts is not None: + # local + utils.trymkdir(parameters.artifacts) + for path in os.listdir(parameters.artifacts): + full_path = os.path.join(parameters.artifacts, path) + # directorios que contengan "-" + if os.path.isdir(full_path) and (full_path.find('-') != -1): + basename = os.path.basename(full_path) + try: + separator = basename.rindex('-') + package_name = basename[:separator] + package_version = basename[separator+1:] + new_package = package(package_name, package_version, True) + if new_package == package_request: + packages_found.append(new_package) + except ValueError: + pass # happen with 3rdpartyversions + + """ + Buscar paquetes recien generados + """ + if parameters.artifacts is not None: + # local + basename = None + for path in os.listdir(parameters.artifacts): + full_path = os.path.join(parameters.artifacts, path) + terminator = '-cmake.tar.gz' + if os.path.isfile(full_path) and (full_path.endswith(terminator)): + if parameters.platform is None: + logging.error('Platform is needed!') + sys.exit(1) + terminator = '-%s-cmake.tar.gz' % parameters.platform + basename = os.path.basename(full_path) + try: + if basename is not None: + separator = basename.rindex(terminator) + basename = basename[:separator] + separator = basename.rindex('-') + package_name = basename[:separator] + package_version = basename[separator+1:] + new_package = package(package_name, package_version, True) + if new_package == package_request: + packages_found.append(new_package) + except ValueError: + # not found platform in file + pass + + + if parameters.server is not None: + try: + if not parameters.server.endswith('?quiet'): + parameters.server = parameters.server + '/' + '?quiet' + csv_content = read_remote_csv(parameters.server) + reader = csv.reader(StringIO(csv_content), delimiter=';') + i = 0 + for row in reader: + if len(row) >= 2: + if i > 0: + package_name = row[0] + package_version = row[1] + package_platform = row[2] + new_package = package(package_name, package_version, False) + if (parameters.platform is None) or (parameters.platform == package_platform): + if new_package == package_request: + packages_found.append(new_package) + i += 1 + except IOError: + logging.debug('error in cache artifacts: %s' % parameters.server) + + + if len(packages_found) > 0: + + if parameters.version is None: + """ + Cuando no hay version, ordeno de mayor a menor. + Al pasar False al comparador aparece primero local y luego remote en caso de ser la misma version. + Selecciona el primero y sale. 
+ """ + for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(False))): + if package_request.is_same_version(package): + print("EXACT;%s;%s" % (package, package.get_version())) + else: + print("COMPATIBLE;%s;%s" % (package, package.get_version())) + if parameters.name != '.': + sys.exit(0) + else: + """ + Cuando se especifica una version minima + Se ordena a la inversa, es decir de menor a mayor. + Se coge el primer paquete que cumple la restriccion de version. + Al pasar True al comparador hace que en caso de empate se mantenga a pesar del reverse que + aparece primero versiones locales y luego las remotas. + """ + for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(True)), reverse=True): + if package.get_version() >= package_request.get_version(): + if package_request.is_same_version(package): + print("EXACT;%s;%s" % (package, package.get_version())) + else: + print("COMPATIBLE;%s;%s" % (package, package.get_version())) + if parameters.name != '.': + sys.exit(0) + else: + print("UNSUITABLE;;") + sys.exit(1) + +# if __name__ == '__main__': +# csv_content = read_remote_csv('http://localhost:8080') +# reader = csv.reader(StringIO(csv_content), delimiter=';') +# print(list(reader)) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/common.yml b/node_modules/npm-mas-mas/cmaki_generator/common.yml new file mode 100644 index 0000000..11a2c76 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/common.yml @@ -0,0 +1,498 @@ +compilation_environments: &compilation_environments + windows_32-msvc_msc_ver_*-*: + generator: "Visual Studio 15 2017" + ext_dyn: dll + ext_sta: lib + windows_64-msvc_msc_ver_*-*: + generator: "Visual Studio 15 2017 Win64" + ext_dyn: dll + ext_sta: lib + windows_32-msvc_2015-*: + generator: "Visual Studio 14 2015" + ext_dyn: dll + ext_sta: lib + windows_64-msvc_2015-*: + generator: "Visual Studio 14 2015 Win64" + ext_dyn: dll + ext_sta: lib + windows_32-msvc_2017-*: + generator: "Visual Studio 15 2017" + ext_dyn: dll + ext_sta: lib + windows_64-msvc_2017-*: + generator: "Visual Studio 15 2017 Win64" + ext_dyn: dll + ext_sta: lib + windows_32-gcc_4-*: + generator: "Unix Makefiles" + ext_dyn: dll.a + ext_sta: a + windows_64-gcc_4-*: + generator: "Unix Makefiles" + ext_dyn: dll.a + ext_sta: a + linux_*_glibc_2.*-*_*-*: + generator: "Unix Makefiles" + ext_dyn: so + ext_sta: a + macos_64-clang_*-*: + generator: "Unix Makefiles" + ext_dyn: dylib + ext_sta: a + android_arm_api_*-gcc_*-*: + generator: "Unix Makefiles" + ext_dyn: so + ext_sta: a + +thirdparty_defaults: &thirdparty_defaults + platforms: + <<: *compilation_environments + version: null + version_manager: git + mask: wlmea + mode: dri + depends: null + source: skip + packing: true + build_windows: + | + @echo off + set CMAKI_INSTALL=%SELFHOME% + npm install + unittest: + | + int main() { return 0; } + priority: 30 + +library_dynamic: &library_dynamic + common: &library_dynamic_common + include: + - $PLATFORM/include + - include + windows: &library_dynamic_windows + <<: *library_dynamic_common + dynamic: + debug: + dll: + /*$TARGET*.dll/ + lib: + /*$TARGET*.lib/ + pdb: + /*$TARGET*.pdb/ + relwithdebinfo: + dll: + /*$TARGET*.dll/ + lib: + /*$TARGET*.lib/ + pdb: + /*$TARGET*.pdb/ + release: + dll: + /*$TARGET*.dll/ + lib: + /*$TARGET*.lib/ + pdb: + /*$TARGET*.pdb/ + + unix: &library_dynamic_unix + <<: *library_dynamic_common + dynamic: + debug: + so: + - /lib*$TARGET*.$EXT_DYN/ + - /*$TARGET*.$EXT_DYN/ + relwithdebinfo: + so: + - /lib*$TARGET*.$EXT_DYN/ + - 
/*$TARGET*.$EXT_DYN/ + release: + so: + - /lib*$TARGET*.$EXT_DYN/ + - /*$TARGET*.$EXT_DYN/ + windows_*-msvc_*-*: + <<: *library_dynamic_windows + default: + <<: *library_dynamic_unix + +executable: &executable + windows: &executable_windows_common + executable: + release: + bin: + /*${TARGET}.exe/ + unix: &executable_unix_common + executable: + release: + bin: + /*${TARGET}/ + windows_*-msvc_*-*: + <<: *executable_windows_common + default: + <<: *executable_unix_common + +executable_exact: &executable_exact + windows: &executable_exact_windows_common + executable: + release: + bin: + - ${TARGET}.exe + - bin/${TARGET}.exe + - dll/${TARGET}.exe + debug: + bin: + - ${TARGET}.exe + - bin/${TARGET}.exe + - dll/${TARGET}.exe + unix: &executable_exact_unix_common + executable: + release: + bin: + - $TARGET + - bin/$TARGET + - dll/$TARGET + debug: + bin: + - $TARGET + - bin/$TARGET + - dll/$TARGET + windows_*-msvc_*-*: + <<: *executable_exact_windows_common + default: + <<: *executable_exact_unix_common + +library_dynamic_exact: &library_dynamic_exact + common: &library_dynamic_exact_common + include: + - $PLATFORM/include + - include + windows: &library_dynamic_exact_windows + <<: *library_dynamic_exact_common + dynamic: + debug: + dll: + - ${TARGET}d.dll + - bin/${TARGET}d.dll + - Debug/${TARGET}d.dll + - dll/${TARGET}d.dll + - ${TARGET}_D.dll + - bin/${TARGET}_D.dll + - Debug/${TARGET}_D.dll + - dll/${TARGET}_D.dll + - $TARGET.dll + - bin/$TARGET.dll + - Debug/$TARGET.dll + - dll/$TARGET.dll + lib: + - ${TARGET}d.lib + - lib/${TARGET}d.lib + - bin/${TARGET}d.lib + - Debug/${TARGET}d.lib + - dll/${TARGET}d.lib + - ${TARGET}_D.lib + - lib/${TARGET}_D.lib + - bin/${TARGET}_D.lib + - Debug/${TARGET}_D.lib + - dll/${TARGET}_D.lib + - $TARGET.lib + - lib/$TARGET.lib + - bin/$TARGET.lib + - Debug/$TARGET.lib + - dll/$TARGET.lib + pdb: + - ${TARGET}d.pdb + - pdb/${TARGET}d.pdb + - bin/${TARGET}d.pdb + - Debug/${TARGET}d.pdb + - dll/${TARGET}d.pdb + - ${TARGET}_D.pdb + - pdb/${TARGET}_D.pdb + - bin/${TARGET}_D.pdb + - Debug/${TARGET}_D.pdb + - dll/${TARGET}_D.pdb + - $TARGET.pdb + - pdb/$TARGET.pdb + - bin/$TARGET.pdb + - Debug/$TARGET.pdb + - dll/$TARGET.pdb + relwithdebinfo: + dll: + - $TARGET.dll + - bin/$TARGET.dll + - RelWithDebInfo/$TARGET.dll + - dll/$TARGET.dll + lib: + - $TARGET.lib + - lib/$TARGET.lib + - bin/$TARGET.lib + - RelWithDebInfo/$TARGET.lib + - dll/$TARGET.lib + pdb: + - $TARGET.pdb + - pdb/$TARGET.pdb + - bin/$TARGET.pdb + - RelWithDebInfo/$TARGET.pdb + - dll/$TARGET.pdb + release: + dll: + - $TARGET.dll + - bin/$TARGET.dll + - Release/$TARGET.dll + - dll/$TARGET.dll + lib: + - $TARGET.lib + - lib/$TARGET.lib + - bin/$TARGET.lib + - Release/$TARGET.lib + - dll/$TARGET.lib + pdb: + - $TARGET.pdb + - pdb/$TARGET.pdb + - bin/$TARGET.pdb + - Release/$TARGET.pdb + - dll/$TARGET.pdb + + unix: &library_dynamic_exact_unix + <<: *library_dynamic_exact_common + dynamic: + debug: + so: + - Debug/lib${TARGET}d.$EXT_DYN + - Debug/lib${TARGET}_D.$EXT_DYN + - Debug/lib${TARGET}_debug.$EXT_DYN + - Debug/lib${TARGET}-d.$EXT_DYN + - Debug/lib${TARGET}.$EXT_DYN + ##################### + - bin/lib${TARGET}d.$EXT_DYN + - bin/lib${TARGET}_D.$EXT_DYN + - bin/lib${TARGET}_debug.$EXT_DYN + - bin/lib${TARGET}-d.$EXT_DYN + - bin/lib${TARGET}.$EXT_DYN + ##################### + - lib/lib${TARGET}d.$EXT_DYN + - lib/lib${TARGET}_D.$EXT_DYN + - lib/lib${TARGET}_debug.$EXT_DYN + - lib/lib${TARGET}-d.$EXT_DYN + - lib/lib${TARGET}.$EXT_DYN + ##################### + - lib${ARCH}/lib${TARGET}d.$EXT_DYN + - 
lib${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib${ARCH}/lib${TARGET}.$EXT_DYN + ##################### + - lib${TARGET}d.$EXT_DYN + - lib${TARGET}_D.$EXT_DYN + - lib${TARGET}_debug.$EXT_DYN + - lib${TARGET}-d.$EXT_DYN + - lib${TARGET}.$EXT_DYN + ###################### + - lib/${ARCH}/lib${TARGET}d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}.$EXT_DYN + relwithdebinfo: + so: + - RelWithDebInfo/lib${TARGET}d.$EXT_DYN + - RelWithDebInfo/lib${TARGET}_D.$EXT_DYN + - RelWithDebInfo/lib${TARGET}_debug.$EXT_DYN + - RelWithDebInfo/lib${TARGET}-d.$EXT_DYN + - RelWithDebInfo/lib${TARGET}.$EXT_DYN + ##################### + - bin/lib${TARGET}d.$EXT_DYN + - bin/lib${TARGET}_D.$EXT_DYN + - bin/lib${TARGET}_debug.$EXT_DYN + - bin/lib${TARGET}-d.$EXT_DYN + - bin/lib${TARGET}.$EXT_DYN + ##################### + - lib/lib${TARGET}d.$EXT_DYN + - lib/lib${TARGET}_D.$EXT_DYN + - lib/lib${TARGET}_debug.$EXT_DYN + - lib/lib${TARGET}-d.$EXT_DYN + - lib/lib${TARGET}.$EXT_DYN + ##################### + - lib${ARCH}/lib${TARGET}d.$EXT_DYN + - lib${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib${ARCH}/lib${TARGET}.$EXT_DYN + ##################### + - lib${TARGET}d.$EXT_DYN + - lib${TARGET}_D.$EXT_DYN + - lib${TARGET}_debug.$EXT_DYN + - lib${TARGET}-d.$EXT_DYN + - lib${TARGET}.$EXT_DYN + ###################### + - lib/${ARCH}/lib${TARGET}d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}.$EXT_DYN + release: + so: + - Release/lib$TARGET.$EXT_DYN + - bin/lib$TARGET.$EXT_DYN + - lib/lib$TARGET.$EXT_DYN + - lib${ARCH}/lib$TARGET.$EXT_DYN + - lib$TARGET.$EXT_DYN + - lib/${ARCH}/lib$TARGET.$EXT_DYN + windows_*-msvc_*-*: + <<: *library_dynamic_exact_windows + default: + <<: *library_dynamic_exact_unix + +library_static: &library_static + common: &library_static_common + include: + - $PLATFORM/include + - include + static: + debug: + lib: + /*$TARGET*.$EXT_STA/ + relwithdebinfo: + lib: + /*$TARGET*.$EXT_STA/ + release: + lib: + /*$TARGET*.$EXT_STA/ + windows_*-msvc_*-*: + <<: *library_static_common + default: + <<: *library_static_common + +library_static_exact: &library_static_exact + common: &library_static_exact_common + include: + - $PLATFORM/include + - include + static: + debug: + lib: + - Debug/lib${TARGET}d.$EXT_STA + - Debug/lib${TARGET}-d.$EXT_STA + - Debug/lib${TARGET}.$EXT_STA + - Debug/${TARGET}d.$EXT_STA + - Debug/${TARGET}-d.$EXT_STA + - Debug/${TARGET}.$EXT_STA + ################ + - lib${TARGET}d.$EXT_STA + - lib${TARGET}-d.$EXT_STA + - lib${TARGET}.$EXT_STA + - ${TARGET}d.$EXT_STA + - ${TARGET}-d.$EXT_STA + - ${TARGET}.$EXT_STA + ################ + - lib/lib${TARGET}d.$EXT_STA + - lib/lib${TARGET}-d.$EXT_STA + - lib/lib${TARGET}.$EXT_STA + - lib/${TARGET}d.$EXT_STA + - lib/${TARGET}-d.$EXT_STA + - lib/${TARGET}.$EXT_STA + relwithdebinfo: + lib: + - RelWithDebInfo/lib${TARGET}d.$EXT_STA + - RelWithDebInfo/lib${TARGET}-d.$EXT_STA + - RelWithDebInfo/lib${TARGET}.$EXT_STA + - RelWithDebInfo/${TARGET}d.$EXT_STA + - RelWithDebInfo/${TARGET}-d.$EXT_STA + - RelWithDebInfo/${TARGET}.$EXT_STA + ################ + - lib${TARGET}d.$EXT_STA + - lib${TARGET}-d.$EXT_STA + - lib${TARGET}.$EXT_STA + - ${TARGET}d.$EXT_STA + - ${TARGET}-d.$EXT_STA + - 
${TARGET}.$EXT_STA
+                    ################
+                    - lib/lib${TARGET}d.$EXT_STA
+                    - lib/lib${TARGET}-d.$EXT_STA
+                    - lib/lib${TARGET}.$EXT_STA
+                    - lib/${TARGET}d.$EXT_STA
+                    - lib/${TARGET}-d.$EXT_STA
+                    - lib/${TARGET}.$EXT_STA
+            release:
+                lib:
+                    - Release/lib${TARGET}.$EXT_STA
+                    - Release/${TARGET}.$EXT_STA
+                    ################
+                    - lib${TARGET}.$EXT_STA
+                    - ${TARGET}.$EXT_STA
+                    ################
+                    - lib/lib${TARGET}.$EXT_STA
+                    - lib/${TARGET}.$EXT_STA
+    windows_*-msvc_*-*:
+        <<: *library_static_exact_common
+    default:
+        <<: *library_static_exact_common
+
+# used when the dll must be distributed (Windows only) but nothing links against it
+library_only_dll: &library_only_dll
+    windows: &library_only_dll_windows
+        add_3rdparty_dependencies: false
+        lib_provided: false
+        dynamic:
+            debug:
+                dll:
+                    /*$TARGET*.dll/
+                pdb:
+                    /*$TARGET*.pdb/
+            relwithdebinfo:
+                dll:
+                    /*$TARGET*.dll/
+                pdb:
+                    /*$TARGET*.pdb/
+            release:
+                dll:
+                    /*$TARGET*.dll/
+                pdb:
+                    /*$TARGET*.pdb/
+
+library_dynamic_boost: &library_dynamic_boost
+    common: &common_boost
+        include:
+            - $PLATFORM/include
+            - include
+        definitions:
+            - -D${PACKAGE_UPPER}_DYN_LINK
+    windows: &windows_dynamic_boost
+        <<: *common_boost
+        dynamic:
+            debug:
+                dll:
+                    /$TARGET-*-mt-*d-*_*.dll/
+                lib:
+                    /$TARGET-*-mt-*d-*_*.lib/
+                pdb:
+                    null
+            relwithdebinfo:
+                dll:
+                    /$TARGET-*-mt-*_*.dll/
+                lib:
+                    /$TARGET-*-mt-*_*.lib/
+                pdb:
+                    null
+            release:
+                dll:
+                    /$TARGET-*-mt-*_*.dll/
+                lib:
+                    /$TARGET-*-mt-*_*.lib/
+                pdb:
+                    null
+    unix: &unix_dynamic_boost
+        <<: *common_boost
+        dynamic:
+            debug:
+                so:
+                    /lib$TARGET-*-mt-*d-*_*.$EXT_DYN/
+            relwithdebinfo:
+                so:
+                    /lib$TARGET-*-mt-*_*.$EXT_DYN/
+            release:
+                so:
+                    /lib$TARGET-*-mt-*_*.$EXT_DYN/
+    windows_*-msvc_*-*:
+        <<: *windows_dynamic_boost
+    default:
+        <<: *unix_dynamic_boost
+
diff --git a/node_modules/npm-mas-mas/cmaki_generator/compilation.py b/node_modules/npm-mas-mas/cmaki_generator/compilation.py
new file mode 100644
index 0000000..b80af0f
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_generator/compilation.py
@@ -0,0 +1,238 @@
+import os
+import utils
+import logging
+import pipeline
+from itertools import product
+from third_party import platforms
+from third_party import CMAKELIB_URL
+from third_party import get_identifier
+
+
+def search_cmakelib():
+    # building from cmaki_generator
+    cmakelib_dir = os.path.join('output', '3rdparties', 'cmaki')
+    if not os.path.isdir(cmakelib_dir):
+        # building a dependency
+        cmakelib_dir = os.path.join('..', 'cmaki')
+        if not os.path.isdir(cmakelib_dir):
+            # building the root project
+            cmakelib_dir = os.path.join('node_modules', 'cmaki')
+            if not os.path.isdir(cmakelib_dir):
+                raise Exception("cmaki not found: {}".format(os.path.abspath(cmakelib_dir)))
+    return os.path.abspath(cmakelib_dir)
+
+
+def compilation(node, parameters, compiler_replace_maps):
+
+    package = node.get_package_name()
+    package_norm = node.get_package_name_norm()
+    version = node.get_version()
+
+    cmake3p_dir = parameters.prefix
+    cmake3p_dir = utils.get_norm_path(cmake3p_dir)
+    cmake3p_dir = cmake3p_dir.replace('\\', '/')
+
+    cmakefiles_dir = search_cmakelib()
+
+    package_upper = node.get_package_name_norm_upper()
+    parms = node.parameters
+    build_modes = node.get_build_modes()
+    for plat, build_mode in product(platforms, build_modes):
+        install_directory = node.get_install_directory(plat)
+        utils.trymkdir(install_directory)
+        build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode))
+        utils.trymkdir(build_directory)
+        with utils.working_directory(build_directory):
+            # get generator and platform info
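+            # Note: node.compiler_iterator() yields a 7-element tuple per compiler
+            # configuration; only the CMake generator and the modified environment
+            # are used in the block below, the remaining fields are ignored here.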
+            for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps):
+
+                logging.info('-- compilation mode: %s plat: %s' % (build_mode, plat))
+
+                ############# 1. prepare vars
+
+                if build_mode.lower() == 'debug':
+                    try:
+                        env_modified['CFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CFLAGS']
+                    except KeyError:
+                        env_modified['CFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG'
+                    try:
+                        env_modified['CPPFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CPPFLAGS']
+                    except KeyError:
+                        env_modified['CPPFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG'
+                elif build_mode.lower() == 'relwithdebinfo':
+                    try:
+                        env_modified['CFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CFLAGS']
+                    except KeyError:
+                        env_modified['CFLAGS'] = '-g -O2 -DNDEBUG'
+                    try:
+                        env_modified['CPPFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CPPFLAGS']
+                    except KeyError:
+                        env_modified['CPPFLAGS'] = '-g -O2 -DNDEBUG'
+                elif build_mode.lower() == 'release':
+                    # by default, packages are assumed to be built in release
+                    try:
+                        env_modified['CFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CFLAGS']
+                    except KeyError:
+                        env_modified['CFLAGS'] = '-O3 -DNDEBUG'
+                    try:
+                        env_modified['CPPFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CPPFLAGS']
+                    except KeyError:
+                        env_modified['CPPFLAGS'] = '-O3 -DNDEBUG'
+
+                cores = utils.detect_ncpus()
+                half_cores = cores // 2  # integer division so HALF_CORES is exported as a whole number
+                env_modified['CORES'] = str(cores)
+                env_modified['HALF_CORES'] = str(half_cores)
+                env_modified['GTC_PREFIX'] = parameters.prefix
+                env_modified['CMAKELIB_URL'] = CMAKELIB_URL
+                env_modified['BUILD_MODE'] = str(build_mode)
+                # env_modified['NPP_SERVER'] = ...
+                env_modified['SOURCES'] = os.path.abspath(os.path.join('..', node.get_download_directory()))
+                env_modified['CMAKI_DIR'] = cmakefiles_dir
+                env_modified['SELFHOME'] = install_directory
+                env_modified['CMAKI_PWD'] = build_directory
+                env_modified['CMAKI_INSTALL'] = install_directory
+
+                #################
+                # remove cmake3p of node
+                node.remove_cmake3p(cmake3p_dir)
+
+                # show env vars
+                node.show_environment_vars(env_modified)
+
+                # remove CMakeCache.txt to avoid problems when the generator changes
+                utils.tryremove('CMakeCache.txt')
+                utils.tryremove('cmake_install.cmake')
+                utils.tryremove('install_manifest.txt')
+                utils.tryremove_dir('CMakeFiles')
+                #################
+
+                generator_extra = ''
+                if generator is not None:
+                    generator_extra = '-G"%s"' % generator
+
+                cmakefiles_dir = parameters.cmakefiles
+                cmakefiles_dir = cmakefiles_dir.replace('\\', '/')
+
+                cmake_prefix_path = parameters.third_party_dir
+                cmake_prefix_path = cmake_prefix_path.replace('\\', '/')
+
+                build_directory = build_directory.replace('\\', '/')
+
+                # resolve replace maps
+                compiler_replace_resolved = {}
+                for var, value in compiler_replace_maps.items():
+                    newvalue = value
+                    newvalue = newvalue.replace('$PLATFORM', plat)
+                    compiler_replace_resolved[var] = newvalue
+
+                # begin definitions cmake
+                try:
+                    cmake_definitions_list_original = parms['cmake_definitions']
+                    cmake_definitions_list = []
+                    for define in cmake_definitions_list_original:
+                        # TODO: resolve direct and indirect variables (from dependencies)
+                        define = define.replace('$%s_HOME' % package_norm, install_directory)
+                        # apply replaces
+                        cmake_definitions_list.append( utils.apply_replaces(define, compiler_replace_resolved) )
+                except KeyError:
+                    cmake_definitions_list = []
+
+                # add cflags and cppflags to cmake_definitions
+                try:
+                    cmake_definitions_list.append( 'CMAKE_C_FLAGS="%s"' % env_modified['CFLAGS'] )
+                except KeyError:
+                    pass
+                try:
+                    cmake_definitions_list.append( 'CMAKE_CXX_FLAGS="%s"' % env_modified['CPPFLAGS'] )
+                except KeyError:
+                    pass
+
+                definitions_extra = ''
+                for definition in cmake_definitions_list:
+                    definitions_extra += ' -D%s' % definition
+                # end definitions cmake
+
+                if (not 'CMAKE_TOOLCHAIN_FILE' in env_modified) or (not env_modified['CMAKE_TOOLCHAIN_FILE']) or (env_modified['CMAKE_TOOLCHAIN_FILE'] == "no cross compile"):
+                    cmake_toolchain_file_filepath=''
+                else:
+                    cmake_toolchain_file_filepath=' -DCMAKE_TOOLCHAIN_FILE="{}"'.format(env_modified['CMAKE_TOOLCHAIN_FILE'])
+
+                cmake_prefix = node.get_cmake_prefix()
+                cmake_configure = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKE_MODULE_PATH=%s -DCMAKI_PATH=%s -DCMAKE_BUILD_TYPE=%s -DCMAKE_PREFIX_PATH=%s -DPACKAGE=%s -DPACKAGE_UPPER=%s -DPACKAGE_VERSION=%s -DPACKAGE_BUILD_DIRECTORY=%s -DCMAKI_COMPILER=%s -DCMAKI_IDENTIFIER=%s -DCMAKI_PLATFORM=%s %s %s' % (generator_extra, cmake_prefix, cmake3p_dir, cmakefiles_dir, cmakefiles_dir, build_mode, cmake_prefix_path, package, package_upper, version, build_directory, get_identifier('COMPILER'), get_identifier('ALL'), get_identifier('ALL'), definitions_extra, cmake_toolchain_file_filepath)
+
+                target = node.get_cmake_target()
+                if target is not None:
+                    cmake_build = 'cmake --build . --target %s --config %s' % (target, build_mode)
+                else:
+                    cmake_build = 'cmake --build . --config %s' % build_mode
+
+                env_modified['CMAKE_CONFIGURE'] = cmake_configure.replace(r'"', r"'")
+                env_modified['CMAKE_BUILD'] = cmake_build.replace(r'"', r"'")
+
+                ########## 2. execute
+
+                executed_build_script = False
+                if utils.is_windows():
+                    for build_script in ['.build.cmd', 'build.cmd']:
+                        if os.path.exists(build_script):
+                            # execute manual build script
+                            node.ret += abs(utils.safe_system('%s %s %s %s %s %s' % (build_script, install_directory, package, version, plat, build_mode), env=env_modified))
+                            executed_build_script = True
+                else:
+                    for build_script in ['.build.sh', 'build.sh']:
+                        if os.path.exists(build_script):
+                            # show vars
+                            node.show_environment_vars(env_modified)
+
+                            node.ret += abs(utils.safe_system('chmod +x %s && ./%s %s %s %s %s %s' % (build_script, build_script, install_directory, package, version, plat, build_mode), env=env_modified))
+                            executed_build_script = True
+
+                if not executed_build_script:
+                    logging.debug('configure command: %s' % cmake_configure)
+
+                    ret = utils.safe_system(cmake_configure, env=env_modified)
+                    if ret == 0:
+                        logging.debug('build command: %s' % cmake_build)
+                        node.ret += abs(utils.safe_system(cmake_build, env=env_modified))
+                    else:
+                        logging.warning('Configuration failed. See log: %s' % parameters.log)
+                        node.ret += abs(ret)
+
+                ######## 3. manual install
+
+                # post-install
+                logging.debug('begin post-install')
+                for bc in node.get_post_install():
+                    chunks = [x.strip() for x in bc.split(' ') if x]
+                    if (len(chunks) != 2) and (len(chunks) != 3):
+                        raise Exception('Invalid value in post_install: %s. 
Expected [source pattern destiny]' % bc) + + source_folder = os.path.join(build_directory, os.path.dirname(chunks[0])) + install_directory_chunk = os.path.join(install_directory, chunks[1]) + pattern = os.path.basename(chunks[0]) + logging.debug('copy %s/%s to %s' % (source_folder, pattern, install_directory_chunk)) + + # create directory if not exists + utils.trymkdir(install_directory_chunk) + + p = pipeline.make_pipe() + # begin + if len(chunks) == 3: + p = pipeline.find(source_folder, 99)(p) + else: + p = pipeline.find(source_folder, 0)(p) + p = pipeline.grep_basename(pattern)(p) + p = pipeline.copy(source_folder, install_directory_chunk)(p) + p = pipeline.debug('copied ')(p) + # end + pipeline.end_pipe()(p) + logging.debug('end post-install') + + if parameters.fast: + logging.debug('skipping for because is in fast mode: "compilation"') + break + + # finish well + return True diff --git a/node_modules/npm-mas-mas/cmaki_generator/download_package.py b/node_modules/npm-mas-mas/cmaki_generator/download_package.py new file mode 100644 index 0000000..23fc656 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/download_package.py @@ -0,0 +1,11 @@ +from requests import get # to make GET request + +def download_from_url(url, file_name): + with open(file_name, "wb") as file: + response = get(url) + file.write(response.content) + +url = 'http://localhost:8080/cpp/download.php?file=json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz' + +print( download_from_url(url, "json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz") ) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/get_package.py b/node_modules/npm-mas-mas/cmaki_generator/get_package.py new file mode 100755 index 0000000..e450ee0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/get_package.py @@ -0,0 +1,26 @@ +import os +import sys +import logging +import argparse +import urllib +import csv +import utils + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--name', required=True, dest='name', help='name package', default=None) + parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) + parameters = parser.parse_args() + + depends_file = parameters.depends + if os.path.exists(depends_file): + data = utils.deserialize(depends_file) + # data = utils.deserialize_json(depends_file) + else: + data = {} + if parameters.name in data: + print (data[parameters.name]) + sys.exit(0) + else: + sys.exit(1) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py b/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py new file mode 100644 index 0000000..c407dd7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py @@ -0,0 +1,35 @@ +import logging + + +def set_state(rets, key, value): + if not key in rets: + rets[key] = value + else: + logging.warning('Received in pipeline multiples packages with same name and version: %s' % key) + set_state(rets, key + '_', value) + + +def get_return_code(parameters, rets): + def process(packages): + for node in packages: + try: + # process package + name = node.get_package_name() + version = node.get_version() + + if len(node.exceptions) > 0: + state = "EXCEPTION in %s" % node.fail_stage + elif node.interrupted: + state = "INTERRUPTED in %s" % node.fail_stage + elif (node.ret != 0): + state = "FAILED in %s" % node.fail_stage + else: + state = "OK" + + key = '%s - %s' % (name, version) + set_state(rets, key, state) + finally: + # 
send to next step + yield node + return process + diff --git a/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt new file mode 100644 index 0000000..2d06137 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt @@ -0,0 +1,47 @@ +cmake_minimum_required(VERSION 2.8) +cmake_policy(SET CMP0011 NEW) + +include_directories(gwen/include) + +#ADD_DEFINITIONS(-DGWEN_COMPILE_STATIC -D_HAS_EXCEPTIONS=0 -D_STATIC_CPPLIB) +ADD_DEFINITIONS(-DGWEN_COMPILE_DLL) + +IF(WIN32) + +ELSE() + add_definitions(-std=c++11) +ENDIF() + +file(GLOB SOURCE_CODE1 gwen/src/*.cpp) +file(GLOB SOURCE_CODE2 gwen/src/Controls/*.cpp) +file(GLOB SOURCE_CODE3 gwen/src/Controls/Dialog/*.cpp) +file(GLOB SOURCE_CODE4 gwen/src/Platforms/*.cpp) + +add_library(${PACKAGE} SHARED ${SOURCE_CODE1} ${SOURCE_CODE2} ${SOURCE_CODE3} ${SOURCE_CODE4}) + +file(GLOB HEADER_CODE1 gwen/include/Gwen/*.h) +INSTALL( FILES ${HEADER_CODE1} + DESTINATION "include/${PACKAGE}") + +file(GLOB HEADER_CODE2 gwen/include/Gwen/Controls/*.h) +INSTALL( FILES ${HEADER_CODE2} + DESTINATION "include/${PACKAGE}/Controls") + +file(GLOB HEADER_CODE3 gwen/include/Gwen/Controls/Dialog/*.h) +INSTALL( FILES ${HEADER_CODE3} + DESTINATION "include/${PACKAGE}/Controls/Dialog") + +file(GLOB HEADER_CODE4 gwen/include/Gwen/Input/*.h) +INSTALL( FILES ${HEADER_CODE4} + DESTINATION "include/${PACKAGE}/Input") + +file(GLOB HEADER_CODE5 gwen/include/Gwen/Renderers/*.h) +INSTALL( FILES ${HEADER_CODE5} + DESTINATION "include/${PACKAGE}/Renderers") + +file(GLOB HEADER_CODE6 gwen/include/Gwen/Skins/*.h) +INSTALL( FILES ${HEADER_CODE6} + DESTINATION "include/${PACKAGE}/Skins") + +INSTALL( FILES gwen/bin/DefaultSkin.png + DESTINATION "bin") diff --git a/node_modules/npm-mas-mas/cmaki_generator/hash_version.py b/node_modules/npm-mas-mas/cmaki_generator/hash_version.py new file mode 100644 index 0000000..f5e56cb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/hash_version.py @@ -0,0 +1,172 @@ +import os +import contextlib +import utils +import time +from datetime import datetime +from utils import get_stdout +from email.utils import parsedate + + +def get_revision_svn(repo): + ''' + This command need svn in PATH + ''' + cmd = "svn info %s" % repo + for line in get_stdout(cmd): + if line.startswith('Last') or (line.startswith('Revisi') and (line.find('cambio') != -1)): + pos = line.rindex(':') + return int(line[pos+2:]) + return -1 + + +def get_timestamp_from_changeset(repo, changeset_searched): + ''' + generator of commits + ''' + with utils.working_directory(repo): + lines = [] + for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): + lines.append(line) + for line in reversed(lines): + chunks = line.split(";") + assert(len(chunks) == 2) + changeset = chunks[0] + timestamp = int(time.mktime(parsedate(chunks[1]))) + if changeset_searched == changeset: + return timestamp + raise Exception('Error in get timestamp from changeset {}'.format(changeset_searched)) + + +def git_log_gen(repo, number=1, extra=''): + ''' + generator of commits + ''' + with utils.working_directory(repo): + for line in get_stdout('git log -%d %s' % (number, extra)): + if line.startswith('commit'): + parts = line.split(' ') + assert(len(parts) == 2) + commit_name = parts[1] + yield commit_name + + +def get_changeset_git_from_position(repo, position = 0): + with utils.working_directory(repo): + i = 1 + lines = [] + for line in get_stdout('git log'): + lines.append(line) + for line in reversed(lines): + 
if line.startswith('commit'): + parts = line.split(' ') + assert(len(parts) == 2) + commit_name = parts[1] + if i == position: + return commit_name + else: + i += 1 + raise Exception('Error in get git hash from position {}'.format(position)) + + +def get_changeset_from_timestamp(repo, timestamp_searched): + with utils.working_directory(repo): + lines = [] + for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): + lines.append(line) + for line in reversed(lines): + chunks = line.split(";") + assert(len(chunks) == 2) + changeset = chunks[0] + timestamp = int(time.mktime(parsedate(chunks[1]))) + if timestamp_searched == timestamp: + return changeset + raise Exception('Error in get git hash from timestamp {}'.format(timestamp_searched)) + + +def get_position_git_from_changeset(repo, changeset): + with working_directory(repo): + i = 1 + lines = [] + for line in get_stdout('git log'): + lines.append(line) + for line in reversed(lines): + if line.startswith('commit'): + parts = line.split(' ') + if len(parts) == 2: + commit_name = parts[1] + if commit_name == changeset: + return i + else: + i += 1 + return -1 + + +def get_last_changeset(repo, short=False): + for changeset in git_log_gen(repo, number=1): + if short: + return changeset[:7] + else: + return changeset + return "" + + +def get_last_version(repo): + return to_cmaki_version(repo, get_last_changeset(repo)) + + +def rehash_simple(commit_name, position): + separator = '000' + return int(separator.join(list(str(ord(character)) for character in commit_name))) % position + + +@contextlib.contextmanager +def working_directory(path): + prev_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +def to_cmaki_version(repo, changeset): + ''' + git hash ----> 0.0.x.x + ''' + position = get_timestamp_from_changeset(repo, changeset) + hash_simple = rehash_simple(changeset, position) + versions = [] + versions.append('0') + versions.append('0') + versions.append(str(position)) + versions.append(str(hash_simple)) + return '.'.join(versions) + + +def to_git_version(repo, version): + ''' + 0.0.x.x ----> git hash + ''' + version = version.split('.') + assert(len(version) == 4) + position = int(version[2]) + pseudohash = int(version[3]) + changeset = get_changeset_from_timestamp(repo, position) + hash_simple = rehash_simple(changeset, position) + assert( get_timestamp_from_changeset(repo, changeset) == position ) + assert( hash_simple == pseudohash ) + return changeset + + +if __name__ == '__main__': + + local_path = r'/home/ricardo/dev/fast-event-system' + + for commit_name in git_log_gen(local_path, 10): + cmaki_version = to_cmaki_version(local_path, commit_name) + print ("%s -> %s" % (commit_name, cmaki_version)) + commit_name2 = to_git_version(local_path, cmaki_version) + print ("%s -> %s" % (cmaki_version, commit_name2)) + print () + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl new file mode 100644 index 0000000..8ba21f4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl @@ -0,0 +1,120 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BuildName: + BuildStamp: + Name: + Generator: + CompilerName: + OSName: + Hostname: + OSRelease: + OSVersion: + OSPlatform: + Is64Bits: + VendorString: + VendorID: + FamilyID: + ModelID: + ProcessorCacheSize: + NumberOfLogicalCPU: + NumberOfPhysicalCPU: + TotalVirtualMemory: + 
TotalPhysicalMemory: + LogicalProcessorsPerPhysical: + ProcessorClockFrequency: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/junit/README.md b/node_modules/npm-mas-mas/cmaki_generator/junit/README.md new file mode 100644 index 0000000..4f989c6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/junit/README.md @@ -0,0 +1,3 @@ +# Source +https://bitbucket.org/shackra/ctest-jenkins/ + diff --git a/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt new file mode 100644 index 0000000..bc1e512 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt @@ -0,0 +1,687 @@ +#=================================== +# Build script for libRocket ======= +#=================================== + +if(APPLE) + if(IOS_PLATFORM) + set(CMAKE_TOOLCHAIN_FILE cmake/Platform/iOS.cmake) + endif(IOS_PLATFORM) +endif(APPLE) + +# We use the new OSX_ARCHITECTURES property +# and GNUInstallDirs module +cmake_minimum_required(VERSION 2.8.5) + +if(COMMAND cmake_policy) + cmake_policy(SET CMP0015 NEW) +endif(COMMAND cmake_policy) + +project(libRocket C CXX) + +# paths +include(GNUInstallDirs) + +set(LIBROCKET_VERSION_MAJOR 1) +set(LIBROCKET_VERSION_MINOR 3) +set(LIBROCKET_VERSION_PATCH 0) +set(LIBROCKET_VERSION_TWEAK 0) +set(PROJECT_VERSION ${LIBROCKET_VERSION_MAJOR}.${LIBROCKET_VERSION_MINOR}.${LIBROCKET_VERSION_PATCH}.${LIBROCKET_VERSION_TWEAK}) + +# Search in the 'cmake' directory for additional CMake modules. +list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) + +# Old versions of CMake need some updated Modules, but we don't want +# to override newer versions of CMake which have working versions +if(CMAKE_MAJOR_VERSION LESS 3) + list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/v2fixes) +endif() + +#=================================== +# Environment tests ================ +#=================================== + +include(TestForANSIForScope) +include(TestForANSIStreamHeaders) +include(TestForSTDNamespace) + +#=================================== +# Provide hints as to where depends= +# might be found = +#=================================== + +if(NOT DEFINED ENV{FREETYPE_DIR}) + set(ENV{FREETYPE_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{Boost_DIR}) + set(ENV{Boost_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{LUA_DIR}) + set(ENV{LUA_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{SDLDIR}) + set(ENV{SDLDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{SDLIMAGEDIR}) + set(ENV{SDLIMAGEDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{SFML_ROOT}) + set(ENV{SFML_ROOT} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +#=================================== +# Plaform specific global hacks ==== +#=================================== + +if(APPLE) + # Disables naked builtins from AssertMacros.h which + # This prevents naming collisions such as those from the check() + # function macro with LuaType::check + add_definitions(-D__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=0) +endif(APPLE) + +#=================================== +# Build options ==================== +#=================================== + +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE Release CACHE STRING + "Choose the type of build, options are: None Debug Release 
RelWithDebInfo MinSizeRel." + FORCE) +endif() + +if(NOT IOS) + option(BUILD_SHARED_LIBS "Build shared libraries" ON) +endif(NOT IOS) + +option(BUILD_PYTHON_BINDINGS "Build python bindings" OFF) +option(BUILD_LUA_BINDINGS "Build Lua bindings" OFF) +option(BUILD_SAMPLES "Build samples" OFF) +if(WIN32) + option(SKIP_DIRECTX_SAMPLES "Skip build of all DirectX related samples. Only applies if BUILD_SAMPLES is ON" OFF) + option(SKIP_DIRECTX9_SAMPLE "Skip build of DirectX 9 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) + option(SKIP_DIRECTX10_SAMPLE "Skip build of DirectX 10 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) +endif() + +if(IOS) + if(BUILD_SHARED_LIBS) + message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. iOS does not support shared libraries.") + endif(BUILD_SHARED_LIBS) +endif(IOS) + +if(IOS) + if(BUILD_SHARED_LIBS) + message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. iOS does not support shared libraries.") + endif(BUILD_SHARED_LIBS) +endif(IOS) + +if(NOT BUILD_SHARED_LIBS) + add_definitions(-DSTATIC_LIB) +endif() + +#on windows, check for VC10 and fix the multiple compile target issue. +IF(WIN32) + if(MSVC) + if(${MSVC_VERSION} STREQUAL 1600 OR ${MSVC_VERSION} STRGREATER 1600) + message("Visual Studio 2010 (${MSVC_VERSION}) build fix at play (/FORCE:MULTIPLE)") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /FORCE:MULTIPLE") + endif() + endif() +ENDIF(WIN32) + + +#=================================== +# Find dependencies ================ +#=================================== + +cmaki_find_package(dune-freetype) +include_directories(${DUNE-FREETYPE_INCLUDE_DIRS}) +list(APPEND CORE_LINK_LIBS ${DUNE-FREETYPE_LIBRARIES}) + +# # FreeType +# if(CMAKE_MAJOR_VERSION LESS 3) +# # Freetype changed the layout of its header files, we need to use +# # the FindFreetype module from cmake v3 at least, included here +# find_package(Freetype-v2fix REQUIRED) +# else() +# find_package(Freetype REQUIRED) +# endif() +# +# if(FREETYPE_FOUND) +# include_directories(${FREETYPE_INCLUDE_DIRS}) +# link_directories(${FREETYPE_LINK_DIRS}) +# list(APPEND CORE_LINK_LIBS ${FREETYPE_LIBRARY}) +# endif() +# mark_as_advanced(FREETYPE_INCLUDE_DIRS FREETYPE_LIBRARY FREETYPE_LINK_DIRECTORIES) + +# Boost and Python +if(BUILD_PYTHON_BINDINGS) + find_package(PythonInterp 2 REQUIRED) + find_package(PythonLibs 2 REQUIRED) + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -c "from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,prefix=''))" + OUTPUT_VARIABLE PYTHON_INSTDIR + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + if(PYTHONLIBS_FOUND) + include_directories(${PYTHON_INCLUDE_DIR}) + endif() + + #set(Boost_USE_STATIC_LIBS OFF) + #set(Boost_USE_MULTITHREADED ON) + find_package(Boost 1.40.0 COMPONENTS python REQUIRED) + if(Boost_FOUND) + include_directories(${Boost_INCLUDE_DIR}) + list(APPEND PY_BINDINGS_LINK_LIBS ${PYTHON_LIBRARY} ${Boost_LIBRARIES}) + endif() + +endif() + +#Lua +if(BUILD_LUA_BINDINGS) + if(CMAKE_MAJOR_VERSION LESS 3) + find_package(Lua-v2fix) + else() + find_package(Lua) + endif() + if(LUA_FOUND) + include_directories(${LUA_INCLUDE_DIR}) + list(APPEND LUA_BINDINGS_LINK_LIBS ${LUA_LIBRARIES}) + endif() +endif() + + +#=================================== +# Setup paths ====================== +#=================================== + +set(PROJECT_SOURCE_DIR ${PROJECT_SOURCE_DIR}/..) 
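+# From here on PROJECT_SOURCE_DIR points at the libRocket root, one level above
+# Build/, so the include path and the FileList module below resolve against it.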
+ +include_directories( + ${PROJECT_SOURCE_DIR}/Include +) + +# Include list of source files +include(FileList) + +#=================================== +# Build libraries ================== +#=================================== + +set(LIBRARIES Core Controls Debugger) + +foreach(library ${LIBRARIES}) + set(NAME Rocket${library}) + + add_library(${NAME} ${${library}_SRC_FILES} + ${${library}_HDR_FILES} + ${${library}_PUB_HDR_FILES} + ${MASTER_${library}_PUB_HDR_FILES} + ) + + set_target_properties(${NAME} PROPERTIES + VERSION ${PROJECT_VERSION} + SOVERSION ${LIBROCKET_VERSION_MAJOR} + ) + + if(APPLE) + if(NOT IOS) + set_target_properties(${NAME} PROPERTIES + OSX_ARCHITECTURES "i386;x86_64;" + ) + endif(NOT IOS) + endif(APPLE) + + install(TARGETS ${NAME} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + ) +endforeach(library) + +# Build python bindings +if(BUILD_PYTHON_BINDINGS) + set(LIBRARIES core controls) + + foreach(library ${LIBRARIES}) + set(NAME _rocket${library}) + + add_library(${NAME} MODULE ${Py${library}_SRC_FILES} + ${Py${library}_HDR_FILES} + ${Py${library}_PUB_HDR_FILES} + ) + + if(APPLE) + if(NOT IOS) + set_target_properties(${NAME} PROPERTIES + OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" + ) + endif(NOT IOS) + endif(APPLE) + + set_target_properties(${NAME} PROPERTIES PREFIX "") + + install(TARGETS ${NAME} + LIBRARY DESTINATION ${PYTHON_INSTDIR} + ) + endforeach(library) +endif() + +# Build Lua bindings +if(BUILD_LUA_BINDINGS) + set(LIBRARIES Core Controls) + + foreach(library ${LIBRARIES}) + set(NAME Rocket${library}Lua) + + add_library(${NAME} ${Lua${library}_SRC_FILES} + ${Lua${library}_HDR_FILES} + ${Lua${library}_PUB_HDR_FILES} + ) + + set_target_properties(${NAME} PROPERTIES + VERSION ${PROJECT_VERSION} + SOVERSION ${LIBROCKET_VERSION_MAJOR} + ) + + if(APPLE) + if(NOT IOS) + set_target_properties(${NAME} PROPERTIES + OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" + ) + endif(NOT IOS) + endif(APPLE) + + install(TARGETS ${NAME} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + ) + endforeach(library) +endif() + + +#=================================== +# Link libraries =================== +#=================================== + +target_link_libraries(RocketCore ${CORE_LINK_LIBS}) +target_link_libraries(RocketControls RocketCore) +target_link_libraries(RocketDebugger RocketCore) + +if(BUILD_PYTHON_BINDINGS) + target_link_libraries(_rocketcore RocketCore ${PY_BINDINGS_LINK_LIBS}) + target_link_libraries(_rocketcontrols RocketControls ${PY_BINDINGS_LINK_LIBS}) +endif() + +if(BUILD_LUA_BINDINGS) + target_link_libraries(RocketCoreLua RocketCore ${LUA_BINDINGS_LINK_LIBS}) + target_link_libraries(RocketControlsLua RocketControls RocketCoreLua ${LUA_BINDINGS_LINK_LIBS}) +endif() + + +#=================================== +# Build samples ==================== +#=================================== + +# Build and link the samples +macro(bl_sample NAME) + if (WIN32) + add_executable(${NAME} WIN32 ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) + elseif(APPLE) + add_executable(${NAME} MACOSX_BUNDLE ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) + else() + add_executable(${NAME} ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) + endif() + + if (APPLE) + # We only support i386 for the samples as it still uses Carbon + set_target_properties(${NAME} PROPERTIES OSX_ARCHITECTURES "i386;" ) + endif() + + 
target_link_libraries(${NAME} ${ARGN}) +endmacro() + +if(BUILD_SAMPLES) + include(SampleFileList) + + set(samples treeview customlog drag loaddocument) + set(tutorials template datagrid datagrid_tree tutorial_drag) + + set(sample_LIBRARIES + shell + RocketCore + RocketControls + RocketDebugger + ) + + # Find OpenGL + find_package(OpenGL REQUIRED) + + if(OPENGL_FOUND) + include_directories(${OPENGL_INCLUDE_DIR}) + list(APPEND sample_LIBRARIES ${OPENGL_LIBRARIES}) + endif() + + # Set up required system libraries + if(WIN32) + if(SKIP_DIRECTX_SAMPLES) + message("-- Skipping all DirectX samples") + set(SKIP_DIRECTX9_SAMPLE ON) + set(SKIP_DIRECTX10_SAMPLE ON) + else() + message("-- Determing if DirectX samples can be built") + include(FindDirectX) + find_package(DirectX) + if(DirectX_FOUND) + set(DIRECTX_SAMPLE_LIST) + set(DIRECTX_SKIPPED_SAMPLE_LIST) + + # We should be able to build DirectX 9 sample + message("-- Determing if DirectX samples can be built - Yes") + + if(SKIP_DIRECTX9_SAMPLE) + message("-- Skipping build of DirectX 9 sample: User disabled") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") + else() + if(DirectX_LIBRARY) + if(DirectX_D3DX9_LIBRARY) + list(APPEND DIRECTX_SAMPLE_LIST "DirectX9 ") + else() + set(SKIP_DIRECTX9_SAMPLE ON) + message("-- Skipping build of DirectX 9 sample: DirectX_D3DX9_LIBRARY not found") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") + endif() + else() + set(SKIP_DIRECTX9_SAMPLE ON) + message("-- Skipping build of DirectX 9 sample: DirectX_LIBRARY not found") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") + endif() + endif() + + if(SKIP_DIRECTX10_SAMPLE) + message("-- Skipping build of DirectX 10 sample: User disabled") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") + else() + if(DirectX_D3D10_FOUND) + list(APPEND DIRECTX_SAMPLE_LIST "DirectX10 ") + else() + set(SKIP_DIRECTX10_SAMPLE ON) + message("-- Skipping build of DirectX 10 sample: Missing DirectX_D3D10_INCLUDE_DIR, DirectX_D3D10_LIBRARY or DirectX_D3DX10_LIBRARY") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") + endif() + endif() + + + if(DIRECTX_SAMPLE_LIST) + message("-- Enabled DirectX samples: " ${DIRECTX_SAMPLE_LIST}) + endif() + if(DIRECTX_SKIPPED_SAMPLE_LIST) + message("-- Disabled DirectX samples: " ${DIRECTX_SKIPPED_SAMPLE_LIST}) + endif() + else() + message("-- Determing if DirectX samples can be built - No") + set(SKIP_DIRECTX9_SAMPLE ON) + set(SKIP_DIRECTX10_SAMPLE ON) + endif() + endif() + elseif(APPLE) + include(FindCarbon) + find_package(Carbon REQUIRED) + + if (Carbon_FOUND) + include_directories(${Carbon_INCLUDE_DIR}) + list(APPEND sample_LIBRARIES ${Carbon_LIBRARIES}) + endif() + else() + find_package(X11 REQUIRED) + if (X11_FOUND) + list(APPEND sample_LIBRARIES ${X11_LIBRARIES}) + # shell/src/x11/InputX11.cpp:InitialiseX11Keymap uses Xkb if + # possible instead of XGetKeyboardMapping for performance + if(X11_Xkb_FOUND) + FIND_PACKAGE_MESSAGE(X11 "Found X11 KBlib: ${X11_X11_LIB}" "[${X11_X11_LIB}][${X11_XkbINCLUDE_DIR}]") + add_definitions(-DHAS_X11XKBLIB) + endif() + endif() + endif() + + set(SAMPLES_DIR opt/Rocket/Samples CACHE PATH "path to samples dir") + + # The samples and tutorials use the shell library + include_directories(${PROJECT_SOURCE_DIR}/Samples/shell/include) + + # Build and install sample shell library + add_library(shell STATIC ${shell_SRC_FILES} ${shell_HDR_FILES}) + if (APPLE) + # We only support i386 for the samples as it still uses Carbon + set_target_properties(shell PROPERTIES OSX_ARCHITECTURES "i386;") + endif() 
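+  # bl_sample(<name> <libs...>) (defined above) adds the platform-appropriate
+  # executable target for a sample and links it against the given libraries,
+  # e.g. bl_sample(invaders ${sample_LIBRARIES}) as used further below.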
+ + # Build and install the basic samples + foreach(sample ${samples}) + bl_sample(${sample} ${sample_LIBRARIES}) + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/${sample}) + install(TARGETS ${sample} + RUNTIME DESTINATION ${SAMPLES_DIR}/${sample} + BUNDLE DESTINATION ${SAMPLES_DIR}) + endforeach() + + if(WIN32) + if(NOT SKIP_DIRECTX9_SAMPLE) + include_directories(${DirectX_INCLUDE_DIR}) + + bl_sample(directx ${sample_LIBRARIES} ${DirectX_LIBRARY} ${DirectX_D3DX9_LIBRARY}) + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx) + install(TARGETS directx + RUNTIME DESTINATION ${SAMPLES_DIR}/directx + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + if(NOT SKIP_DIRECTX10_SAMPLE) + include_directories(${DirectX_INCLUDE_DIR} ${DirectX_D3D10_INCLUDE_DIRS}) + + bl_sample(directx10 ${sample_LIBRARIES} ${DirectX_D3D10_LIBRARIES}) + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx10) + install(TARGETS directx10 + RUNTIME DESTINATION ${SAMPLES_DIR}/directx10 + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + endif() + + message("-- Can SDL2 sample be built") + find_package(SDL) + if(SDL_FOUND) + find_package(SDL_image) + if(SDL_IMAGE_FOUND) + find_package(GLEW) + if(GLEW_FOUND) + message("-- Can SDL2 sample be built - yes") + include_directories(${SDL_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) + + bl_sample(sdl2 ${sample_LIBRARIES} ${SDL_LIBRARY} ${SDL_IMAGE_LIBRARY} ${GLEW_LIBRARY}) + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sdl2) + install(TARGETS sdl2 + RUNTIME DESTINATION ${SAMPLES_DIR}/sdl2 + BUNDLE DESTINATION ${SAMPLES_DIR}) + else() + message("-- Can SDL2 sample be built - GLEW not found") + endif() + else() + message("-- Can SDL2 sample be built - SDL2_image not found") + endif() + else() + message("-- Can SDL2 sample be built - SDL2 not found") + endif() + + + message("-- Can SFML 1.x sample be built") + find_package(SFML 1 COMPONENTS graphics window system) + if(NOT SFML_FOUND) + message("-- Can SFML 1.x sample be built - no") + elseif(SFML_VERSION_MAJOR GREATER 1) + message("-- Can SFML 1.x sample be built - no: Version 2 detected") + else() + message("-- Can SFML 1.x sample be built - yes") + + include_directories(${SFML_INCLUDE_DIR}) + + bl_sample(sfml ${sample_LIBRARIES} ${SFML_LIBRARIES}) + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml) + install(TARGETS sfml + RUNTIME DESTINATION ${SAMPLES_DIR}/sfml + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + message("-- Can SFML 2.x sample be built") + find_package(SFML 2 COMPONENTS graphics window system) + if(NOT SFML_FOUND) + message("-- Can SFML 2.x sample be built - no") + else() + find_package(GLEW) + if(GLEW_FOUND) + message("-- Can SFML 2.x sample be built - yes: with GLEW") + include_directories(${SFML_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) + add_definitions( -DENABLE_GLEW ) + bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES} ${GLEW_LIBRARY}) + else() + message("-- Can SFML 2.x sample be built - yes: without GLEW") + include_directories(${SFML_INCLUDE_DIR}) + bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES}) + endif() + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml2) + install(TARGETS 
sfml2 + RUNTIME DESTINATION ${SAMPLES_DIR}/sfml2 + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + # Build and install the tutorials + foreach(tutorial ${tutorials}) + bl_sample(${tutorial} ${sample_LIBRARIES}) + + # The tutorials always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/tutorial/${tutorial}) + install(TARGETS ${tutorial} + RUNTIME DESTINATION ${SAMPLES_DIR}/${tutorial} + BUNDLE DESTINATION ${SAMPLES_DIR}) + endforeach() + + # Build and install invaders sample + bl_sample(invaders ${sample_LIBRARIES}) + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/invaders) + install(TARGETS invaders + RUNTIME DESTINATION ${SAMPLES_DIR}/invaders + BUNDLE DESTINATION ${SAMPLES_DIR}) + + if(BUILD_PYTHON_BINDINGS) + # Build and install pyinvaders sample + bl_sample(pyinvaders ${sample_LIBRARIES} ${PYTHON_LIBRARIES} ${PY_BINDINGS_LINK_LIBS}) + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/pyinvaders) + install(TARGETS pyinvaders + RUNTIME DESTINATION ${SAMPLES_DIR}/pyinvaders + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + if(BUILD_LUA_BINDINGS) + bl_sample(luainvaders RocketCoreLua RocketControlsLua ${sample_LIBRARIES} ${LUA_BINDINGS_LINK_LIBS}) + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/luainvaders) + install(TARGETS luainvaders + RUNTIME DESTINATION ${SAMPLES_DIR}/luainvaders + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() +endif() + + +#=================================== +# Installation ===================== +#=================================== + +if(BUILD_LUA_BINDINGS AND BUILD_PYTHON_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + ) +else() + if(NOT BUILD_LUA_BINDINGS AND NOT BUILD_PYTHON_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + PATTERN "Python" EXCLUDE + PATTERN "Lua" EXCLUDE + ) + else() + if(BUILD_PYTHON_BINDINGS) + install(FILES ${PROJECT_SOURCE_DIR}/bin/rocket.py + DESTINATION ${PYTHON_INSTDIR} + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + PATTERN "Lua" EXCLUDE + ) + else() + if(BUILD_LUA_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + PATTERN "Python" EXCLUDE + ) + else() + message(FATAL_ERROR "ASSERT: Unexpected option combination, this is a logical impossibility.") + endif() + endif() + endif() +endif() + +if(BUILD_SAMPLES) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/assets + DESTINATION ${SAMPLES_DIR} + ) + + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/template/data + DESTINATION ${SAMPLES_DIR}/tutorial/template + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid/data + DESTINATION ${SAMPLES_DIR}/tutorial/datagrid + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid_tree/data + DESTINATION ${SAMPLES_DIR}/tutorial/datagrid_tree + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/tutorial_drag/data + DESTINATION ${SAMPLES_DIR}/tutorial/tutorial_drag + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/treeview/data + DESTINATION ${SAMPLES_DIR}/basic/treeview + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/drag/data + DESTINATION ${SAMPLES_DIR}/basic/drag + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/invaders/data + DESTINATION ${SAMPLES_DIR}/invaders + ) + + if(BUILD_PYTHON_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/pyinvaders/data + DESTINATION ${SAMPLES_DIR}/pyinvaders + ) + endif() + + if(BUILD_LUA_BINDINGS) + install(DIRECTORY 
${PROJECT_SOURCE_DIR}/Samples/luainvaders/data + DESTINATION ${SAMPLES_DIR}/luainvaders + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/luainvaders/lua + DESTINATION ${SAMPLES_DIR}/luainvaders + ) + endif() +endif() diff --git a/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt new file mode 100644 index 0000000..f4493c7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt @@ -0,0 +1,2 @@ +add_subdirectory(Build) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt new file mode 100644 index 0000000..4ccb85d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt @@ -0,0 +1,26 @@ +cmake_minimum_required(VERSION 2.8) +cmake_policy(SET CMP0011 NEW) +project(noise CXX) + +# http://sourceforge.net/projects/libnoise + +file(GLOB SOURCE_CODE src/*.cpp src/*.h src/model/*.cpp src/model/*.h src/module/*.cpp src/module/*.h) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/etc) +add_library(${PACKAGE} SHARED ${SOURCE_CODE}) + +#IF(MSVC) +# add_definitions(/nologo /c /D_CRT_SECURE_NO_DEPRECATE) +#ENDIF() + +file(GLOB HEADER_CODE src/*.h ) +INSTALL( FILES ${HEADER_CODE} + DESTINATION "include/${PACKAGE}") + +file(GLOB HEADER_CODE src/model/*.h ) +INSTALL( FILES ${HEADER_CODE} + DESTINATION "include/${PACKAGE}/model") + +file(GLOB HEADER_CODE src/module/*.h ) +INSTALL( FILES ${HEADER_CODE} + DESTINATION "include/${PACKAGE}/module") diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp new file mode 100644 index 0000000..08c2a9f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp @@ -0,0 +1,1147 @@ +#include "OIS.h" + +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +////////////////////////////////////Needed Windows Headers//////////// +#if defined OIS_WIN32_PLATFORM +# define WIN32_LEAN_AND_MEAN +# include "windows.h" +# include "resource.h" + +////////////////////////////////////Needed Linux Headers////////////// +#elif defined OIS_LINUX_PLATFORM +# include +# include +#else +# error Sorry, not yet implemented on this platform. 
+#endif + + +using namespace OIS; + +#if defined OIS_WIN32_PLATFORM + +// The dialog proc we have to give to CreateDialog +LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) +{ + return FALSE; +} + +#endif + +//////////// Event handler class declaration //////////////////////////////////////////////// +class Application; +class JoystickManager; +class EffectManager; + +class EventHandler : public KeyListener, public JoyStickListener +{ + protected: + + Application* _pApplication; + JoystickManager* _pJoystickMgr; + EffectManager* _pEffectMgr; + + public: + + EventHandler(Application* pApp); + void initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr); + + bool keyPressed( const KeyEvent &arg ); + bool keyReleased( const KeyEvent &arg ); + + bool buttonPressed( const JoyStickEvent &arg, int button ); + bool buttonReleased( const JoyStickEvent &arg, int button ); + + bool axisMoved( const JoyStickEvent &arg, int axis ); + + bool povMoved( const JoyStickEvent &arg, int pov ); +}; + +//////////// Variable classes //////////////////////////////////////////////////////// + +class Variable +{ + protected: + + double _dInitValue; + double _dValue; + + public: + + Variable(double dInitValue) : _dInitValue(dInitValue) { reset(); } + + double getValue() const { return _dValue; } + + void reset() { _dValue = _dInitValue; } + + virtual void setValue(double dValue) { _dValue = dValue; } + + virtual string toString() const + { + ostringstream oss; + oss << _dValue; + return oss.str(); + } + + virtual void update() {}; +}; + +class Constant : public Variable +{ + public: + + Constant(double dInitValue) : Variable(dInitValue) {} + + virtual void setValue(double dValue) { } + +}; + +class LimitedVariable : public Variable +{ + protected: + + double _dMinValue; + double _dMaxValue; + + public: + + LimitedVariable(double dInitValue, double dMinValue, double dMaxValue) + : _dMinValue(dMinValue), _dMaxValue(dMaxValue), Variable(dInitValue) + {} + + virtual void setValue(double dValue) + { + _dValue = dValue; + if (_dValue > _dMaxValue) + _dValue = _dMaxValue; + else if (_dValue < _dMinValue) + _dValue = _dMinValue; + } + +/* virtual string toString() const + { + ostringstream oss; + oss << setiosflags(ios_base::right) << setw(4) + << (int)(200.0 * getValue()/(_dMaxValue - _dMinValue)); // [-100%, +100%] + return oss.str(); + }*/ +}; + +class TriangleVariable : public LimitedVariable +{ + protected: + + double _dDeltaValue; + + public: + + TriangleVariable(double dInitValue, double dDeltaValue, double dMinValue, double dMaxValue) + : LimitedVariable(dInitValue, dMinValue, dMaxValue), _dDeltaValue(dDeltaValue) {}; + + virtual void update() + { + double dValue = getValue() + _dDeltaValue; + if (dValue > _dMaxValue) + { + dValue = _dMaxValue; + _dDeltaValue = -_dDeltaValue; + //cout << "Decreasing variable towards " << _dMinValue << endl; + } + else if (dValue < _dMinValue) + { + dValue = _dMinValue; + _dDeltaValue = -_dDeltaValue; + //cout << "Increasing variable towards " << _dMaxValue << endl; + } + setValue(dValue); + //cout << "TriangleVariable::update : delta=" << _dDeltaValue << ", value=" << dValue << endl; + } +}; + +//////////// Variable effect class ////////////////////////////////////////////////////////// + +typedef map MapVariables; +typedef void (*EffectVariablesApplier)(MapVariables& mapVars, Effect* pEffect); + +class VariableEffect +{ + protected: + + // Effect description + const char* _pszDesc; + + // The associate OIS effect + Effect* _pEffect; + + // The 
effect variables. + MapVariables _mapVariables; + + // The effect variables applier function. + EffectVariablesApplier _pfApplyVariables; + + // True if the effect is currently being played. + bool _bActive; + + public: + + VariableEffect(const char* pszDesc, Effect* pEffect, + const MapVariables& mapVars, const EffectVariablesApplier pfApplyVars) + : _pszDesc(pszDesc), _pEffect(pEffect), + _mapVariables(mapVars), _pfApplyVariables(pfApplyVars), _bActive(false) + {} + + ~VariableEffect() + { + if (_pEffect) + delete _pEffect; + MapVariables::iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + if (iterVars->second) + delete iterVars->second; + + } + + void setActive(bool bActive = true) + { + reset(); + _bActive = bActive; + } + + bool isActive() + { + return _bActive; + } + + Effect* getFFEffect() + { + return _pEffect; + } + + const char* getDescription() const + { + return _pszDesc; + } + + void update() + { + if (isActive()) + { + // Update the variables. + MapVariables::iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + iterVars->second->update(); + + // Apply the updated variable values to the effect. + _pfApplyVariables(_mapVariables, _pEffect); + } + } + + void reset() + { + MapVariables::iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + iterVars->second->reset(); + _pfApplyVariables(_mapVariables, _pEffect); + } + + string toString() const + { + string str; + MapVariables::const_iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + str += iterVars->first + ":" + iterVars->second->toString() + " "; + return str; + } +}; + +//////////// Joystick manager class //////////////////////////////////////////////////////// + +class JoystickManager +{ + protected: + + // Input manager. + InputManager* _pInputMgr; + + // Vectors to hold joysticks and associated force feedback devices + vector _vecJoys; + vector _vecFFDev; + + // Selected joystick + int _nCurrJoyInd; + + // Force feedback detected ? + bool _bFFFound; + + // Selected joystick master gain. + float _dMasterGain; + + // Selected joystick auto-center mode. + bool _bAutoCenter; + + public: + + JoystickManager(InputManager* pInputMgr, EventHandler* pEventHdlr) + : _pInputMgr(pInputMgr), _nCurrJoyInd(-1), _dMasterGain(0.5), _bAutoCenter(true) + + { + _bFFFound = false; + for( int nJoyInd = 0; nJoyInd < pInputMgr->getNumberOfDevices(OISJoyStick); ++nJoyInd ) + { + //Create the stick + JoyStick* pJoy = (JoyStick*)pInputMgr->createInputObject( OISJoyStick, true ); + cout << endl << "Created buffered joystick #" << nJoyInd << " '" << pJoy->vendor() + << "' (Id=" << pJoy->getID() << ")"; + + // Check for FF, and if so, keep the joy and dump FF info + ForceFeedback* pFFDev = (ForceFeedback*)pJoy->queryInterface(Interface::ForceFeedback ); + if( pFFDev ) + { + _bFFFound = true; + + // Keep the joy to play with it. + pJoy->setEventCallback(pEventHdlr); + _vecJoys.push_back(pJoy); + + // Keep also the associated FF device + _vecFFDev.push_back(pFFDev); + + // Dump FF supported effects and other info. 
+ cout << endl << " * Number of force feedback axes : " + << pFFDev->getFFAxesNumber() << endl; + const ForceFeedback::SupportedEffectList &lstFFEffects = + pFFDev->getSupportedEffects(); + if (lstFFEffects.size() > 0) + { + cout << " * Supported effects :"; + ForceFeedback::SupportedEffectList::const_iterator itFFEff; + for(itFFEff = lstFFEffects.begin(); itFFEff != lstFFEffects.end(); ++itFFEff) + cout << " " << Effect::getEffectTypeName(itFFEff->second); + cout << endl << endl; + } + else + cout << "Warning: no supported effect found !" << endl; + } + else + { + cout << " (no force feedback support detected) => ignored." << endl << endl; + _pInputMgr->destroyInputObject(pJoy); + } + } + } + + ~JoystickManager() + { + for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) + _pInputMgr->destroyInputObject( _vecJoys[nJoyInd] ); + } + + size_t getNumberOfJoysticks() const + { + return _vecJoys.size(); + } + + bool wasFFDetected() const + { + return _bFFFound; + } + + enum EWhichJoystick { ePrevious=-1, eNext=+1 }; + + void selectJoystick(EWhichJoystick eWhich) + { + // Note: Reset the master gain to half the maximum and autocenter mode to Off, + // when really selecting a new joystick. + if (_nCurrJoyInd < 0) + { + _nCurrJoyInd = 0; + _dMasterGain = 0.5; // Half the maximum. + changeMasterGain(0.0); + } + else + { + _nCurrJoyInd += eWhich; + if (_nCurrJoyInd < -1 || _nCurrJoyInd >= (int)_vecJoys.size()) + _nCurrJoyInd = -1; + if (_vecJoys.size() > 1 && _nCurrJoyInd >= 0) + { + _dMasterGain = 0.5; // Half the maximum. + changeMasterGain(0.0); + } + } + } + + ForceFeedback* getCurrentFFDevice() + { + return (_nCurrJoyInd >= 0) ? _vecFFDev[_nCurrJoyInd] : 0; + } + + void changeMasterGain(float dDeltaPercent) + { + if (_nCurrJoyInd >= 0) + { + _dMasterGain += dDeltaPercent / 100; + if (_dMasterGain > 1.0) + _dMasterGain = 1.0; + else if (_dMasterGain < 0.0) + _dMasterGain = 0.0; + + _vecFFDev[_nCurrJoyInd]->setMasterGain(_dMasterGain); + } + } + + enum EAutoCenterHow { eOff, eOn, eToggle }; + + void changeAutoCenter(EAutoCenterHow eHow = eToggle) + { + if (_nCurrJoyInd >= 0) + { + if (eHow == eToggle) + _bAutoCenter = !_bAutoCenter; + else + _bAutoCenter = (eHow == eOn ? true : false); + _vecFFDev[_nCurrJoyInd]->setAutoCenterMode(_bAutoCenter); + } + } + + void captureEvents() + { + // This fires off buffered events for each joystick we have + for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) + if( _vecJoys[nJoyInd] ) + _vecJoys[nJoyInd]->capture(); + } + + string toString() const + { + // Warning: Wrong result if more than 10 joysticks ... + ostringstream oss; + oss << "Joy:" << (_nCurrJoyInd >= 0 ? (char)('0' + _nCurrJoyInd) : '-'); + oss << " Gain:" << setiosflags(ios_base::right) << setw(3) << (int)(_dMasterGain*100); + oss << "% Center:" << (_bAutoCenter ? 
" On " : "Off"); + return oss.str(); + } +}; + +//////////// Effect variables applier functions ///////////////////////////////////////////// +// These functions apply the given Variables to the given OIS::Effect + +// Variable force "Force" + optional "AttackFactor" constant, on a OIS::ConstantEffect +void forceVariableApplier(MapVariables& mapVars, Effect* pEffect) +{ + double dForce = mapVars["Force"]->getValue(); + double dAttackFactor = 1.0; + if (mapVars.find("AttackFactor") != mapVars.end()) + dAttackFactor = mapVars["AttackFactor"]->getValue(); + + ConstantEffect* pConstForce = dynamic_cast(pEffect->getForceEffect()); + pConstForce->level = (int)dForce; + pConstForce->envelope.attackLevel = (unsigned short)fabs(dForce*dAttackFactor); + pConstForce->envelope.fadeLevel = (unsigned short)fabs(dForce); // Fade never reached, in fact. +} + +// Variable "Period" on an OIS::PeriodicEffect +void periodVariableApplier(MapVariables& mapVars, Effect* pEffect) +{ + double dPeriod = mapVars["Period"]->getValue(); + + PeriodicEffect* pPeriodForce = dynamic_cast(pEffect->getForceEffect()); + pPeriodForce->period = (unsigned int)dPeriod; +} + + +//////////// Effect manager class ////////////////////////////////////////////////////////// + +class EffectManager +{ + protected: + + // The joystick manager + JoystickManager* _pJoystickMgr; + + // Vector to hold variable effects + vector _vecEffects; + + // Selected effect + int _nCurrEffectInd; + + // Update frequency (Hz) + unsigned int _nUpdateFreq; + + // Indexes (in _vecEffects) of the variable effects that are playable by the selected joystick. + vector _vecPlayableEffectInd; + + + public: + + EffectManager(JoystickManager* pJoystickMgr, unsigned int nUpdateFreq) + : _pJoystickMgr(pJoystickMgr), _nUpdateFreq(nUpdateFreq), _nCurrEffectInd(-1) + { + Effect* pEffect; + MapVariables mapVars; + ConstantEffect* pConstForce; + PeriodicEffect* pPeriodForce; + + // Please don't modify or remove effects (unless there is some bug ...) : + // add new ones to enhance the test repository. + // And feel free to add any tested device, even when the test failed ! + // Tested devices capabilities : + // - Logitech G25 Racing wheel : + // * Only 1 axis => no directional 2D effect (only left and right) + // * Full support for constant force under WinXPSP2DX9 and Linux 2.6.22.9 + // * Full support for periodic forces under WinXPSP2DX9 + // (but poor rendering under 20ms period), and no support under Linux 2.6.22.9 + // * Full support reported (not tested) for all other forces under WinXPSP2DX9, + // and no support under Linux 2.6.22.9 + // - Logitech Rumble pad 2 : + // * Only 1 axis => no directional 2D effect (only left and right) + // * Forces amplitude is rendered through the inertia motors rotation frequency + // (stronger force => quicker rotation) + // * 2 inertia motors : 1 with small inertia, 1 with "heavy" one. + // => poor force feedback rendering ... + // * Support (poor) for all OIS forces under WinXPSP2DX9, + // and only for Triangle, Square and Sine periodic forces under Linux 2.6.22.9 + // (reported by enumeration, but does not seem to work actually) + // Master gain setting tests: + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK, Linux2.6.22.9=OK. + // Auto-center mode setting tests: + // - Logitech G25 Racing wheel : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. 
+ // - Logitech Rumble pad 2 : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. + + // 1) Constant force on 1 axis with 20s-period triangle oscillations in [-10K, +10K]. + // Notes: Linux: replay_length: no way to get it to work if not 0 or Effect::OIS_INFINITE + // Tested devices : + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (but only light motor involved), + // Linux2.6.22.9=Not supported + pEffect = new Effect(Effect::ConstantForce, Effect::Constant); + pEffect->direction = Effect::North; + pEffect->trigger_button = 0; + pEffect->trigger_interval = 0; + pEffect->replay_length = Effect::OIS_INFINITE; // Linux/Win32: Same behaviour as 0. + pEffect->replay_delay = 0; + pEffect->setNumAxes(1); + pConstForce = dynamic_cast(pEffect->getForceEffect()); + pConstForce->level = 5000; //-10K to +10k + pConstForce->envelope.attackLength = 0; + pConstForce->envelope.attackLevel = (unsigned short)pConstForce->level; + pConstForce->envelope.fadeLength = 0; + pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; + + mapVars.clear(); + mapVars["Force"] = + new TriangleVariable(0.0, // F0 + 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle + -10000.0, // Fmin + 10000.0); // Fmax + mapVars["AttackFactor"] = new Constant(1.0); + + _vecEffects.push_back + (new VariableEffect + ("Constant force on 1 axis with 20s-period triangle oscillations " + "of its signed amplitude in [-10K, +10K]", + pEffect, mapVars, forceVariableApplier)); + + // 2) Constant force on 1 axis with noticeable attack + // with 20s-period triangle oscillations in [-10K, +10K]. + // Tested devices : + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (including attack, but only light motor involved), + // Linux2.6.22.9=Not supported. + pEffect = new Effect(Effect::ConstantForce, Effect::Constant); + pEffect->direction = Effect::North; + pEffect->trigger_button = 0; + pEffect->trigger_interval = 0; + pEffect->replay_length = Effect::OIS_INFINITE; //(unsigned int)(1000000.0/_nUpdateFreq); // Linux: Does not work. + pEffect->replay_delay = 0; + pEffect->setNumAxes(1); + pConstForce = dynamic_cast(pEffect->getForceEffect()); + pConstForce->level = 5000; //-10K to +10k + pConstForce->envelope.attackLength = (unsigned int)(1000000.0/_nUpdateFreq/2); + pConstForce->envelope.attackLevel = (unsigned short)(pConstForce->level*0.1); + pConstForce->envelope.fadeLength = 0; // Never reached, actually. + pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; // Idem + + mapVars.clear(); + mapVars["Force"] = + new TriangleVariable(0.0, // F0 + 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle + -10000.0, // Fmin + 10000.0); // Fmax + mapVars["AttackFactor"] = new Constant(0.1); + + _vecEffects.push_back + (new VariableEffect + ("Constant force on 1 axis with noticeable attack (app update period / 2)" + "and 20s-period triangle oscillations of its signed amplitude in [-10K, +10K]", + pEffect, mapVars, forceVariableApplier)); + + // 3) Triangle periodic force on 1 axis with 40s-period triangle oscillations + // of its period in [10, 400] ms, and constant amplitude + // Tested devices : + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK but only light motor involved, + // Linux2.6.22.9=Failed. 
+      pEffect = new Effect(Effect::PeriodicForce, Effect::Triangle);
+      pEffect->direction = Effect::North;
+      pEffect->trigger_button = 0;
+      pEffect->trigger_interval = 0;
+      pEffect->replay_length = Effect::OIS_INFINITE;
+      pEffect->replay_delay = 0;
+      pEffect->setNumAxes(1);
+      pPeriodForce = dynamic_cast<PeriodicEffect*>(pEffect->getForceEffect());
+      pPeriodForce->magnitude = 10000;  // 0 to +10k
+      pPeriodForce->offset = 0;
+      pPeriodForce->phase = 0;  // 0 to 35599
+      pPeriodForce->period = 10000;  // Micro-seconds
+      pPeriodForce->envelope.attackLength = 0;
+      pPeriodForce->envelope.attackLevel = (unsigned short)pPeriodForce->magnitude;
+      pPeriodForce->envelope.fadeLength = 0;
+      pPeriodForce->envelope.fadeLevel = (unsigned short)pPeriodForce->magnitude;
+
+      mapVars.clear();
+      mapVars["Period"] =
+        new TriangleVariable(1*1000.0, // P0
+                             4*(400-10)*1000.0/_nUpdateFreq / 40.0, // dP for a 40s-period triangle
+                             10*1000.0, // Pmin
+                             400*1000.0); // Pmax
+      _vecEffects.push_back
+        (new VariableEffect
+          ("Periodic force on 1 axis with 40s-period triangle oscillations "
+           "of its period in [10, 400] ms, and constant amplitude",
+           pEffect, mapVars, periodVariableApplier));
+
+    }
+
+    ~EffectManager()
+    {
+      vector<VariableEffect*>::iterator iterEffs;
+      for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++)
+        delete *iterEffs;
+    }
+
+    void updateActiveEffects()
+    {
+      vector<VariableEffect*>::iterator iterEffs;
+      for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++)
+        if ((*iterEffs)->isActive())
+        {
+          (*iterEffs)->update();
+          _pJoystickMgr->getCurrentFFDevice()->modify((*iterEffs)->getFFEffect());
+        }
+    }
+
+    void checkPlayableEffects()
+    {
+      // Nothing to do if no joystick currently selected
+      if (!_pJoystickMgr->getCurrentFFDevice())
+        return;
+
+      // Get the list of indexes of effects that the selected device can play
+      _vecPlayableEffectInd.clear();
+      for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++)
+      {
+        const Effect::EForce eForce = _vecEffects[nEffInd]->getFFEffect()->force;
+        const Effect::EType eType = _vecEffects[nEffInd]->getFFEffect()->type;
+        if (_pJoystickMgr->getCurrentFFDevice()->supportsEffect(eForce, eType))
+        {
+          _vecPlayableEffectInd.push_back(nEffInd);
+        }
+      }
+
+      // Print details about playable effects
+      if (_vecPlayableEffectInd.empty())
+      {
+        cout << endl << endl << "The device can't play any effect of the test set" << endl;
+      }
+      else
+      {
+        cout << endl << endl << "Selected device can play the following effects :" << endl;
+        for (size_t nEffIndInd = 0; nEffIndInd < _vecPlayableEffectInd.size(); nEffIndInd++)
+          printEffect(_vecPlayableEffectInd[nEffIndInd]);
+        cout << endl;
+      }
+    }
+
+    enum EWhichEffect { ePrevious=-1, eNone=0, eNext=+1 };
+
+    void selectEffect(EWhichEffect eWhich)
+    {
+
+      // Nothing to do if no joystick currently selected
+      if (!_pJoystickMgr->getCurrentFFDevice())
+      {
+        cout << "\nNo Joystick selected.\n";
+        return;
+      }
+
+      // Nothing to do if joystick cannot play any effect
+      if (_vecPlayableEffectInd.empty())
+      {
+        cout << "\nNo playable effects.\n";
+        return;
+      }
+
+      // If no effect selected, and next or previous requested, select the first one.
+      if (eWhich != eNone && _nCurrEffectInd < 0)
+        _nCurrEffectInd = 0;
+
+      // Otherwise, remove the current one from the device,
+      // and then select the requested one if any.
+ else if (_nCurrEffectInd >= 0) + { + _pJoystickMgr->getCurrentFFDevice() + ->remove(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); + _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(false); + _nCurrEffectInd += eWhich; + if (_nCurrEffectInd < -1 || _nCurrEffectInd >= (int)_vecPlayableEffectInd.size()) + _nCurrEffectInd = -1; + } + + // If no effect must be selected, reset the selection index + if (eWhich == eNone) + { + _nCurrEffectInd = -1; + } + + // Otherwise, upload the new selected effect to the device if any. + else if (_nCurrEffectInd >= 0) + { + _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(true); + _pJoystickMgr->getCurrentFFDevice() + ->upload(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); + } + } + + void printEffect(size_t nEffInd) + { + cout << "* #" << nEffInd << " : " << _vecEffects[nEffInd]->getDescription() << endl; + } + + void printEffects() + { + for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) + printEffect(nEffInd); + } + + string toString() const + { + ostringstream oss; + oss << "DevMem: " << setiosflags(ios_base::right) << setw(3); + + //This causes constant exceptions with my device. Not needed for anything other than debugging + //if (_pJoystickMgr->getCurrentFFDevice()) + // oss << _pJoystickMgr->getCurrentFFDevice()->getFFMemoryLoad() << "%"; + //else + // oss << "----"; + + oss << " Effect:" << setw(2); + if (_nCurrEffectInd >= 0) + oss << _vecPlayableEffectInd[_nCurrEffectInd] + << " " << _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->toString(); + else + oss << "--"; + return oss.str(); + } +}; + +//////////// Application class //////////////////////////////////////////////////////// + +class Application +{ + protected: + InputManager* _pInputMgr; + EventHandler* _pEventHdlr; + Keyboard* _pKeyboard; + JoystickManager* _pJoystickMgr; + EffectManager* _pEffectMgr; + +#if defined OIS_WIN32_PLATFORM + HWND _hWnd; +#elif defined OIS_LINUX_PLATFORM + Display* _pXDisp; + Window _xWin; +#endif + + bool _bMustStop; + bool _bIsInitialized; + + int _nStatus; + + // App. hart beat frequency. + static const unsigned int _nHartBeatFreq = 20; // Hz + + // Effects update frequency (Hz) : Needs to be quite lower than app. hart beat frequency, + // if we want to be able to calmly study effect changes ... 
+ static const unsigned int _nEffectUpdateFreq = 1; // Hz + + public: + + Application(int argc, const char* argv[]) + { + _pInputMgr = 0; + _pEventHdlr = 0; + _pKeyboard = 0; + _pJoystickMgr = 0; + _pEffectMgr = 0; + +#if defined OIS_WIN32_PLATFORM + _hWnd = 0; +#elif defined OIS_LINUX_PLATFORM + _pXDisp = 0; + _xWin = 0; +#endif + + _bMustStop = false; + + _bIsInitialized = false; + _nStatus = 0; + } + + int initialize() + { + ostringstream wnd; + +#if defined OIS_WIN32_PLATFORM + + //Create a capture window for Input Grabbing + _hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); + if( _hWnd == NULL ) + OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); + + ShowWindow(_hWnd, SW_SHOW); + + wnd << (size_t)_hWnd; + +#elif defined OIS_LINUX_PLATFORM + + //Connects to default X window + if( !(_pXDisp = XOpenDisplay(0)) ) + OIS_EXCEPT(E_General, "Error opening X!"); + + //Create a window + _xWin = XCreateSimpleWindow(_pXDisp,DefaultRootWindow(_pXDisp), 0,0, 100,100, 0, 0, 0); + + //bind our connection to that window + XMapWindow(_pXDisp, _xWin); + + //Select what events we want to listen to locally + XSelectInput(_pXDisp, _xWin, StructureNotifyMask); + + //Wait for Window to show up + XEvent event; + do { XNextEvent(_pXDisp, &event); } while(event.type != MapNotify); + + wnd << _xWin; + +#endif + + // Create OIS input manager + ParamList pl; + pl.insert(make_pair(string("WINDOW"), wnd.str())); + _pInputMgr = InputManager::createInputSystem(pl); + cout << _pInputMgr->inputSystemName() << " created." << endl; + + // Create the event handler. + _pEventHdlr = new EventHandler(this); + + // Create a simple keyboard + _pKeyboard = (Keyboard*)_pInputMgr->createInputObject( OISKeyboard, true ); + _pKeyboard->setEventCallback( _pEventHdlr ); + + // Create the joystick manager. + _pJoystickMgr = new JoystickManager(_pInputMgr, _pEventHdlr); + if( !_pJoystickMgr->wasFFDetected() ) + { + cout << "No Force Feedback device detected." << endl; + _nStatus = 1; + return _nStatus; + } + + // Create force feedback effect manager. + _pEffectMgr = new EffectManager(_pJoystickMgr, _nEffectUpdateFreq); + + // Initialize the event handler. + _pEventHdlr->initialize(_pJoystickMgr, _pEffectMgr); + + _bIsInitialized = true; + + return _nStatus; + } + +#if defined OIS_LINUX_PLATFORM + + // This is just here to show that you still receive x11 events, + // as the lib only needs mouse/key events + void checkX11Events() + { + XEvent event; + + //Poll x11 for events + while( XPending(_pXDisp) > 0 ) + { + XNextEvent(_pXDisp, &event); + } + } +#endif + + int run() + { + const unsigned int nMaxEffectUpdateCnt = _nHartBeatFreq / _nEffectUpdateFreq; + unsigned int nEffectUpdateCnt = 0; + + // Initailize app. if not already done, and exit if something went wrong. + if (!_bIsInitialized) + initialize(); + + if (!_bIsInitialized) + return _nStatus; + + try + { + //Main polling loop + while(!_bMustStop) + { + // This fires off buffered events for keyboards + _pKeyboard->capture(); + + // This fires off buffered events for each joystick we have + _pJoystickMgr->captureEvents(); + + // Update currently selected effects if time has come to. + if (!nEffectUpdateCnt) + { + _pEffectMgr->updateActiveEffects(); + nEffectUpdateCnt = nMaxEffectUpdateCnt; + } + else + nEffectUpdateCnt--; + + // Update state line. 
+ cout << "\r" << _pJoystickMgr->toString() << " " << _pEffectMgr->toString() + << " "; + + //Throttle down CPU usage & handle OS events +#if defined OIS_WIN32_PLATFORM + Sleep( (DWORD)(1000.0/_nHartBeatFreq) ); + MSG msg; + while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) + { + TranslateMessage( &msg ); + DispatchMessage( &msg ); + } +#elif defined OIS_LINUX_PLATFORM + checkX11Events(); + usleep(1000000.0/_nHartBeatFreq); +#endif + } + } + catch( const Exception &ex ) + { +#if defined OIS_WIN32_PLATFORM + MessageBox(0, ex.eText, "Exception Raised!", MB_OK); +#else + cout << endl << "OIS Exception Caught!" << endl + << "\t" << ex.eText << "[Line " << ex.eLine << " in " << ex.eFile << "]" << endl; +#endif + } + + terminate(); + + return _nStatus; + } + + void stop() + { + _bMustStop = true; + } + + void terminate() + { + if (_pInputMgr) + { + _pInputMgr->destroyInputObject( _pKeyboard ); + _pKeyboard = 0; + if (_pJoystickMgr) + { + delete _pJoystickMgr; + _pJoystickMgr = 0; + } + InputManager::destroyInputSystem(_pInputMgr); + _pInputMgr = 0; + } + if (_pEffectMgr) + { + delete _pEffectMgr; + _pEffectMgr = 0; + } + if (_pEventHdlr) + { + delete _pEventHdlr; + _pEventHdlr = 0; + } + +#if defined OIS_LINUX_PLATFORM + // Be nice to X and clean up the x window + XDestroyWindow(_pXDisp, _xWin); + XCloseDisplay(_pXDisp); +#endif + } + + JoystickManager* getJoystickManager() + { + return _pJoystickMgr; + } + + EffectManager* getEffectManager() + { + return _pEffectMgr; + } + + void printHelp() + { + cout << endl + << "Keyboard actions :" << endl + << "* Escape : Exit App" << endl + << "* H : This help menu" << endl + << "* Right/Left : Select next/previous joystick among the FF capable detected ones" << endl + << "* Up/Down : Select next/previous effect for the selected joystick" << endl + << "* PgUp/PgDn : Increase/decrease from 5% the master gain " + << "for all the joysticks" << endl + << "* Space : Toggle auto-centering on all the joysticks" << endl; + if (_bIsInitialized) + { + cout << endl << "Implemented effects :" << endl << endl; + _pEffectMgr->printEffects(); + cout << endl; + } + } +}; + +//////////// Event handler class definition //////////////////////////////////////////////// + +EventHandler::EventHandler(Application* pApp) +: _pApplication(pApp) +{} + +void EventHandler::initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr) +{ + _pJoystickMgr = pJoystickMgr; + _pEffectMgr = pEffectMgr; +} + +bool EventHandler::keyPressed( const KeyEvent &arg ) +{ + switch (arg.key) + { + // Quit. + case KC_ESCAPE: + _pApplication->stop(); + break; + + // Help. + case KC_H: + _pApplication->printHelp(); + break; + + // Change current joystick. + case KC_RIGHT: + _pEffectMgr->selectEffect(EffectManager::eNone); + _pJoystickMgr->selectJoystick(JoystickManager::eNext); + _pEffectMgr->checkPlayableEffects(); + break; + case KC_LEFT: + _pEffectMgr->selectEffect(EffectManager::eNone); + _pJoystickMgr->selectJoystick(JoystickManager::ePrevious); + _pEffectMgr->checkPlayableEffects(); + break; + + // Change current effect. + case KC_UP: + _pEffectMgr->selectEffect(EffectManager::eNext); + break; + case KC_DOWN: + _pEffectMgr->selectEffect(EffectManager::ePrevious); + break; + + // Change current master gain. + case KC_PGUP: + _pJoystickMgr->changeMasterGain(5.0); // Percent + break; + case KC_PGDOWN: + _pJoystickMgr->changeMasterGain(-5.0); // Percent + break; + + // Toggle auto-center mode. 
+ case KC_SPACE: + _pJoystickMgr->changeAutoCenter(); + break; + + default: + cout << "Non mapped key: " << arg.key << endl; + } + return true; +} + +bool EventHandler::keyReleased( const KeyEvent &arg ) +{ + return true; +} + +bool EventHandler::buttonPressed( const JoyStickEvent &arg, int button ) +{ + return true; +} +bool EventHandler::buttonReleased( const JoyStickEvent &arg, int button ) +{ + return true; +} +bool EventHandler::axisMoved( const JoyStickEvent &arg, int axis ) +{ + return true; +} +bool EventHandler::povMoved( const JoyStickEvent &arg, int pov ) +{ + return true; +} + +//========================================================================================== +int main(int argc, const char* argv[]) +{ + + cout << endl + << "This is a simple command line Force Feedback testing demo ..." << endl + << "All connected joystick devices will be created and if FF Support is found," << endl + << "you'll be able to play some predefined variable effects on them." << endl << endl + << "Note: 1 effect can be played on 1 joystick at a time for the moment." << endl << endl; + + Application app(argc, argv); + + int status = app.initialize(); + + if (!status) + { + app.printHelp(); + + status = app.run(); + } + + cout << endl << endl << "Exiting ..." << endl << endl; + +#if defined OIS_WIN32_PLATFORM && _DEBUG + cout << "Click on this window and ..." << endl; + system("pause"); +#endif + + exit(status); +} diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am new file mode 100644 index 0000000..926f7f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am @@ -0,0 +1,11 @@ +INCLUDES = $(STLPORT_CFLAGS) -I$(top_srcdir)/includes $(CFLAGS) -I/usr/X11R6/include + +noinst_PROGRAMS = ConsoleApp FFConsoleTest + +ConsoleApp_SOURCES = OISConsole.cpp +ConsoleApp_LDFLAGS = -L$(top_builddir)/src +ConsoleApp_LDADD = -lOIS -lX11 -lXext + +FFConsoleTest_SOURCES = FFConsoleDemo.cpp +FFConsoleTest_LDFLAGS = -L$(top_builddir)/src +FFConsoleTest_LDADD = -lOIS -lX11 -lXext diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp new file mode 100644 index 0000000..0850004 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp @@ -0,0 +1,459 @@ +//////////////////////////////// OS Nuetral Headers //////////////// +#include "OISInputManager.h" +#include "OISException.h" +#include "OISKeyboard.h" +#include "OISMouse.h" +#include "OISJoyStick.h" +#include "OISEvents.h" + +//Advanced Usage +#include "OISForceFeedback.h" + +#include +#include +#include + +////////////////////////////////////Needed Windows Headers//////////// +#if defined OIS_WIN32_PLATFORM +# define WIN32_LEAN_AND_MEAN +# include "windows.h" +# ifdef min +# undef min +# endif +# include "resource.h" + LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ); +////////////////////////////////////////////////////////////////////// +////////////////////////////////////Needed Linux Headers////////////// +#elif defined OIS_LINUX_PLATFORM +# include +# include + void checkX11Events(); +////////////////////////////////////////////////////////////////////// +////////////////////////////////////Needed Mac Headers////////////// +#elif defined OIS_APPLE_PLATFORM +# include + void checkMacEvents(); +#endif +////////////////////////////////////////////////////////////////////// +using namespace OIS; + +//-- Some 
local prototypes --// +void doStartup(); +void handleNonBufferedKeys(); +void handleNonBufferedMouse(); +void handleNonBufferedJoy( JoyStick* js ); + +//-- Easy access globals --// +bool appRunning = true; //Global Exit Flag + +const char *g_DeviceType[6] = {"OISUnknown", "OISKeyboard", "OISMouse", "OISJoyStick", + "OISTablet", "OISOther"}; + +InputManager *g_InputManager = 0; //Our Input System +Keyboard *g_kb = 0; //Keyboard Device +Mouse *g_m = 0; //Mouse Device +JoyStick* g_joys[4] = {0,0,0,0}; //This demo supports up to 4 controllers + +//-- OS Specific Globals --// +#if defined OIS_WIN32_PLATFORM + HWND hWnd = 0; +#elif defined OIS_LINUX_PLATFORM + Display *xDisp = 0; + Window xWin = 0; +#elif defined OIS_APPLE_PLATFORM + WindowRef mWin = 0; +#endif + +//////////// Common Event handler class //////// +class EventHandler : public KeyListener, public MouseListener, public JoyStickListener +{ +public: + EventHandler() {} + ~EventHandler() {} + bool keyPressed( const KeyEvent &arg ) { + std::cout << " KeyPressed {" << arg.key + << ", " << ((Keyboard*)(arg.device))->getAsString(arg.key) + << "} || Character (" << (char)arg.text << ")" << std::endl; + return true; + } + bool keyReleased( const KeyEvent &arg ) { + if( arg.key == KC_ESCAPE || arg.key == KC_Q ) + appRunning = false; + std::cout << "KeyReleased {" << ((Keyboard*)(arg.device))->getAsString(arg.key) << "}\n"; + return true; + } + bool mouseMoved( const MouseEvent &arg ) { + const OIS::MouseState& s = arg.state; + std::cout << "\nMouseMoved: Abs(" + << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" + << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; + return true; + } + bool mousePressed( const MouseEvent &arg, MouseButtonID id ) { + const OIS::MouseState& s = arg.state; + std::cout << "\nMouse button #" << id << " pressed. Abs(" + << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" + << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; + return true; + } + bool mouseReleased( const MouseEvent &arg, MouseButtonID id ) { + const OIS::MouseState& s = arg.state; + std::cout << "\nMouse button #" << id << " released. Abs(" + << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" + << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; + return true; + } + bool buttonPressed( const JoyStickEvent &arg, int button ) { + std::cout << std::endl << arg.device->vendor() << ". Button Pressed # " << button; + return true; + } + bool buttonReleased( const JoyStickEvent &arg, int button ) { + std::cout << std::endl << arg.device->vendor() << ". Button Released # " << button; + return true; + } + bool axisMoved( const JoyStickEvent &arg, int axis ) + { + //Provide a little dead zone + if( arg.state.mAxes[axis].abs > 2500 || arg.state.mAxes[axis].abs < -2500 ) + std::cout << std::endl << arg.device->vendor() << ". Axis # " << axis << " Value: " << arg.state.mAxes[axis].abs; + return true; + } + bool povMoved( const JoyStickEvent &arg, int pov ) + { + std::cout << std::endl << arg.device->vendor() << ". 
POV" << pov << " "; + + if( arg.state.mPOV[pov].direction & Pov::North ) //Going up + std::cout << "North"; + else if( arg.state.mPOV[pov].direction & Pov::South ) //Going down + std::cout << "South"; + + if( arg.state.mPOV[pov].direction & Pov::East ) //Going right + std::cout << "East"; + else if( arg.state.mPOV[pov].direction & Pov::West ) //Going left + std::cout << "West"; + + if( arg.state.mPOV[pov].direction == Pov::Centered ) //stopped/centered out + std::cout << "Centered"; + return true; + } + + bool vector3Moved( const JoyStickEvent &arg, int index) + { + std::cout.precision(2); + std::cout.flags(std::ios::fixed | std::ios::right); + std::cout << std::endl << arg.device->vendor() << ". Orientation # " << index + << " X Value: " << arg.state.mVectors[index].x + << " Y Value: " << arg.state.mVectors[index].y + << " Z Value: " << arg.state.mVectors[index].z; + std::cout.precision(); + std::cout.flags(); + return true; + } +}; + +//Create a global instance +EventHandler handler; + +int main() +{ + std::cout << "\n\n*** OIS Console Demo App is starting up... *** \n"; + try + { + doStartup(); + std::cout << "\nStartup done... Hit 'q' or ESC to exit.\n\n"; + + while(appRunning) + { + //Throttle down CPU usage + #if defined OIS_WIN32_PLATFORM + Sleep(90); + MSG msg; + while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) + { + TranslateMessage( &msg ); + DispatchMessage( &msg ); + } + #elif defined OIS_LINUX_PLATFORM + checkX11Events(); + usleep( 500 ); + #elif defined OIS_APPLE_PLATFORM + checkMacEvents(); + usleep( 500 ); + #endif + + if( g_kb ) + { + g_kb->capture(); + if( !g_kb->buffered() ) + handleNonBufferedKeys(); + } + + if( g_m ) + { + g_m->capture(); + if( !g_m->buffered() ) + handleNonBufferedMouse(); + } + + for( int i = 0; i < 4 ; ++i ) + { + if( g_joys[i] ) + { + g_joys[i]->capture(); + if( !g_joys[i]->buffered() ) + handleNonBufferedJoy( g_joys[i] ); + } + } + } + } + catch( const Exception &ex ) + { + #if defined OIS_WIN32_PLATFORM + MessageBox( NULL, ex.eText, "An exception has occurred!", MB_OK | + MB_ICONERROR | MB_TASKMODAL); + #else + std::cout << "\nOIS Exception Caught!\n" << "\t" << ex.eText << "[Line " + << ex.eLine << " in " << ex.eFile << "]\nExiting App"; + #endif + } + catch(std::exception &ex) + { + std::cout << "Caught std::exception: what = " << ex.what() << std::endl; + } + + //Destroying the manager will cleanup unfreed devices + if( g_InputManager ) + InputManager::destroyInputSystem(g_InputManager); + +#if defined OIS_LINUX_PLATFORM + // Be nice to X and clean up the x window + XDestroyWindow(xDisp, xWin); + XCloseDisplay(xDisp); +#endif + + std::cout << "\n\nGoodbye\n\n"; + return 0; +} + +void doStartup() +{ + ParamList pl; + +#if defined OIS_WIN32_PLATFORM + //Create a capture window for Input Grabbing + hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); + if( hWnd == NULL ) + OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); + + ShowWindow(hWnd, SW_SHOW); + + std::ostringstream wnd; + wnd << (size_t)hWnd; + + pl.insert(std::make_pair( std::string("WINDOW"), wnd.str() )); + + //Default mode is foreground exclusive..but, we want to show mouse - so nonexclusive +// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_FOREGROUND" ))); +// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_NONEXCLUSIVE"))); +#elif defined OIS_LINUX_PLATFORM + //Connects to default X window + if( !(xDisp = XOpenDisplay(0)) ) + OIS_EXCEPT(E_General, "Error opening X!"); + //Create a window + xWin 
= XCreateSimpleWindow(xDisp,DefaultRootWindow(xDisp), 0,0, 100,100, 0, 0, 0); + //bind our connection to that window + XMapWindow(xDisp, xWin); + //Select what events we want to listen to locally + XSelectInput(xDisp, xWin, StructureNotifyMask); + XEvent evtent; + do + { + XNextEvent(xDisp, &evtent); + } while(evtent.type != MapNotify); + + std::ostringstream wnd; + wnd << xWin; + + pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); + + //For this demo, show mouse and do not grab (confine to window) +// pl.insert(std::make_pair(std::string("x11_mouse_grab"), std::string("false"))); +// pl.insert(std::make_pair(std::string("x11_mouse_hide"), std::string("false"))); +#elif defined OIS_APPLE_PLATFORM + // create the window rect in global coords + ::Rect windowRect; + windowRect.left = 0; + windowRect.top = 0; + windowRect.right = 300; + windowRect.bottom = 300; + + // set the default attributes for the window + WindowAttributes windowAttrs = kWindowStandardDocumentAttributes + | kWindowStandardHandlerAttribute + | kWindowInWindowMenuAttribute + | kWindowHideOnFullScreenAttribute; + + // Create the window + CreateNewWindow(kDocumentWindowClass, windowAttrs, &windowRect, &mWin); + + // Color the window background black + SetThemeWindowBackground (mWin, kThemeBrushBlack, true); + + // Set the title of our window + CFStringRef titleRef = CFStringCreateWithCString( kCFAllocatorDefault, "OIS Input", kCFStringEncodingASCII ); + SetWindowTitleWithCFString( mWin, titleRef ); + + // Center our window on the screen + RepositionWindow( mWin, NULL, kWindowCenterOnMainScreen ); + + // Install the event handler for the window + InstallStandardEventHandler(GetWindowEventTarget(mWin)); + + // This will give our window focus, and not lock it to the terminal + ProcessSerialNumber psn = { 0, kCurrentProcess }; + TransformProcessType( &psn, kProcessTransformToForegroundApplication ); + SetFrontProcess(&psn); + + // Display and select our window + ShowWindow(mWin); + SelectWindow(mWin); + + std::ostringstream wnd; + wnd << (unsigned int)mWin; //cast to int so it gets encoded correctly (else it gets stored as a hex string) + std::cout << "WindowRef: " << mWin << " WindowRef as int: " << wnd.str() << "\n"; + pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); +#endif + + //This never returns null.. it will raise an exception on errors + g_InputManager = InputManager::createInputSystem(pl); + + //Lets enable all addons that were compiled in: + g_InputManager->enableAddOnFactory(InputManager::AddOn_All); + + //Print debugging information + unsigned int v = g_InputManager->getVersionNumber(); + std::cout << "OIS Version: " << (v>>16 ) << "." << ((v>>8) & 0x000000FF) << "." 
<< (v & 0x000000FF) + << "\nRelease Name: " << g_InputManager->getVersionName() + << "\nManager: " << g_InputManager->inputSystemName() + << "\nTotal Keyboards: " << g_InputManager->getNumberOfDevices(OISKeyboard) + << "\nTotal Mice: " << g_InputManager->getNumberOfDevices(OISMouse) + << "\nTotal JoySticks: " << g_InputManager->getNumberOfDevices(OISJoyStick); + + //List all devices + DeviceList list = g_InputManager->listFreeDevices(); + for( DeviceList::iterator i = list.begin(); i != list.end(); ++i ) + std::cout << "\n\tDevice: " << g_DeviceType[i->first] << " Vendor: " << i->second; + + g_kb = (Keyboard*)g_InputManager->createInputObject( OISKeyboard, true ); + g_kb->setEventCallback( &handler ); + + g_m = (Mouse*)g_InputManager->createInputObject( OISMouse, true ); + g_m->setEventCallback( &handler ); + const MouseState &ms = g_m->getMouseState(); + ms.width = 100; + ms.height = 100; + + try + { + //This demo uses at most 4 joysticks - use old way to create (i.e. disregard vendor) + int numSticks = std::min(g_InputManager->getNumberOfDevices(OISJoyStick), 4); + for( int i = 0; i < numSticks; ++i ) + { + g_joys[i] = (JoyStick*)g_InputManager->createInputObject( OISJoyStick, true ); + g_joys[i]->setEventCallback( &handler ); + std::cout << "\n\nCreating Joystick " << (i + 1) + << "\n\tAxes: " << g_joys[i]->getNumberOfComponents(OIS_Axis) + << "\n\tSliders: " << g_joys[i]->getNumberOfComponents(OIS_Slider) + << "\n\tPOV/HATs: " << g_joys[i]->getNumberOfComponents(OIS_POV) + << "\n\tButtons: " << g_joys[i]->getNumberOfComponents(OIS_Button) + << "\n\tVector3: " << g_joys[i]->getNumberOfComponents(OIS_Vector3); + } + } + catch(OIS::Exception &ex) + { + std::cout << "\nException raised on joystick creation: " << ex.eText << std::endl; + } +} + +void handleNonBufferedKeys() +{ + if( g_kb->isKeyDown( KC_ESCAPE ) || g_kb->isKeyDown( KC_Q ) ) + appRunning = false; + + if( g_kb->isModifierDown(Keyboard::Shift) ) + std::cout << "Shift is down..\n"; + if( g_kb->isModifierDown(Keyboard::Alt) ) + std::cout << "Alt is down..\n"; + if( g_kb->isModifierDown(Keyboard::Ctrl) ) + std::cout << "Ctrl is down..\n"; +} + +void handleNonBufferedMouse() +{ + //Just dump the current mouse state + const MouseState &ms = g_m->getMouseState(); + std::cout << "\nMouse: Abs(" << ms.X.abs << " " << ms.Y.abs << " " << ms.Z.abs + << ") B: " << ms.buttons << " Rel(" << ms.X.rel << " " << ms.Y.rel << " " << ms.Z.rel << ")"; +} + +void handleNonBufferedJoy( JoyStick* js ) +{ + //Just dump the current joy state + const JoyStickState &joy = js->getJoyStickState(); + for( unsigned int i = 0; i < joy.mAxes.size(); ++i ) + std::cout << "\nAxis " << i << " X: " << joy.mAxes[i].abs; +} + +#if defined OIS_WIN32_PLATFORM +LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) +{ + return FALSE; +} +#endif + +#if defined OIS_LINUX_PLATFORM +//This is just here to show that you still recieve x11 events, as the lib only needs mouse/key events +void checkX11Events() +{ + XEvent event; + + //Poll x11 for events (keyboard and mouse events are caught here) + while( XPending(xDisp) > 0 ) + { + XNextEvent(xDisp, &event); + //Handle Resize events + if( event.type == ConfigureNotify ) + { + if( g_m ) + { + const MouseState &ms = g_m->getMouseState(); + ms.width = event.xconfigure.width; + ms.height = event.xconfigure.height; + } + } + else if( event.type == DestroyNotify ) + { + std::cout << "Exiting...\n"; + appRunning = false; + } + else + std::cout << "\nUnknown X Event: " << event.type << std::endl; + } +} +#endif + +#if 
defined OIS_APPLE_PLATFORM +void checkMacEvents() +{ + //TODO - Check for window resize events, and then adjust the members of mousestate + EventRef event = NULL; + EventTargetRef targetWindow = GetEventDispatcherTarget(); + + if( ReceiveNextEvent( 0, NULL, kEventDurationNoWait, true, &event ) == noErr ) + { + SendEventToEventTarget(event, targetWindow); + std::cout << "Event : " << GetEventKind(event) << "\n"; + ReleaseEvent(event); + } +} +#endif diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp new file mode 100644 index 0000000..6e70213 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp @@ -0,0 +1,563 @@ +/* +The zlib/libpng License + +Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) + +This software is provided 'as-is', without any express or implied warranty. In no event will +the authors be held liable for any damages arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including commercial +applications, and to alter it and redistribute it freely, subject to the following +restrictions: + + 1. The origin of this software must not be misrepresented; you must not claim that + you wrote the original software. If you use this software in a product, + an acknowledgment in the product documentation would be appreciated but is + not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "linux/LinuxForceFeedback.h" +#include "OISException.h" + +#include +#include +#include + +#ifdef HAVE_UNISTD_H +#include +#endif + +using namespace OIS; + +// 0 = No trace; 1 = Important traces; 2 = Debug traces +#define OIS_LINUX_JOYFF_DEBUG 1 + +#ifdef OIS_LINUX_JOYFF_DEBUG +# include + using namespace std; +#endif + +//--------------------------------------------------------------// +LinuxForceFeedback::LinuxForceFeedback(int deviceID) : + ForceFeedback(), mJoyStick(deviceID) +{ +} + +//--------------------------------------------------------------// +LinuxForceFeedback::~LinuxForceFeedback() +{ + // Unload all effects. + for(EffectList::iterator i = mEffectList.begin(); i != mEffectList.end(); ++i ) + { + struct ff_effect *linEffect = i->second; + if( linEffect ) + _unload(linEffect->id); + } + + mEffectList.clear(); +} + +//--------------------------------------------------------------// +unsigned short LinuxForceFeedback::getFFMemoryLoad() +{ + int nEffects = -1; + if (ioctl(mJoyStick, EVIOCGEFFECTS, &nEffects) == -1) + OIS_EXCEPT(E_General, "Unknown error reading max number of uploaded effects."); +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << "LinuxForceFeedback("<< mJoyStick + << ") : Read device max number of uploaded effects : " << nEffects << endl; +#endif + + return (unsigned short int)(nEffects > 0 ? 
100.0*mEffectList.size()/nEffects : 100); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::setMasterGain(float value) +{ + if (!mSetGainSupport) + { +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain " + << "is not supported by the device" << endl; +#endif + return; + } + + struct input_event event; + + memset(&event, 0, sizeof(event)); + event.type = EV_FF; + event.code = FF_GAIN; + if (value < 0.0) + value = 0.0; + else if (value > 1.0) + value = 1.0; + event.value = (__s32)(value * 0xFFFFUL); + +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain to " + << value << " => " << event.value << endl; +#endif + + if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { + OIS_EXCEPT(E_General, "Unknown error changing master gain."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::setAutoCenterMode(bool enabled) +{ + if (!mSetAutoCenterSupport) + { +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting auto-center mode " + << "is not supported by the device" << endl; +#endif + return; + } + + struct input_event event; + + memset(&event, 0, sizeof(event)); + event.type = EV_FF; + event.code = FF_AUTOCENTER; + event.value = (__s32)(enabled*0xFFFFFFFFUL); + +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Toggling auto-center to " + << enabled << " => 0x" << hex << event.value << dec << endl; +#endif + + if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { + OIS_EXCEPT(E_General, "Unknown error toggling auto-center."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::upload( const Effect* effect ) +{ + switch( effect->force ) + { + case OIS::Effect::ConstantForce: + _updateConstantEffect(effect); + break; + case OIS::Effect::ConditionalForce: + _updateConditionalEffect(effect); + break; + case OIS::Effect::PeriodicForce: + _updatePeriodicEffect(effect); + break; + case OIS::Effect::RampForce: + _updateRampEffect(effect); + break; + case OIS::Effect::CustomForce: + //_updateCustomEffect(effect); + //break; + default: + OIS_EXCEPT(E_NotImplemented, "Requested force not implemented yet, sorry!"); + break; + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::modify( const Effect* effect ) +{ + upload(effect); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::remove( const Effect* effect ) +{ + //Get the effect - if it exists + EffectList::iterator i = mEffectList.find(effect->_handle); + if( i != mEffectList.end() ) + { + struct ff_effect *linEffect = i->second; + if( linEffect ) + { + _stop(effect->_handle); + + _unload(effect->_handle); + + free(linEffect); + + mEffectList.erase(i); + } + else + mEffectList.erase(i); + } +} + +//--------------------------------------------------------------// +// To Signed16/Unsigned15 safe conversions +#define MaxUnsigned15Value 0x7FFF +#define toUnsigned15(value) \ + (__u16)((value) < 0 ? 0 : ((value) > MaxUnsigned15Value ? MaxUnsigned15Value : (value))) + +#define MaxSigned16Value 0x7FFF +#define MinSigned16Value -0x7FFF +#define toSigned16(value) \ + (__s16)((value) < MinSigned16Value ? MinSigned16Value : ((value) > MaxSigned16Value ? 
MaxSigned16Value : (value))) + +// OIS to Linux duration +#define LinuxInfiniteDuration 0xFFFF +#define OISDurationUnitMS 1000 // OIS duration unit (microseconds), expressed in milliseconds (theLinux duration unit) + +// linux/input.h : All duration values are expressed in ms. Values above 32767 ms (0x7fff) +// should not be used and have unspecified results. +#define LinuxDuration(oisDuration) ((oisDuration) == Effect::OIS_INFINITE ? LinuxInfiniteDuration \ + : toUnsigned15((oisDuration)/OISDurationUnitMS)) + + +// OIS to Linux levels +#define OISMaxLevel 10000 +#define LinuxMaxLevel 0x7FFF + +// linux/input.h : Valid range for the attack and fade levels is 0x0000 - 0x7fff +#define LinuxPositiveLevel(oisLevel) toUnsigned15(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) + +#define LinuxSignedLevel(oisLevel) toSigned16(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) + + +//--------------------------------------------------------------// +void LinuxForceFeedback::_setCommonProperties(struct ff_effect *event, + struct ff_envelope *ffenvelope, + const Effect* effect, const Envelope *envelope ) +{ + memset(event, 0, sizeof(struct ff_effect)); + + if (envelope && ffenvelope && envelope->isUsed()) { + ffenvelope->attack_length = LinuxDuration(envelope->attackLength); + ffenvelope->attack_level = LinuxPositiveLevel(envelope->attackLevel); + ffenvelope->fade_length = LinuxDuration(envelope->fadeLength); + ffenvelope->fade_level = LinuxPositiveLevel(envelope->fadeLevel); + } + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl; + if (envelope && ffenvelope) + { + cout << " Enveloppe :" << endl + << " AttackLen : " << envelope->attackLength + << " => " << ffenvelope->attack_length << endl + << " AttackLvl : " << envelope->attackLevel + << " => " << ffenvelope->attack_level << endl + << " FadeLen : " << envelope->fadeLength + << " => " << ffenvelope->fade_length << endl + << " FadeLvl : " << envelope->fadeLevel + << " => " << ffenvelope->fade_level << endl; + } +#endif + + event->direction = (__u16)(1 + (effect->direction*45.0+135.0)*0xFFFFUL/360.0); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Direction : " << Effect::getDirectionName(effect->direction) + << " => 0x" << hex << event->direction << dec << endl; +#endif + + // TODO trigger_button 0 vs. -1 + event->trigger.button = effect->trigger_button; // < 0 ? 
0 : effect->trigger_button; + event->trigger.interval = LinuxDuration(effect->trigger_interval); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Trigger :" << endl + << " Button : " << effect->trigger_button + << " => " << event->trigger.button << endl + << " Interval : " << effect->trigger_interval + << " => " << event->trigger.interval << endl; +#endif + + event->replay.length = LinuxDuration(effect->replay_length); + event->replay.delay = LinuxDuration(effect->replay_delay); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Replay :" << endl + << " Length : " << effect->replay_length + << " => " << event->replay.length << endl + << " Delay : " << effect->replay_delay + << " => " << event->replay.delay << endl; +#endif +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updateConstantEffect( const Effect* eff ) +{ + struct ff_effect event; + + ConstantEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); + + event.type = FF_CONSTANT; + event.id = -1; + + event.u.constant.level = LinuxSignedLevel(effect->level); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Level : " << effect->level + << " => " << event.u.constant.level << endl; +#endif + + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updateRampEffect( const Effect* eff ) +{ + struct ff_effect event; + + RampEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); + + event.type = FF_RAMP; + event.id = -1; + + event.u.ramp.start_level = LinuxSignedLevel(effect->startLevel); + event.u.ramp.end_level = LinuxSignedLevel(effect->endLevel); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " StartLevel : " << effect->startLevel + << " => " << event.u.ramp.start_level << endl + << " EndLevel : " << effect->endLevel + << " => " << event.u.ramp.end_level << endl; +#endif + + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updatePeriodicEffect( const Effect* eff ) +{ + struct ff_effect event; + + PeriodicEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, &event.u.periodic.envelope, eff, &effect->envelope); + + event.type = FF_PERIODIC; + event.id = -1; + + switch( eff->type ) + { + case OIS::Effect::Square: + event.u.periodic.waveform = FF_SQUARE; + break; + case OIS::Effect::Triangle: + event.u.periodic.waveform = FF_TRIANGLE; + break; + case OIS::Effect::Sine: + event.u.periodic.waveform = FF_SINE; + break; + case OIS::Effect::SawToothUp: + event.u.periodic.waveform = FF_SAW_UP; + break; + case OIS::Effect::SawToothDown: + event.u.periodic.waveform = FF_SAW_DOWN; + break; + // Note: No support for Custom periodic force effect for the moment + //case OIS::Effect::Custom: + //event.u.periodic.waveform = FF_CUSTOM; + //break; + default: + OIS_EXCEPT(E_General, "No such available effect for Periodic force!"); + break; + } + + event.u.periodic.period = LinuxDuration(effect->period); + event.u.periodic.magnitude = LinuxPositiveLevel(effect->magnitude); + event.u.periodic.offset = LinuxPositiveLevel(effect->offset); + event.u.periodic.phase = (__u16)(effect->phase*event.u.periodic.period/36000.0); // ????? 
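
[Editorial aside - not part of the OIS sources.] The phase assignment above, flagged with "?????" by the original author, is the last of the OIS-to-Linux unit conversions driven by the LinuxDuration / LinuxPositiveLevel / LinuxSignedLevel macros defined earlier: OIS durations (microseconds) are divided down to milliseconds and clamped to 15 bits, and OIS levels (range 0..10000, or -10000..10000 for signed values) are rescaled to the kernel's 0..0x7FFF / -0x7FFF..0x7FFF ranges. The following self-contained sketch restates those macros as plain functions, with example values in comments; it is an illustration of the same arithmetic under the constants shown above, not a replacement for the macros.

// Sketch of the OIS -> linux/input.h unit conversions used above.
// Assumes the constants from the macros (OISMaxLevel = 10000, LinuxMaxLevel = 0x7FFF);
// the OIS_INFINITE -> 0xFFFF special case of LinuxDuration is omitted for brevity.
#include <cstdint>
#include <algorithm>

namespace conversion_sketch {

constexpr long    kOISMaxLevel   = 10000;   // OIS level range is -10000..10000
constexpr long    kLinuxMaxLevel = 0x7FFF;  // kernel level range is -0x7FFF..0x7FFF

// OIS durations are microseconds; the kernel expects milliseconds <= 0x7FFF.
inline uint16_t toLinuxDuration(unsigned int oisMicroseconds)
{
    unsigned int ms = oisMicroseconds / 1000;            // e.g. 2'000'000 us -> 2000 ms
    return static_cast<uint16_t>(std::min(ms, 0x7FFFu));
}

inline uint16_t toLinuxPositiveLevel(int oisLevel)        // 0..10000 -> 0..0x7FFF
{
    long scaled = kLinuxMaxLevel * oisLevel / kOISMaxLevel;   // e.g. 5000 -> 16383
    return static_cast<uint16_t>(std::clamp(scaled, 0L, kLinuxMaxLevel));
}

inline int16_t toLinuxSignedLevel(int oisLevel)           // -10000..10000 -> -0x7FFF..0x7FFF
{
    long scaled = kLinuxMaxLevel * oisLevel / kOISMaxLevel;   // e.g. -10000 -> -32767
    return static_cast<int16_t>(std::clamp(scaled, -kLinuxMaxLevel, kLinuxMaxLevel));
}

} // namespace conversion_sketch
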
+ + // Note: No support for Custom periodic force effect for the moment + event.u.periodic.custom_len = 0; + event.u.periodic.custom_data = 0; + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Magnitude : " << effect->magnitude + << " => " << event.u.periodic.magnitude << endl + << " Period : " << effect->period + << " => " << event.u.periodic.period << endl + << " Offset : " << effect->offset + << " => " << event.u.periodic.offset << endl + << " Phase : " << effect->phase + << " => " << event.u.periodic.phase << endl; +#endif + + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updateConditionalEffect( const Effect* eff ) +{ + struct ff_effect event; + + ConditionalEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, NULL, eff, NULL); + + switch( eff->type ) + { + case OIS::Effect::Friction: + event.type = FF_FRICTION; + break; + case OIS::Effect::Damper: + event.type = FF_DAMPER; + break; + case OIS::Effect::Inertia: + event.type = FF_INERTIA; + break; + case OIS::Effect::Spring: + event.type = FF_SPRING; + break; + default: + OIS_EXCEPT(E_General, "No such available effect for Conditional force!"); + break; + } + + event.id = -1; + + event.u.condition[0].right_saturation = LinuxSignedLevel(effect->rightSaturation); + event.u.condition[0].left_saturation = LinuxSignedLevel(effect->leftSaturation); + event.u.condition[0].right_coeff = LinuxSignedLevel(effect->rightCoeff); + event.u.condition[0].left_coeff = LinuxSignedLevel(effect->leftCoeff); + event.u.condition[0].deadband = LinuxPositiveLevel(effect->deadband);// Unit ?? + event.u.condition[0].center = LinuxSignedLevel(effect->center); // Unit ?? TODO ? + + // TODO support for second condition + event.u.condition[1] = event.u.condition[0]; + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Condition[0] : " << endl + << " RightSaturation : " << effect->rightSaturation + << " => " << event.u.condition[0].right_saturation << endl + << " LeftSaturation : " << effect->leftSaturation + << " => " << event.u.condition[0]. 
left_saturation << endl + << " RightCoefficient : " << effect->rightCoeff + << " => " << event.u.condition[0].right_coeff << endl + << " LeftCoefficient : " << effect->leftCoeff + << " => " << event.u.condition[0].left_coeff << endl + << " DeadBand : " << effect->deadband + << " => " << event.u.condition[0].deadband << endl + << " Center : " << effect->center + << " => " << event.u.condition[0].center << endl; + cout << " Condition[1] : Not implemented" << endl; +#endif + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_upload( struct ff_effect* ffeffect, const Effect* effect) +{ + struct ff_effect *linEffect = 0; + + //Get the effect - if it exists + EffectList::iterator i = mEffectList.find(effect->_handle); + //It has been created already + if( i != mEffectList.end() ) + linEffect = i->second; + + if( linEffect == 0 ) + { +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Adding new effect : " + << Effect::getEffectTypeName(effect->type) << endl; +#endif + + //This effect has not yet been created, so create it in the device + if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { + // TODO device full check + // OIS_EXCEPT(E_DeviceFull, "Remove an effect before adding more!"); + OIS_EXCEPT(E_General, "Unknown error creating effect (may be the device is full)->.."); + } + + // Save returned effect handle + effect->_handle = ffeffect->id; + + // Save a copy of the uploaded effect for later simple modifications + linEffect = (struct ff_effect *)calloc(1, sizeof(struct ff_effect)); + memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); + + mEffectList[effect->_handle] = linEffect; + + // Start playing the effect. + _start(effect->_handle); + } + else + { +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Replacing effect : " + << Effect::getEffectTypeName(effect->type) << endl; +#endif + + // Keep same id/handle, as this is just an update in the device. + ffeffect->id = effect->_handle; + + // Update effect in the device. + if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { + OIS_EXCEPT(E_General, "Unknown error updating an effect->.."); + } + + // Update local linEffect for next time. + memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); + } + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << "LinuxForceFeedback("<< mJoyStick + << ") : Effect handle : " << effect->_handle << endl; +#endif +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_stop( int handle) { + struct input_event stop; + + stop.type = EV_FF; + stop.code = handle; + stop.value = 0; + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick + << ") : Stopping effect with handle " << handle << endl; +#endif + + if (write(mJoyStick, &stop, sizeof(stop)) != sizeof(stop)) { + OIS_EXCEPT(E_General, "Unknown error stopping effect->.."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_start( int handle) { + struct input_event play; + + play.type = EV_FF; + play.code = handle; + play.value = 1; // Play once. 
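
[Editorial aside - not part of the OIS sources.] The "play.value = 1" above is the kernel's "play this effect once" command; the remainder of _start below simply writes that event to the device, and _upload/_stop/_unload wrap the matching ioctl/write calls. For readers unfamiliar with the Linux force-feedback interface, here is a minimal, self-contained sketch of the full round trip these wrappers perform (per the kernel's force-feedback documentation). The device path is hypothetical and error handling is reduced to a bool result.

// Minimal sketch of the kernel force-feedback lifecycle: upload an effect,
// play it once, then remove it. Mirrors what _upload/_start/_stop/_unload do.
#include <fcntl.h>
#include <unistd.h>
#include <cstring>
#include <sys/ioctl.h>
#include <linux/input.h>

static bool rumble_once(const char* devicePath)    // e.g. "/dev/input/event5" (hypothetical)
{
    int fd = open(devicePath, O_RDWR);
    if (fd < 0)
        return false;

    ff_effect effect;
    std::memset(&effect, 0, sizeof(effect));
    effect.type = FF_RUMBLE;
    effect.id   = -1;                               // -1 asks the kernel to allocate a slot
    effect.u.rumble.strong_magnitude = 0x4000;
    effect.replay.length = 1000;                    // milliseconds

    if (ioctl(fd, EVIOCSFF, &effect) == -1) {       // upload: kernel fills in effect.id
        close(fd);
        return false;
    }

    input_event play;
    std::memset(&play, 0, sizeof(play));
    play.type  = EV_FF;
    play.code  = effect.id;                         // the handle OIS keeps in effect->_handle
    play.value = 1;                                 // 1 = play once, 0 would stop it
    bool ok = write(fd, &play, sizeof(play)) == sizeof(play);

    ioctl(fd, EVIOCRMFF, effect.id);                // free the effect slot again
    close(fd);
    return ok;
}
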
+ +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick + << ") : Starting effect with handle " << handle << endl; +#endif + + if (write(mJoyStick, &play, sizeof(play)) != sizeof(play)) { + OIS_EXCEPT(E_General, "Unknown error playing effect->.."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_unload( int handle) +{ +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick + << ") : Removing effect with handle " << handle << endl; +#endif + + if (ioctl(mJoyStick, EVIOCRMFF, handle) == -1) { + OIS_EXCEPT(E_General, "Unknown error removing effect->.."); + } +} diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp new file mode 100644 index 0000000..87dd977 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp @@ -0,0 +1,308 @@ +/* +The zlib/libpng License + +Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) + +This software is provided 'as-is', without any express or implied warranty. In no event will +the authors be held liable for any damages arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including commercial +applications, and to alter it and redistribute it freely, subject to the following +restrictions: + + 1. The origin of this software must not be misrepresented; you must not claim that + you wrote the original software. If you use this software in a product, + an acknowledgment in the product documentation would be appreciated but is + not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. 
+*/ +#include "OISConfig.h" + +#include "linux/LinuxJoyStickEvents.h" +#include "linux/LinuxInputManager.h" +#include "linux/LinuxForceFeedback.h" +#include "linux/EventHelpers.h" + +#include "OISEvents.h" +#include "OISException.h" + +#include //Needed to Open a file descriptor +#ifdef HAVE_UNISTD_H +#include +#endif +#include +#include + + +#include +# include +using namespace std; + +using namespace OIS; + +//#define OIS_LINUX_JOY_DEBUG + +//-------------------------------------------------------------------// +LinuxJoyStick::LinuxJoyStick(InputManager* creator, bool buffered, const JoyStickInfo& js) + : JoyStick(js.vendor, buffered, js.devId, creator) +{ + mJoyStick = js.joyFileD; + + mState.mAxes.clear(); + mState.mAxes.resize(js.axes); + mState.mButtons.clear(); + mState.mButtons.resize(js.buttons); + + mPOVs = js.hats; + + mButtonMap = js.button_map; + mAxisMap = js.axis_map; + mRanges = js.axis_range; + + ff_effect = 0; +} + +//-------------------------------------------------------------------// +LinuxJoyStick::~LinuxJoyStick() +{ + EventUtils::removeForceFeedback( &ff_effect ); +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::_initialize() +{ + //Clear old joy state + mState.mAxes.resize(mAxisMap.size()); + mState.clear(); + + //This will create and new us a force feedback structure if it exists + EventUtils::enumerateForceFeedback( mJoyStick, &ff_effect ); + + if( mJoyStick == -1 ) + OIS_EXCEPT(E_InputDeviceNonExistant, "LinuxJoyStick::_initialize() >> JoyStick Not Found!"); +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::capture() +{ + static const short POV_MASK[8] = {0,0,1,1,2,2,3,3}; + + //Used to determine if an axis has been changed and needs an event + bool axisMoved[32] = {false, false, false, false, false, false, false, false, false, false, false, false, false, + false, false, false, false, false, false, false, false, false, false, false, false, false, + false, false, false, false, false, false}; + + //We are in non blocking mode - we just read once, and try to fill up buffer + input_event js[JOY_BUFFERSIZE]; + while(true) + { + int ret = read(mJoyStick, &js, sizeof(struct input_event) * JOY_BUFFERSIZE); + if( ret < 0 ) + break; + + //Determine how many whole events re read up + ret /= sizeof(struct input_event); + for(int i = 0; i < ret; ++i) + { + switch(js[i].type) + { + case EV_KEY: //Button + { + int button = mButtonMap[js[i].code]; + + #ifdef OIS_LINUX_JOY_DEBUG + cout << "\nButton Code: " << js[i].code << ", OIS Value: " << button << endl; + #endif + + //Check to see whether push or released event... + if(js[i].value) + { + mState.mButtons[button] = true; + if( mBuffered && mListener ) + if(!mListener->buttonPressed(JoyStickEvent(this,mState), button)) return; + } + else + { + mState.mButtons[button] = false; + if( mBuffered && mListener ) + if(!mListener->buttonReleased(JoyStickEvent(this,mState), button)) return; + } + break; + } + + case EV_ABS: //Absolute Axis + { + //A Stick (BrakeDefine is the highest possible Axis) + if( js[i].code <= ABS_BRAKE ) + { + int axis = mAxisMap[js[i].code]; + assert( axis < 32 && "Too many axes (Max supported is 32). Report this to OIS forums!" 
); + + axisMoved[axis] = true; + + //check for rescaling: + if( mRanges[axis].min == JoyStick::MIN_AXIS && mRanges[axis].max != JoyStick::MAX_AXIS ) + { //Scale is perfect + mState.mAxes[axis].abs = js[i].value; + } + else + { //Rescale + float proportion = (float)(js[i].value-mRanges[axis].max)/(float)(mRanges[axis].min-mRanges[axis].max); + mState.mAxes[axis].abs = (int)(32767.0f - (65535.0f * proportion)); + } + } + else if( js[i].code <= ABS_HAT3Y ) //A POV - Max four POVs allowed + { + //Normalise the POV to between 0-7 + //Even is X Axis, Odd is Y Axis + unsigned char LinuxPovNumber = js[i].code - 16; + short OIS_POVIndex = POV_MASK[LinuxPovNumber]; + + //Handle X Axis first (Even) (left right) + if((LinuxPovNumber & 0x0001) == 0) + { + //Why do this? Because, we use a bit field, and when this axis is east, + //it can't possibly be west too. So clear out the two X axes, then refil + //it in with the new direction bit. + //Clear the East/West Bit Flags first + mState.mPOV[OIS_POVIndex].direction &= 0x11110011; + if( js[i].value == -1 ) //Left + mState.mPOV[OIS_POVIndex].direction |= Pov::West; + else if( js[i].value == 1 ) //Right + mState.mPOV[OIS_POVIndex].direction |= Pov::East; + } + //Handle Y Axis (Odd) (up down) + else + { + //Clear the North/South Bit Flags first + mState.mPOV[OIS_POVIndex].direction &= 0x11111100; + if( js[i].value == -1 ) //Up + mState.mPOV[OIS_POVIndex].direction |= Pov::North; + else if( js[i].value == 1 ) //Down + mState.mPOV[OIS_POVIndex].direction |= Pov::South; + } + + if( mBuffered && mListener ) + if( mListener->povMoved( JoyStickEvent(this,mState), OIS_POVIndex) == false ) + return; + } + break; + } + + + case EV_REL: //Relative Axes (Do any joystick actually have a relative axis?) + #ifdef OIS_LINUX_JOY_DEBUG + cout << "\nWarning: Relatives axes not supported yet" << endl; + #endif + break; + default: break; + } + } + } + + //All axes and POVs are combined into one movement per pair per captured frame + if( mBuffered && mListener ) + { + for( int i = 0; i < 32; ++i ) + if( axisMoved[i] ) + if( mListener->axisMoved( JoyStickEvent(this,mState), i) == false ) + return; + } +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::setBuffered(bool buffered) +{ + if( buffered != mBuffered ) + { + mBuffered = buffered; + _initialize(); + } +} + +//-------------------------------------------------------------------// +JoyStickInfo LinuxJoyStick::_getJoyInfo() +{ + JoyStickInfo js; + + js.devId = mDevID; + js.joyFileD = mJoyStick; + js.vendor = mVendor; + js.axes = (int)mState.mAxes.size(); + js.buttons = (int)mState.mButtons.size(); + js.hats = mPOVs; + js.button_map = mButtonMap; + js.axis_map = mAxisMap; + js.axis_range = mRanges; + + return js; +} + +//-------------------------------------------------------------------// +JoyStickInfoList LinuxJoyStick::_scanJoys() +{ + JoyStickInfoList joys; + + //Search through all of the event devices.. and identify which ones are joysticks + //xxx move this to InputManager, as it can also scan all other events + for(int i = 0; i < 64; ++i ) + { + stringstream s; + s << "/dev/input/event" << i; + int fd = open( s.str().c_str(), O_RDWR |O_NONBLOCK ); + if(fd == -1) + continue; + + #ifdef OIS_LINUX_JOY_DEBUG + cout << "Opening " << s.str() << "..." << endl; + #endif + try + { + JoyStickInfo js; + if( EventUtils::isJoyStick(fd, js) ) + { + joys.push_back(js); + #ifdef OIS_LINUX_JOY_DEBUG + cout << "=> Joystick added to list." 
<< endl; + #endif + } + else + { + #ifdef OIS_LINUX_JOY_DEBUG + cout << "=> Not a joystick." << endl; + #endif + close(fd); + } + } + catch(...) + { + #ifdef OIS_LINUX_JOY_DEBUG + cout << "Exception caught!!" << endl; + #endif + close(fd); + } + } + + return joys; +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::_clearJoys(JoyStickInfoList &joys) +{ + for(JoyStickInfoList::iterator i = joys.begin(); i != joys.end(); ++i) + close(i->joyFileD); + joys.clear(); +} + +//-------------------------------------------------------------------// +Interface* LinuxJoyStick::queryInterface(Interface::IType type) +{ + if( ff_effect && type == Interface::ForceFeedback ) + return ff_effect; + + return 0; +} diff --git a/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt new file mode 100644 index 0000000..65ae11f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt @@ -0,0 +1,546 @@ +# cmake_minimum_required (VERSION 2.6) +# project (OXYGINE) +# +# include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") +# cmaki_find_package(sdl2 REQUIRED) +# cmaki_find_package(freeimage REQUIRED) +# cmaki_find_package(dune-zlib REQUIRED) +# cmaki_find_package(haxx-libcurl REQUIRED) +# +# include_directories(${CMAKI_INCLUDE_DIRS}) +# set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) +# +# if (EMSCRIPTEN) +# #don't need SDL2 +# elseif (WIN32) +# #hardcoded path to SDL2 on windows +# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# else(WIN32) +# # find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) +# # message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) +# # +# # if (SDL2_INCLUDE_DIRS) +# # set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) +# # message(STATUS "found") +# # else() +# # message(STATUS "SDL notfound") +# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# # endif() +# +# find_package(CURL) +# endif(EMSCRIPTEN) +# +# +# set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) +# set(OXYGINE_SRC ${OXYGINE_ROOT}/src) +# +# set(FOLDERS src src/closure src/minizip src/core +# src/core/gl src/dev_tools src/minizip +# src/math src/pugixml src/json src/res +# src/text_utils src/utils src/winnie_alloc) +# +# +# if (EMSCRIPTEN) +# set(PLATFORM emscripten) +# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") +# set(PLATFORM linux) +# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") +# set(PLATFORM ios) +# elseif(MSVC) +# set(PLATFORM win32) +# elseif(MINGW) +# set(PLATFORM win32_mingw) +# endif() +# +# +# set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) +# +# +# +# if (EMSCRIPTEN) +# set(OX_HAVE_LIBPNG 1) +# set(OX_HAVE_HTTP 1) +# set(OX_USE_SDL2 0) +# +# set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) +# +# +# file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) +# +# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") +# +# set(OX_HAVE_LIBJPEG 1) +# set(OX_HAVE_LIBPNG 1) +# +# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") +# elseif(MSVC) +# +# set(OX_HAVE_LIBJPEG 1) +# set(OX_HAVE_LIBPNG 1) +# set(OX_HAVE_LIBCURL 1) +# set(OX_HAVE_HTTP 1) +# +# set(libprefix lib) +# +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) +# +# elseif(MINGW) +# +# set(libprefix lib) +# +# set(OX_HAVE_LIBPNG 1) +# set(OX_HAVE_LIBCURL 1) +# set(OX_HAVE_HTTP 1) +# +# endif() +# +# if (OX_HAVE_LIBCURL) +# set(FOLDERS ${FOLDERS} src/core/curl) +# include_directories(${THIRD_PARTY}/curl/) +# set(OX_DEFINITIONS 
${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) +# endif(OX_HAVE_LIBCURL) +# +# +# if (NOT OX_HAVE_HTTP) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) +# endif(NOT OX_HAVE_HTTP) +# +# +# +# foreach(ITEM ${FOLDERS}) +# file(GLOB FLS +# ${OXYGINE_ROOT}/${ITEM}/*.cpp +# ${OXYGINE_ROOT}/${ITEM}/*.c +# ${OXYGINE_ROOT}/${ITEM}/*.h) +# set(SOURCES ${SOURCES} ${FLS}) +# string(REPLACE / \\ SGROUP ${ITEM}) +# source_group(${SGROUP} FILES ${FLS}) +# endforeach(ITEM) +# +# +# set(OXYGINE_INCLUDE_DIRS +# ${OXYGINE_SRC} +# ${THIRD_PARTY}/pthreads/include/ +# ${THIRD_PARTY}/zlib) +# +# +# set(OXYGINE_LIBRARY_DIRS +# ${OXYGINE_LIBRARY_DIRS} +# ${OXYGINE_SOURCE_DIR}/libs +# ${THIRD_PARTY}/libraries) +# +# +# if (FORCE_GLES) +# set(OPENGL_LIBRARIES libGLESv2.lib) +# endif(FORCE_GLES) +# +# +# if (MINGW) +# set(CORE_LIBS ${CORE_LIBS} mingw32) +# endif(MINGW) +# +# +# set(CORE_LIBS +# ${CORE_LIBS} +# oxygine-framework +# ${OPENGL_LIBRARIES} +# ) +# +# +# if (OX_USE_SDL2) +# set(CORE_LIBS ${CORE_LIBS} +# SDL2main SDL2) +# set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) +# endif(OX_USE_SDL2) +# +# +# if (WIN32) +# set(CORE_LIBS ${CORE_LIBS} +# pthreadVCE2 +# libcurl_imp +# ws2_32) +# elseif(EMSCRIPTEN) +# else(WIN32) +# set(CORE_LIBS ${CORE_LIBS} pthread) +# endif(WIN32) +# +# +# +# if (OX_HAVE_LIBPNG) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) +# include_directories(${THIRD_PARTY}/libpng) +# set(LIBPNG ${libprefix}png) +# +# if (MSVC) +# if (MSVC_VERSION EQUAL "1900") +# set(LIBPNG ${LIBPNG}-2015) +# endif() +# elseif(EMSCRIPTEN) +# set(LIBPNG libz libpng16) +# endif() +# +# set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) +# endif(OX_HAVE_LIBPNG) +# +# +# if (OX_HAVE_LIBJPEG) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) +# include_directories(${THIRD_PARTY}/libjpeg) +# set(LIBJPEG ${libprefix}jpeg) +# +# if (MSVC) +# if (MSVC_VERSION EQUAL "1900") +# set(LIBJPEG ${LIBJPEG}-2015) +# endif() +# endif() +# +# set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) +# endif(OX_HAVE_LIBJPEG) +# +# +# if (NOT EMSCRIPTEN) +# set(CORE_LIBS ${CORE_LIBS} +# ${libprefix}z${libprefix}) +# endif(NOT EMSCRIPTEN) +# +# +# if (NOT MSVC) +# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ") +# endif(NOT MSVC) +# +# +# add_definitions(${OX_DEFINITIONS}) +# include_directories(${OXYGINE_INCLUDE_DIRS}) +# add_library(oxygine-framework STATIC ${SOURCES}) +# +# +# set(OXYGINE_LIBRARY_DIRS +# ${OXYGINE_LIBRARY_DIRS} +# PARENT_SCOPE) +# +# set(OXYGINE_CORE_LIBS +# ${CORE_LIBS} +# PARENT_SCOPE) +# +# set(OXYGINE_DEFINITIONS +# ${OX_DEFINITIONS} +# PARENT_SCOPE) +# +# set(OXYGINE_INCLUDE_DIRS +# ${OXYGINE_INCLUDE_DIRS} +# PARENT_SCOPE) +# +# set(OXYGINE_JS_LIBRARIES +# ${OXYGINE_JS_LIBRARIES} +# PARENT_SCOPE) +# +# message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") +# message(STATUS "Libs: ${CORE_LIBS}") +# message(STATUS "Platform: ${PLATFORM}") +# +# set(CMAKE_INSTALL_PREFIX ../libs) +# install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) +# install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) + + + + + + + + + + + + + + + + + + + + + + + + + + +cmake_minimum_required (VERSION 2.6) +project (OXYGINE) + +include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") +cmaki_find_package(sdl2 REQUIRED) +cmaki_find_package(freeimage REQUIRED) +cmaki_find_package(dune-zlib REQUIRED) +cmaki_find_package(haxx-libcurl REQUIRED) + +include_directories(${CMAKI_INCLUDE_DIRS}) +set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) + +# find_package(OpenGL) +# +# if (EMSCRIPTEN) +# 
#don't need SDL2 +# elseif (WIN32) +# #hardcoded path to SDL2 on windows +# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# else(WIN32) +# find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) +# message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) +# +# if (SDL2_INCLUDE_DIRS) +# set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) +# message(STATUS "found") +# else() +# message(STATUS "SDL not found") +# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# endif() +# +# find_package(CURL) +# endif(EMSCRIPTEN) + + +set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) +set(OXYGINE_SRC ${OXYGINE_ROOT}/src) + +set(FOLDERS src src/closure src/minizip src/core + src/core/gl src/dev_tools src/minizip + src/math src/pugixml src/json src/res + src/text_utils src/utils src/winnie_alloc) + + +if (EMSCRIPTEN) + set(PLATFORM emscripten) +elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") + set(PLATFORM linux) +elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + set(PLATFORM ios) +elseif(MSVC) + set(PLATFORM win32) +elseif(MINGW) + set(PLATFORM win32_mingw) +endif() + + +set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) + + + +if (EMSCRIPTEN) + set(OX_HAVE_LIBPNG 1) + set(OX_HAVE_HTTP 1) + set(OX_USE_SDL2 1) + + set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) + + + file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) + + set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -s USE_SDL=2 -s USE_LIBPNG=1 -s USE_ZLIB=1 -s FULL_ES2=1 ") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -s USE_ZLIB=1")#for minizip.c + +elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") + + set(OX_HAVE_LIBJPEG 1) + set(OX_HAVE_LIBPNG 1) + +elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") +elseif(MSVC) + + set(OX_HAVE_LIBJPEG 1) + set(OX_HAVE_LIBPNG 1) + set(OX_HAVE_LIBCURL 1) + set(OX_HAVE_HTTP 1) + + set(libprefix lib) + + set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) + +elseif(MINGW) + + set(libprefix lib) + + set(OX_HAVE_LIBPNG 1) + set(OX_HAVE_LIBCURL 1) + set(OX_HAVE_HTTP 1) + +endif() + +if (OX_HAVE_LIBCURL) + set(FOLDERS ${FOLDERS} src/core/curl) + include_directories(${THIRD_PARTY}/curl/) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) +endif(OX_HAVE_LIBCURL) + + + +if (NOT OX_HAVE_HTTP) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) +endif(NOT OX_HAVE_HTTP) + +if (EMSCRIPTEN) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_MT) +endif(EMSCRIPTEN) + +foreach(ITEM ${FOLDERS}) + file(GLOB FLS + ${OXYGINE_ROOT}/${ITEM}/*.cpp + ${OXYGINE_ROOT}/${ITEM}/*.c + ${OXYGINE_ROOT}/${ITEM}/*.h) + set(SOURCES ${SOURCES} ${FLS}) + string(REPLACE / \\ SGROUP ${ITEM}) + source_group(${SGROUP} FILES ${FLS}) +endforeach(ITEM) + + +set(OXYGINE_INCLUDE_DIRS + ${OXYGINE_SRC} + ${THIRD_PARTY}/pthreads/include/ + ${THIRD_PARTY}/zlib) + + +set(OXYGINE_LIBRARY_DIRS + ${OXYGINE_LIBRARY_DIRS} + ${OXYGINE_SOURCE_DIR}/libs + ${THIRD_PARTY}/libraries) + + +if (FORCE_GLES) + set(OPENGL_LIBRARIES libGLESv2.lib) +endif(FORCE_GLES) + + +if (MINGW) + set(CORE_LIBS ${CORE_LIBS} mingw32) +endif(MINGW) + + +set(CORE_LIBS + ${CORE_LIBS} + oxygine-framework + ${OPENGL_LIBRARIES} +) + + +if (OX_USE_SDL2) + set(CORE_LIBS ${CORE_LIBS} + SDL2main SDL2) + set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) +endif(OX_USE_SDL2) + + +if (WIN32) + set(CORE_LIBS ${CORE_LIBS} + pthreadVCE2 + libcurl_imp + ws2_32) +elseif(EMSCRIPTEN) +else(WIN32) + set(CORE_LIBS ${CORE_LIBS} pthread) +endif(WIN32) + + + +if (OX_HAVE_LIBPNG) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) + + if 
(EMSCRIPTEN) + + else(EMSCRIPTEN) + + include_directories(${THIRD_PARTY}/libpng) + set(LIBPNG ${libprefix}png) + + if (MSVC) + if(NOT (MSVC_VERSION LESS 1900)) + set(LIBPNG ${LIBPNG}-2015) + endif() + endif() + + set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) + + endif(EMSCRIPTEN) + +endif(OX_HAVE_LIBPNG) + + +if (OX_HAVE_LIBJPEG) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) + include_directories(${THIRD_PARTY}/libjpeg) + set(LIBJPEG ${libprefix}jpeg) + + if (MSVC) + if(NOT (MSVC_VERSION LESS 1900)) + set(LIBJPEG ${LIBJPEG}-2015) + endif() + endif() + + set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) +endif(OX_HAVE_LIBJPEG) + + +if (NOT EMSCRIPTEN) + set(CORE_LIBS ${CORE_LIBS} + ${libprefix}z${libprefix}) +endif(NOT EMSCRIPTEN) + + +if (NOT MSVC) + set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -std=c++11 ") +endif(NOT MSVC) + +set(CMAKE_CXX_FLAGS ${OXYGINE_CXX_FLAGS}) + +add_definitions(${OX_DEFINITIONS}) +include_directories(${OXYGINE_INCLUDE_DIRS}) +add_library(oxygine-framework STATIC ${SOURCES}) + + +set(OXYGINE_LIBRARY_DIRS + ${OXYGINE_LIBRARY_DIRS} + PARENT_SCOPE) + +set(OXYGINE_CORE_LIBS + ${CORE_LIBS} + PARENT_SCOPE) + +set(OXYGINE_DEFINITIONS + ${OX_DEFINITIONS} + PARENT_SCOPE) + +set(OXYGINE_INCLUDE_DIRS + ${OXYGINE_INCLUDE_DIRS} + PARENT_SCOPE) + +set(OXYGINE_JS_LIBRARIES + ${OXYGINE_JS_LIBRARIES} + PARENT_SCOPE) + +set(OXYGINE_CXX_FLAGS + ${OXYGINE_CXX_FLAGS} + PARENT_SCOPE) + + + +message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") +message(STATUS "Libs: ${CORE_LIBS}") +message(STATUS "Platform: ${PLATFORM}") + +set(CMAKE_INSTALL_PREFIX ../libs) +install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) +install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) + + + + + + + + + + + + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml new file mode 100644 index 0000000..bbdc966 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml @@ -0,0 +1,13 @@ +- assimp: + <<: *thirdparty_defaults + version: 3.1.1.0 + mode: dr + source: http://downloads.sourceforge.net/project/assimp/assimp-3.1/assimp-3.1.1.zip + uncompress_strip: assimp-3.1.1 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + targets: + - assimp: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml new file mode 100644 index 0000000..e2fe3a4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml @@ -0,0 +1,23 @@ +- box2d: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + cmake_target: null + cmake_prefix: ./Box2D/CMakeLists.txt + cmake_definitions: + - BOX2D_BUILD_EXAMPLES=OFF + - BUILD_SHARED_LIBS=ON + - BOX2D_BUILD_SHARED=ON + - BOX2D_BUILD_STATIC=OFF + # - CMAKE_POSITION_INDEPENDENT_CODE=ON + post_install: + - ./Box2D/Box2D/*.h include/Box2D/ RECURSIVE + - ./Box2D/libBox2D.a lib/ + mode: dr + source: https://github.com/erincatto/Box2D.git + branch: -b v2.3.1 + targets: + - Box2D: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml new file mode 100644 index 0000000..a33a569 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml @@ -0,0 +1,54 @@ +- bullet2: + <<: *thirdparty_defaults + version: 2.83.6.0 + source: 
https://github.com/bulletphysics/bullet3/archive/2.83.6.tar.gz + uncompress_strip: bullet3-2.83.6 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + references: &bullet2_common_extra + default: + include: + - include/bullet + targets: + - LinearMath: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - BulletCollision: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - BulletDynamics: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - BulletSoftBody: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + # optional targets + - BulletFileLoader: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - ConvexDecomposition: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - GIMPACTUtils: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - HACD: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml new file mode 100644 index 0000000..59a451e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml @@ -0,0 +1,70 @@ +- cryptopp: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + version_manager: git + post_install: + - ./*.h include + - ./*.a lib + cmake_target: null + mode: dr + source: https://github.com/weidai11/cryptopp.git + branch: -b CRYPTOPP_5_6_5 + unittest: + | + // https://www.cryptopp.com/wiki/ChannelSwitch + #include + #include + #include + #include + #include + #include + + int main(int argc, char *argv[]) + { + std::string message = "Now is the time for all good men to come to the aide of their country"; + + // Allow user to override default message from command line arg. 
+ if(argc == 2 && argv[1] != NULL) + message = std::string(argv[1]); + + // Set hash variables + std::string s1, s2, s3, s4; + CryptoPP::SHA1 sha1; CryptoPP::SHA224 sha224; CryptoPP::SHA256 sha256; CryptoPP::SHA512 sha512; + + // Run hash functions + CryptoPP::HashFilter f1(sha1, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s1))); + CryptoPP::HashFilter f2(sha224, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s2))); + CryptoPP::HashFilter f3(sha256, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s3))); + CryptoPP::HashFilter f4(sha512, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s4))); + + // Set route to default + CryptoPP::ChannelSwitch cs; + cs.AddDefaultRoute(f1); + cs.AddDefaultRoute(f2); + cs.AddDefaultRoute(f3); + cs.AddDefaultRoute(f4); + + CryptoPP::StringSource ss(message, true /*pumpAll*/, new CryptoPP::Redirector(cs)); + + std::cout << "Message: " << message << std::endl; + std::cout << "SHA-1: " << s1 << std::endl; + std::cout << "SHA-224: " << s2 << std::endl; + std::cout << "SHA-256: " << s3 << std::endl; + std::cout << "SHA-512: " << s4 << std::endl; + } + cmake_definitions: + - BUILD_SHARED=OFF + - BUILD_SHARED_LIBS=OFF + - BUILD_STATIC=ON + - BUILD_TESTING=OFF + targets: + - cryptopp: + info: + <<: *library_static_exact + extra: + default: + definitions: + - -DCRYPTOPP_INIT_PRIORITY=1 + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml new file mode 100644 index 0000000..9ebf7cf --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml @@ -0,0 +1,28 @@ +- dune-freetype: + <<: *thirdparty_defaults + version: 1.0.0.0 + mode: dr + source: http://download.savannah.gnu.org/releases/freetype/freetype-2.6.tar.bz2 + uncompress_strip: freetype-2.6 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + unittest: + | + #include + #include FT_FREETYPE_H + int main() + { + FT_Library library; + FT_Init_FreeType( &library ); + return 0; + } + targets: + - freetype: + info: + <<: *library_dynamic_exact + extra: + default: + include: + - include/freetype2 + - $PLATFORM/include/freetype2 + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml new file mode 100644 index 0000000..ccb589b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml @@ -0,0 +1,29 @@ +- dune-glew: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + cmake_target: null + mode: dr + source: https://github.com/nigels-com/glew.git + cmake_definitions: + - BUILD_SHARED_LIBS=ON + post_install: + - ./lib/* lib/ RECURSIVE + - ./include/* include/ RECURSIVE + build: + | + #!/bin/bash + pushd auto + make + popd + make -j $CORES + targets: + - GLEW: + info: + <<: *library_dynamic_exact + extra: + macos_64-clang_*-*: null + default: + system_depends: + - GL + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml new file mode 100644 index 0000000..04246cb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml @@ -0,0 +1,38 @@ +- dune-zlib: + <<: *thirdparty_defaults + version: 1.2.11.0 + mask: w + source: https://zlib.net/zlib-1.2.11.tar.gz + uncompress_strip: zlib-1.2.11 + unittest: + | + #include + int main() + { + z_stream infstream; + return 0; + } + targets: + - zlib: + info: + <<: *library_dynamic_exact + +- dune-zlib: + <<: 
*thirdparty_defaults + version: 1.2.11.0 + mask: mls + source: https://zlib.net/zlib-1.2.11.tar.gz + uncompress_strip: zlib-1.2.11 + unittest: + | + #include + int main() + { + z_stream infstream; + return 0; + } + targets: + - z: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml new file mode 100644 index 0000000..1dc4f97 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml @@ -0,0 +1,20 @@ +- fmod: + <<: *thirdparty_defaults + version: 1.0.1.0 + source: $NPP_SERVER/sources/fmodstudioapi11000linux.tar.gz + uncompress_strip: fmodstudioapi11000linux/api/lowlevel + post_install: + - ./lib/x86_64/* lib/ + - ./inc/*.h* include/ + build: + | + #!/bin/bash + echo installing fmod + targets: + - fmod: + info: + <<: *library_dynamic_exact + - fmodL: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml new file mode 100644 index 0000000..856f116 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml @@ -0,0 +1,36 @@ +- freeimage: + <<: *thirdparty_defaults + version: 3.1.7.0 + source: https://github.com/Kanma/FreeImage + cmake_target: null + post_install: + - ./lib/*.a lib/ + targets: + - freeimage: + info: + <<: *library_static_exact + - jpeg: + info: + <<: *library_static_exact + - mng: + info: + <<: *library_static_exact + - openexr: + info: + <<: *library_static_exact + - openjpeg: + info: + <<: *library_static_exact + - png: + info: + <<: *library_static_exact + - rawlite: + info: + <<: *library_static_exact + - tiff: + info: + <<: *library_static_exact + - zlib: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml new file mode 100644 index 0000000..c9352be --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml @@ -0,0 +1,40 @@ +- freeimage: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + version_manager: git + cmake_target: null + post_install: + - ./*.h include RECURSIVE + - ./lib/*.a lib + mode: dr + source: https://github.com/Kanma/FreeImage.git + targets: + - freeimage: + info: + <<: *library_static_exact + # - zlib: + # info: + # <<: *library_static_exact + - tiff: + info: + <<: *library_static_exact + - rawlite: + info: + <<: *library_static_exact + - png: + info: + <<: *library_static_exact + - openjpeg: + info: + <<: *library_static_exact + - openexr: + info: + <<: *library_static_exact + - mng: + info: + <<: *library_static_exact + - jpeg: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml new file mode 100644 index 0000000..cf94535 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml @@ -0,0 +1,61 @@ +- google-gmock: + <<: *thirdparty_defaults + mask: w + source: https://github.com/google/googletest.git + branch: -b release-1.8.0 + post_install: + - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE + - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE + cmake_definitions: + - GTEST_LINKED_AS_SHARED_LIBRARY=1 + - BUILD_SHARED_LIBS=ON + - BUILD_GTEST=ON + - BUILD_GMOCK=ON + - gtest_build_samples=OFF + - 
gtest_build_tests=OFF + - gtest_disable_pthreads=OFF + - gmock_build_tests=OFF + - INSTALL_GTEST=ON + - INSTALL_GMOCK=ON + targets: + - gtest: + info: + <<: *library_dynamic_exact + - gmock: + info: + <<: *library_dynamic_exact + - gmock_main: + info: + <<: *library_dynamic_exact + + +- google-gmock: + <<: *thirdparty_defaults + mask: mls + source: https://github.com/google/googletest.git + branch: -b release-1.8.0 + post_install: + - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE + - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE + cmake_definitions: + - BUILD_SHARED_LIBS=OFF + - BUILD_GTEST=ON + - BUILD_GMOCK=ON + - gtest_build_samples=OFF + - gtest_build_tests=OFF + - gtest_disable_pthreads=OFF + - gmock_build_tests=OFF + - INSTALL_GTEST=ON + - INSTALL_GMOCK=ON + targets: + - gtest: + info: + <<: *library_static_exact + - gmock: + info: + <<: *library_static_exact + - gmock_main: + info: + <<: *library_static_exact + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml new file mode 100644 index 0000000..ffd8870 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml @@ -0,0 +1,11 @@ +- gwen: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + mode: dr + source: https://github.com/garrynewman/GWEN.git + targets: + - gwen: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml new file mode 100644 index 0000000..8c14ec5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml @@ -0,0 +1,71 @@ +- haxx-libcurl: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + source: https://github.com/curl/curl.git + branch: -b curl-7_59_0 + depends: + - openssl + cmake_definitions: + - CMAKE_USE_OPENSSL=ON + unittest: + | + #include + #include + int main() + { + CURL* curl = curl_easy_init(); + return 0; + } + references: + library_dynamic: &library_dynamic_curl + common: &library_dynamic_common_curl + include: + - $PLATFORM/include + - include + windows: &library_dynamic_windows_curl + <<: *library_dynamic_common_curl + dynamic: + debug: + dll: + lib$TARGET.dll + lib: + lib$TARGET_imp.lib + pdb: + lib$TARGET.pdb + relwithdebinfo: + dll: + lib$TARGET.dll + lib: + lib$TARGET_imp.lib + pdb: + lib$TARGET.pdb + release: + dll: + lib$TARGET.dll + lib: + lib$TARGET_imp.lib + pdb: + null + + unix: &library_dynamic_unix_curl + <<: *library_dynamic_common_curl + dynamic: + debug: + so: + lib/lib$TARGET-d.so + relwithdebinfo: + so: + lib/lib$TARGET.so + release: + so: + lib/lib$TARGET.so + windows_*-msvc_*-*: + <<: *library_dynamic_windows_curl + default: + <<: *library_dynamic_unix_curl + targets: + - curl: + info: + <<: *library_dynamic_curl + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml new file mode 100644 index 0000000..e8920b1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml @@ -0,0 +1,26 @@ +- json: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + version_manager: git + post_install: + - ./src/*.h* include + cmake_target: null + source: https://github.com/nlohmann/json.git + branch: -b v3.0.1 + cmake_definitions: + - JSON_BuildTests=OFF + unittest: + | + #include + using json = nlohmann::json; + int main() + { + json j1; + return 0; + } + targets: + - dummy: 
+ info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml new file mode 100644 index 0000000..05d54dd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml @@ -0,0 +1,24 @@ +- librocket: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + cmake_target: null + post_install: + - ./Include/Rocket/*.h include/Rocket/ recursive + - ./Include/Rocket/*.inl include/Rocket/ recursive + version_manager: git + source: https://github.com/libRocket/libRocket.git + branch: -b stable + depends: + - dune-freetype + targets: + - RocketCore: + info: + <<: *library_dynamic_exact + - RocketDebugger: + info: + <<: *library_dynamic_exact + - RocketControls: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml new file mode 100644 index 0000000..7d76144 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml @@ -0,0 +1,10 @@ +- msgpack: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + source: https://github.com/msgpack/msgpack-c.git + targets: + - msgpackc: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml new file mode 100644 index 0000000..4cbfa70 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml @@ -0,0 +1,11 @@ +- noise: + <<: *thirdparty_defaults + version: 1.0.0.0 + mode: dr + source: http://downloads.sourceforge.net/project/libnoise/libnoise%20sources/1.0.0/libnoisesrc-1.0.0.zip + uncompress_strip: noise + targets: + - noise: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml new file mode 100644 index 0000000..06bada0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml @@ -0,0 +1,19 @@ +- ois: + <<: *thirdparty_defaults + version: 1.3.0.0 + mode: dr + source: http://downloads.sourceforge.net/project/wgois/Source%20Release/1.3/ois_v1-3.tar.gz + uncompress_strip: ois-v1-3 + build: + | + #!/bin/bash + # depends: libxaw7-dev + source find.script + chmod +x bootstrap + ./bootstrap + ./configure --prefix=$ois_HOME && make -j $CORES && make -j $CORES install + exit $? 
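
[Editorial aside - not part of the package definitions.] Several of the packages above embed a short "unittest:" C++ snippet (dune-freetype, dune-zlib, haxx-libcurl, json, cryptopp) that is presumably compiled and linked against the freshly built library as a smoke test. ois.yml relies on its build: script alone and ships no such snippet; for comparison, a hypothetical snippet in the same style might look like the sketch below. The header names and the expectation that createInputSystem throws without a real window handle are assumptions, not taken from this repository.

// Hypothetical OIS smoke test in the style of the other packages' unittest: snippets.
#include <OISInputManager.h>
#include <OISException.h>
#include <iostream>

int main()
{
    try
    {
        // Without a real window handle this is expected to throw on most platforms;
        // for a link-level smoke test that is good enough.
        OIS::ParamList pl;
        OIS::InputManager* im = OIS::InputManager::createInputSystem(pl);
        OIS::InputManager::destroyInputSystem(im);
    }
    catch (const OIS::Exception& e)
    {
        std::cout << "OIS linked and threw as expected: " << e.eText << std::endl;
    }
    return 0;
}
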
+ targets: + - OIS: + info: + <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml new file mode 100644 index 0000000..4011d09 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml @@ -0,0 +1,24 @@ +- openssl: + <<: *thirdparty_defaults + source: https://github.com/pol51/OpenSSL-CMake.git + branch: -b OpenSSL_1_1_0 + build: + | + #!/bin/bash + # if [[ $BUILD_MODE == 'Debug' ]] + # then + # ./Configure --openssldir=$SELFHOME debug-linux-x86_64 + # else + # ./Configure --openssldir=$SELFHOME linux-x86_64 + # fi + ./config --prefix=$SELFHOME + make + make install + targets: + - ssl: + info: + <<: *library_static_exact + - crypto: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml new file mode 100644 index 0000000..eb53ab4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml @@ -0,0 +1,25 @@ +- oxygine: + <<: *thirdparty_defaults + cmake_target: null + cmake_definitions: + - BUILD_SHARED_LIBS=OFF + - CMAKE_POSITION_INDEPENDENT_CODE=ON + - OX_HAVE_LIBJPEG=1 + - OX_HAVE_LIBPNG=1 + - OX_HAVE_LIBCURL=1 + - OX_HAVE_HTTP=0 + - OX_USE_SDL2=1 + depends: + - sdl2 + - freeimage + - haxx-libcurl + source: https://github.com/oxygine/oxygine-framework.git + targets: + - oxygine-framework: + info: + <<: *library_static_exact + extra: + default: + definitions: + - -DOXYGINE_SDL=1 + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml new file mode 100644 index 0000000..0d9c5f9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml @@ -0,0 +1,22 @@ +- paho-mqtt3: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + mode: dr + post_install: + - ./src/*.h include + source: https://github.com/eclipse/paho.mqtt.c.git + branch: -b develop + cmake_definitions: + - BUILD_SHARED_LIBS=ON + - BUILD_TESTING=OFF + - BUILD_STATIC=OFF + - BUILD_SHARED=ON + targets: + - paho-mqtt3c: + info: + <<: *library_dynamic_exact + - paho-mqtt3a: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml new file mode 100644 index 0000000..5d52565 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml @@ -0,0 +1,21 @@ +- paho-mqttpp3: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + mode: dr + depends: + - paho-mqtt3 + post_install: + - ./src/mqtt/*.h include/mqtt + mode: dr + source: https://github.com/eclipse/paho.mqtt.cpp.git + cmake_definitions: + - BUILD_SHARED_LIBS=ON + - BUILD_TESTING=OFF + - BUILD_STATIC=OFF + - BUILD_SHARED=ON + - PAHO_WITH_SSL=OFF + targets: + - paho-mqttpp3: + info: + <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml new file mode 100644 index 0000000..df8c388 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml @@ -0,0 +1,11 @@ +- pugixml: + <<: *thirdparty_defaults + source: http://github.com/zeux/pugixml/releases/download/v1.8/pugixml-1.8.tar.gz + uncompress_strip: pugixml-1.8 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + targets: + - pugixml: + info: 
+ <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml new file mode 100644 index 0000000..bc7cb10 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml @@ -0,0 +1,21 @@ +- python: + <<: *thirdparty_defaults + source: https://github.com/python-cmake-buildsystem/python-cmake-buildsystem.git + cmake_definitions: + - BUILD_SHARED=FALSE + - BUILD_STATIC=TRUE + targets: + - python3.5m: + info: + <<: *library_static_exact + extra: + default: + include: + - include/python3.5m + system_depends: + - dl + - util + - python: + info: + <<: *executable_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml new file mode 100644 index 0000000..643b0c7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml @@ -0,0 +1,11 @@ +- raknet: + <<: *thirdparty_defaults + cmake_target: null + source: https://github.com/facebookarchive/RakNet.git + post_install: + - ./Source/*.h* include/raknet/ + targets: + - RakNetDLL: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml new file mode 100644 index 0000000..5707070 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml @@ -0,0 +1,17 @@ +- restclient-cpp: + <<: *thirdparty_defaults + source: https://github.com/mrtazz/restclient-cpp + depends: + - haxx-libcurl + build: + | + #!/bin/bash + source $(pwd)/../haxx-libcurl/find.script + ./autogen.sh + CXXFLAGS=-I$haxx_libcurl_HOME/include ./configure --prefix=$SELFHOME + make install + targets: + - restclient-cpp: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml new file mode 100644 index 0000000..13d07b4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml @@ -0,0 +1,38 @@ +- sdl2: + <<: *thirdparty_defaults + mask: wl + version: 2.0.8.0 + source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz + uncompress_strip: SDL2-2.0.8 + depends: + - dune-glew + mode: dr + targets: + - SDL2-2.0: + info: + <<: *library_dynamic_exact + extra: + default: + include: + - include/SDL2 + + +- sdl2: + <<: *thirdparty_defaults + mask: m + version: 2.0.8.0 + source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz + uncompress_strip: SDL2-2.0.8 + depends: + - dune-glew + mode: dr + targets: + - SDL2: + info: + <<: *library_dynamic_exact + extra: + default: + include: + - include/SDL2 + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml new file mode 100644 index 0000000..29c143d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml @@ -0,0 +1,14 @@ +- spdlog: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + source: https://github.com/gabime/spdlog.git + branch: -b v0.16.3 + post_install: + - ./include/*.h* include/ RECURSIVE + - ./include/*.cc* include/ RECURSIVE + targets: + - dummy: + info: + <<: *library_dynamic + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml new file mode 100644 index 0000000..d01d5e7 --- /dev/null +++ 
b/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml @@ -0,0 +1,49 @@ +- intel-tbb: + <<: *thirdparty_defaults + version: 4.4.0.0 + source: https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20150728oss_src.tgz + uncompress_strip: tbb44_20150728oss + build: + | + #!/bin/bash + source find.script + make info > info_.txt + tail -n +2 info_.txt > info.txt + source info.txt + make + code=$? + # install + cp -Rf include/ $intel_tbb_HOME + if [[ $BUILD_MODE == 'Debug' ]] + then + cp -Rf build/${tbb_build_prefix}_debug/*.so* $intel_tbb_HOME + else + cp -Rf build/${tbb_build_prefix}_release/*.so* $intel_tbb_HOME + fi + for i in $(find $intel_tbb_HOME -name "*.so"); do + name=$(basename $i) + echo rm $i + echo ln -sf $name.2 $i + rm $i + ln -sf $name.2 $i + done + exit $code + + targets: + - tbb: + info: + <<: *library_dynamic_exact + extra: + \*-debug: + definitions: + - -DTBB_USE_DEBUG=1 + default: + definitions: + - -DTBB_USE_DEBUG=0 + - tbbmalloc: + info: + <<: *library_dynamic_exact + - tbbmalloc_proxy: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml new file mode 100644 index 0000000..34d5cc9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml @@ -0,0 +1,16 @@ +- yamlcpp: + <<: *thirdparty_defaults + mode: dr + version: 0.0.0.0 + version_manager: git + cmake_target: null + cmake_definitions: + - BUILD_SHARED_LIBS=ON + post_install: + - ./include/yaml-cpp/*.h include/yaml-cpp RECURSIVE + source: https://github.com/jbeder/yaml-cpp.git + targets: + - yaml-cpp: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_generator/packing.py b/node_modules/npm-mas-mas/cmaki_generator/packing.py new file mode 100644 index 0000000..fcb2872 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/packing.py @@ -0,0 +1,139 @@ +import os +import sys +import utils +import logging +import hash_version +from itertools import product +from third_party import platforms +from third_party import get_identifier + + +def print_folder(folder): + for root, dirs, files in os.walk(folder): + path = root.split(os.sep) + logging.info((len(path) - 1) * '... ' + '%s/' % os.path.basename(root)) + for file in files: + logging.info(len(path) * '... 
' + '%s' % file) + + +def packing(node, parameters, compiler_replace_maps): + + package = node.get_package_name() + version_git = node.get_version() + packing = node.is_packing() + if not packing: + logging.warning('Skipping package: %s' % package) + return 0 + + manager = node.get_version_manager() + if manager == "git": + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) + revision_git = hash_version.get_last_changeset(build_directory, short=False) + version_old = node.get_version() + version_git = hash_version.to_cmaki_version(build_directory, revision_git) + logging.info('[git] Renamed version from %s to %s' % (version_old, version_git)) + + current_workspace = node.get_binary_workspace(plat) + current_base = node.get_base_folder() + oldversion = node.get_version() + try: + node.set_version(version_git) + updated_workspace = node.get_binary_workspace(plat) + updated_base = node.get_base_folder() + + current_base2 = os.path.join(current_workspace, current_base) + updated_base2 = os.path.join(current_workspace, updated_base) + logging.debug("from: %s" % current_base2) + logging.debug("to: %s" % updated_base2) + if current_base != updated_base: + utils.move_folder_recursive(current_base2, updated_base2) + logging.debug('-- copy from: {}, {}'.format(current_workspace, os.path.exists(current_workspace))) + logging.debug('-- copy to: {}, {}'.format(updated_workspace, os.path.exists(updated_workspace))) + utils.move_folder_recursive(current_workspace, updated_workspace) + finally: + node.set_version(oldversion) + + node.set_version(version_git) + version = node.get_version() + + # regenerate autoscripts with new version + node.generate_scripts_headers(compiler_replace_maps) + + # generate versions.cmake + node.generate_3rdpartyversion(parameters.prefix) + + precmd = '' + if utils.is_windows(): + precmd = 'cmake -E ' + + folder_3rdparty = parameters.third_party_dir + output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) + utils.trymkdir(output_3rdparty) + + folder_mark = os.path.join(parameters.prefix, node.get_base_folder()) + utils.trymkdir(folder_mark) + + utils.superverbose(parameters, '*** [%s] Generating cmakefiles *** %s' % (package, output_3rdparty)) + errors = node.generate_cmakefiles(platforms, output_3rdparty, compiler_replace_maps) + logging.debug('errors generating cmakefiles: %d' % errors) + node.ret += abs(errors) + + for plat in platforms: + utils.superverbose(parameters, '*** [%s (%s)] Generating package .tar.gz (%s) ***' % (package, version, plat)) + workspace = node.get_workspace(plat) + current_workspace = node.get_binary_workspace(plat) + utils.trymkdir(current_workspace) + with utils.working_directory(current_workspace): + + logging.info('working directory: {}'.format(current_workspace)) + + if utils.is_windows(): + utils.safe_system('del /s *.ilk') + utils.safe_system('del /s *.exp') + + current_base = node.get_base_folder() + prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) + prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) + + logging.info('generating package %s from source %s' % (prefix_package, os.path.join(os.getcwd(), current_base))) + logging.info('generating md5file %s' % prefix_package_md5) + print_folder(current_base) + + # packing install + gen_targz = "%star zcvf %s %s" % (precmd, prefix_package, current_base) + + node.ret += abs( node.safe_system(gen_targz,
compiler_replace_maps) ) + if not os.path.exists(prefix_package): + logging.error('No such file: {}'.format(prefix_package)) + return False + + # calculate md5 file + package_md5 = utils.md5sum(prefix_package) + logging.debug("new package {}, with md5sum {}".format(prefix_package, package_md5)) + with open(prefix_package_md5, 'wt') as f: + f.write('%s\n' % package_md5) + + # packing cmakefiles (more easy distribution) + if not parameters.no_packing_cmakefiles: + for plat in platforms: + current_base = node.get_base_folder() + prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (current_base, plat)) + with utils.working_directory(folder_3rdparty): + + logging.info('working directory: {}'.format(folder_3rdparty)) + + logging.debug('working dir: %s' % folder_3rdparty) + logging.info('generating package cmake %s' % prefix_package_cmake) + print_folder(current_base) + + gen_targz_cmake = '{}tar zcvf {} {}'.format(precmd, prefix_package_cmake, current_base) + node.ret += abs( node.safe_system(gen_targz_cmake, compiler_replace_maps) ) + if not os.path.exists(prefix_package_cmake): + logging.error('No such file: {}'.format(prefix_package_cmake)) + return False + + # finish well + return True + diff --git a/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt new file mode 100644 index 0000000..dcb2251 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt @@ -0,0 +1,75 @@ + +#******************************************************************************* +# Copyright (c) 2016 +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Eclipse Public License v1.0 +# and Eclipse Distribution License v1.0 which accompany this distribution. +# +# The Eclipse Public License is available at +# http://www.eclipse.org/legal/epl-v10.html +# and the Eclipse Distribution License is available at +# http://www.eclipse.org/org/documents/edl-v10.php. +# +# Contributors: +# Guilherme Maciel Ferreira - initial version +#*******************************************************************************/ + +## Note: on OS X you should install XCode and the associated command-line tools + +## cmake flags +cmake_minimum_required(VERSION 3.1 FATAL_ERROR) + +## project name +project("paho-mqtt-cpp" LANGUAGES CXX) + +include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) +set(PAHO_MQTT_C_PATH "${paho_mqtt3_LIBDIR}" CACHE PATH "Add a path to paho.mqtt.c library and headers") + +## library name +set(PAHO_MQTT_CPP paho-mqttpp3) + +## build settings +set(PAHO_VERSION_MAJOR 0) +set(PAHO_VERSION_MINOR 9) +set(PAHO_VERSION_PATCH 0) + +set(CLIENT_VERSION ${PAHO_VERSION_MAJOR}.${PAHO_VERSION_MINOR}.${PAHO_VERSION_PATCH}) +set(CPACK_PACKAGE_VERSION_MAJOR ${PAHO_VERSION_MAJOR}) +set(CPACK_PACKAGE_VERSION_MINOR ${PAHO_VERSION_MINOR}) +set(CPACK_PACKAGE_VERSION_PATCH ${PAHO_VERSION_PATCH}) + +## build options +set(PAHO_BUILD_STATIC FALSE CACHE BOOL "Build static library") +set(PAHO_BUILD_SAMPLES FALSE CACHE BOOL "Build sample programs") +set(PAHO_BUILD_DOCUMENTATION FALSE CACHE BOOL "Create and install the HTML based API documentation (requires Doxygen)") +set(PAHO_MQTT_C paho-mqtt3a) +SET(PAHO_WITH_SSL TRUE CACHE BOOL "Flag that defines whether to build ssl-enabled binaries too. 
") + +## build flags +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + +## build directories + +add_subdirectory(src) +add_subdirectory(src/mqtt) + +if(PAHO_BUILD_SAMPLES) + add_subdirectory(src/samples) +endif() + +if(PAHO_BUILD_DOCUMENTATION) + add_subdirectory(doc) +endif() + +## packaging settings +if(WIN32) + set(CPACK_GENERATOR "ZIP") +elseif(UNIX) + set(CPACK_GENERATOR "TGZ") +endif() + +include(CPack) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt new file mode 100644 index 0000000..d35ab8b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt @@ -0,0 +1,161 @@ +#******************************************************************************* +# Copyright (c) 2016 +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Eclipse Public License v1.0 +# and Eclipse Distribution License v1.0 which accompany this distribution. +# +# The Eclipse Public License is available at +# http://www.eclipse.org/legal/epl-v10.html +# and the Eclipse Distribution License is available at +# http://www.eclipse.org/org/documents/edl-v10.php. +# +# Contributors: +# Guilherme Maciel Ferreira - initial version +#*******************************************************************************/ + +## Note: on OS X you should install XCode and the associated command-line tools + +include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) +set(paho_mqtt3_LIBRARIES paho-mqtt3c paho-mqtt3a) +link_directories("${paho_mqtt3_LIBDIR}") +include_directories("${paho_mqtt3_INCLUDE}") +# TODO: use find_package +# find_package(paho-mqtt3 REQUIRED) + +## include directories +include_directories(${CMAKE_CURRENT_SOURCE_DIR}) + + +## libraries +if(WIN32) + set(LIBS_SYSTEM + ws2_32) +elseif(UNIX) + if(CMAKE_SYSTEM_NAME MATCHES "Linux") + set(LIB_DL dl) + endif() + set(LIBS_SYSTEM + ${LIB_DL} + c + stdc++ + pthread) +endif() + +## use Object Library to optimize compilation +set(COMMON_SRC + async_client.cpp + client.cpp + disconnect_options.cpp + iclient_persistence.cpp + message.cpp + response_options.cpp + ssl_options.cpp + string_collection.cpp + token.cpp + topic.cpp + connect_options.cpp + will_options.cpp) + +if(PAHO_WITH_SSL) + add_definitions(-DOPENSSL) +endif() + +add_library(common_obj OBJECT + ${COMMON_SRC}) + +## set position independent flag (-fPIC on Unix) +set_property(TARGET common_obj + PROPERTY POSITION_INDEPENDENT_CODE ON) + +## create the shared library +add_library(${PAHO_MQTT_CPP} SHARED + $) + +## add dependencies to the shared library +target_link_libraries(${PAHO_MQTT_CPP} + ${LIBS_SYSTEM}) + +## set the shared library soname +set_target_properties(${PAHO_MQTT_CPP} PROPERTIES + VERSION ${CLIENT_VERSION} + SOVERSION ${PAHO_VERSION_MAJOR}) + +## install the shared library +install(TARGETS ${PAHO_MQTT_CPP} + ARCHIVE DESTINATION lib + LIBRARY DESTINATION lib + RUNTIME DESTINATION bin) + +## build static version of the Paho MQTT C++ library +if(PAHO_BUILD_STATIC) + ## create the static library + add_library(${PAHO_MQTT_CPP}-static STATIC + $) + + ## add dependencies to the static library + target_link_libraries(${PAHO_MQTT_CPP}-static + ${LIBS_SYSTEM}) + + ## install the static library + install(TARGETS ${PAHO_MQTT_CPP}-static + ARCHIVE DESTINATION lib + LIBRARY DESTINATION lib) +endif() + +## extract Paho MQTT C include directory 
+get_filename_component(PAHO_MQTT_C_DEV_INC_DIR ${PAHO_MQTT_C_PATH}/src ABSOLUTE) +get_filename_component(PAHO_MQTT_C_STD_INC_DIR ${PAHO_MQTT_C_PATH}/include ABSOLUTE) +set(PAHO_MQTT_C_INC_DIR + ${PAHO_MQTT_C_DEV_INC_DIR} + ${PAHO_MQTT_C_STD_INC_DIR}) + +## extract Paho MQTT C library directory +get_filename_component(PAHO_MQTT_C_DEV_LIB_DIR ${PAHO_MQTT_C_PATH}/build/output ABSOLUTE) +get_filename_component(PAHO_MQTT_C_STD_LIB_DIR ${PAHO_MQTT_C_PATH}/lib ABSOLUTE) +get_filename_component(PAHO_MQTT_C_STD64_LIB_DIR ${PAHO_MQTT_C_PATH}/lib64 ABSOLUTE) +set(PAHO_MQTT_C_LIB_DIR + ${PAHO_MQTT_C_DEV_LIB_DIR} + ${PAHO_MQTT_C_STD_LIB_DIR} + ${PAHO_MQTT_C_STD64_LIB_DIR}) + +## extract Paho MQTT C binary directory (Windows may place libraries there) +get_filename_component(PAHO_MQTT_C_BIN_DIR ${PAHO_MQTT_C_PATH}/bin ABSOLUTE) + +## add library suffixes so Windows can find Paho DLLs +set(CMAKE_FIND_LIBRARY_PREFIXES ${CMAKE_FIND_LIBRARY_PREFIXES} "") +set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES} ".dll" ".lib") + +if(PAHO_WITH_SSL) + ## find the Paho MQTT C SSL library + find_library(PAHO_MQTT_C_LIB + NAMES paho-mqtt3as + mqtt3as + PATHS ${PAHO_MQTT_C_LIB_DIR} + ${PAHO_MQTT_C_BIN_DIR}) + + find_package(OpenSSL REQUIRED) +else() + ## find the Paho MQTT C library + find_library(PAHO_MQTT_C_LIB + NAMES paho-mqtt3a + mqtt + paho-mqtt + mqtt3 + paho-mqtt3 + mqtt3a + PATHS ${PAHO_MQTT_C_LIB_DIR} + ${PAHO_MQTT_C_BIN_DIR}) +endif() + +## use the Paho MQTT C library if found. Otherwise terminate the compilation +if(${PAHO_MQTT_C_LIB} STREQUAL "PAHO_MQTT_C_LIB-NOTFOUND") + message(FATAL_ERROR "Could not find Paho MQTT C library") +else() + include_directories(${PAHO_MQTT_C_INC_DIR}) + link_directories(${PAHO_MQTT_C_LIB_DIR}) + target_link_libraries(${PAHO_MQTT_CPP} + ${PAHO_MQTT_C_LIB} + ${paho_mqtt3_LIBRARIES}) +endif() + diff --git a/node_modules/npm-mas-mas/cmaki_generator/pipeline.py b/node_modules/npm-mas-mas/cmaki_generator/pipeline.py new file mode 100644 index 0000000..d0c44ed --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/pipeline.py @@ -0,0 +1,287 @@ +import os +import sys +import logging +import contextlib +import utils +import shutil +from third_party import exceptions_fail_group +from third_party import exceptions_fail_program +from third_party import FailThirdParty + + +def make_pipe(): + def process(): + pass + return process + + +def end_pipe(): + def process(p): + _ = list(p) + return process + + +def _create(): + b = make_pipe() + e = yield b + end_pipe()(e) + yield + + +@contextlib.contextmanager +def create(): + c = _create() + p = next(c) + yield (p, c) + + +def feed(packages): + def process(_): + for node in packages: + yield node + return process + + +def do(function, force, *args, **kwargs): + ''' + skeleton gtc stage + ''' + def process(packages): + def _process(): + for node in packages: + try: + package = node.get_package_name() + version = node.get_version() + + if not force: + # skip processing if the package arrived with an error + if node.ret != 0: + logging.info('%s %s error detected: skipping' % (function.__name__, package)) + continue + + # skip processing if the package arrived interrupted + if node.interrupted: + logging.info('%s %s error detected: skipping' % (function.__name__, package)) + continue + + if function.__name__ != 'purge': + logger_function = logging.info + else: + logger_function = logging.debug + + logger_function('--------- begin@%s: %s (%s) --------' % (function.__name__, package, version)) + + # process package + ret = function(node, *args, **kwargs) +
logging.debug('%s: return %s' % (function.__name__, ret)) + if isinstance(ret, bool): + if not ret: + node.ret += 1 + elif isinstance(ret, int): + # aggregation result + node.ret += abs(ret) + else: + logging.error('%s %s invalid return value: %s' % (function.__name__, package, ret)) + node.ret += 1 + + logger_function('--------- end@%s: %s (%s) --------' % (function.__name__, package, version)) + + if node.ret != 0: + node.fail_stage = function.__name__ + raise FailThirdParty('[exception] %s fail in stage: %s' % (package, function.__name__)) + + except FailThirdParty: + logging.error('fatal exception in package %s (%s)' % (package, version)) + node.ret += 1 + node.fail_stage = function.__name__ + raise + except exceptions_fail_group: + logging.error('fatal exception in package %s (%s)' % (package, version)) + node.ret += 1 + # store the exception so it can be reported later as postponed + node.exceptions.append(sys.exc_info()) + node.fail_stage = function.__name__ + raise + except exceptions_fail_program: + logging.error('interruption in package %s (%s)' % (package, version)) + node.ret += 1 + node.fail_stage = function.__name__ + node.interrupted = True + raise + except: + # exceptions caused by programming errors + logging.error('Postponed exception in package %s (%s)' % (package, version)) + node.ret += 1 + node.exceptions.append(sys.exc_info()) + node.fail_stage = function.__name__ + finally: + # send to next step + yield node + + for node in _process(): + yield node + return process + +####################### PIPELINE PROOF OF CONCEPT (THE CODE BELOW IS NOT USED) ############### + + +def echo(line): + def process(_): + yield line + return process + + +def cat(): + def process(p): + for line in p: + if(os.path.exists(line)): + with open(line, 'rt') as f: + for line2 in f: + yield line2 + else: + logging.warning(' filename %s does not exist' % line) + return process + + +def find(folder, level=999): + def process(_): + for root, dirs, files in utils.walklevel(folder, level): + for name in files: + yield os.path.join(root, name) + return process + + +def grep(pattern): + def process(p): + for line in p: + if line.find(pattern) != -1: + yield line + return process + + +def grep_basename(pattern): + def process(p): + p0 = pattern[:1] + pL = pattern[-1:] + fixed_pattern = pattern.replace('*', '') + for line in p: + if(p0 == '*' and pL != '*'): + if os.path.basename(line).endswith(fixed_pattern): + yield line.replace('\\', '/') + elif(p0 != '*' and pL == '*'): + if os.path.basename(line).startswith(fixed_pattern): + yield line.replace('\\', '/') + else: + if os.path.basename(line).find(fixed_pattern) != -1: + yield line.replace('\\', '/') + return process + + +def grep_v(pattern): + def process(p): + for line in p: + if line.find(pattern) == -1: + yield line + return process + + +def endswith(pattern): + def process(p): + for line in p: + if line.endswith(pattern): + yield line + return process + + +def copy(rootdir, folder): + def process(p): + for line in p: + relfilename = os.path.relpath(line, rootdir) + destiny = os.path.join(folder, relfilename) + destiny_dir = os.path.dirname(destiny) + utils.trymkdir(destiny_dir) + shutil.copyfile(line, destiny) + if not os.path.exists(destiny): + raise Exception("Does not exist: %s" % destiny) + yield destiny + return process + + +def startswith(pattern): + def process(p): + for line in p: + if line.startswith(pattern): + yield line + return process + + +def printf(prefix = ''): + def process(p): + for line in p: + print("%s%s" % (prefix, line.rstrip())) + yield line + return process + +
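+# Illustrative sketch (assumed usage, not taken from the real driver): each stage factory above +# returns a process(iterable) generator, so a caller can chain feed() and do() over already-parsed +# package nodes roughly like this: +# +# p = make_pipe() +# p = feed(packages)(p) +# p = do(purge, False, parameters)(p) +# end_pipe()(p) +# +# Here packages and parameters are assumed inputs; purge is the stage defined in purge.py. +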
+def info(prefix = ''): + def process(p): + for line in p: + logging.info("%s%s" % (prefix, line.rstrip())) + yield line + return process + + +def debug(prefix = ''): + def process(p): + for line in p: + logging.debug("%s%s" % (prefix, line.rstrip())) + yield line + return process + + +def write_file(filename, mode='wt'): + def process(p): + content = [] + for line in p: + content.append(line) + with open(filename, mode) as f: + for line in content: + f.write('%s\n' % line.rstrip()) + for line in content: + yield line + return process + + +def tee(filename): + def process(p): + p = printf()(p) + p = write_file(filename)(p) + for line in p: + yield line + return process + + +def example_context(): + # using context + with create() as (p, finisher): + p = find('.')(p) + p = endswith('.cpp')(p) + p = cat()(p) + p = tee('result.txt')(p) + # send last part + finisher.send(p) + + +def example_simple(): + # not using context + p = make_pipe() + # begin + p = find('.', 2)(p) + p = endswith('.yml')(p) + p = grep_v('.build_')(p) + p = tee('result.txt')(p) + # end + end_pipe()(p) + +if __name__ == '__main__': + example_simple() diff --git a/node_modules/npm-mas-mas/cmaki_generator/prepare.py b/node_modules/npm-mas-mas/cmaki_generator/prepare.py new file mode 100644 index 0000000..d15de46 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/prepare.py @@ -0,0 +1,72 @@ +import os +import sys +import utils +import logging +import shutil +from third_party import platforms +from third_party import build_unittests_foldername +from itertools import product +from third_party import prefered + + +def prepare(node, parameters, compiler_replace_maps): + + package = node.get_package_name() + + # source folder + source_dir = os.path.join(os.getcwd(), package) + utils.trymkdir(source_dir) + + # generate .build.sh / .build.cmd if one is defined in the yaml + node.get_generate_custom_script(source_dir) + + # generate find.script / find.cmd + node.generate_scripts_headers(compiler_replace_maps) + + # read root CMakeLists.txt + with open('CMakeLists.txt', 'rt') as f: + content_cmakelists = f.read() + + # NOTE: stop deleting these once we recycle binaries + node.remove_packages() + + # run_tests or packing + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + logging.info('Preparing mode %s - %s' % (plat, build_mode)) + build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) + utils.trymkdir(build_directory) + + # download source and prepare in build_directory + node.prepare_third_party(build_directory, compiler_replace_maps) + + # copy source files to build + logging.debug('Copy sources to build: %s -> %s' % (source_dir, build_directory)) + utils.copy_folder_recursive(source_dir, build_directory) + + # apply before_copy entries + with utils.working_directory(build_directory): + for bc in node.get_before_copy(): + chunks = [x.strip() for x in bc.split(' ') if x] + if len(chunks) != 2: + raise Exception('Invalid value in before_copy: %s' % bc) + logging.debug('Copy "%s" to "%s"' % (chunks[0], chunks[1])) + shutil.copy2(chunks[0], chunks[1]) + + # if there is a CMakeLists.txt, insert the root CMakeLists header + cmake_prefix = node.get_cmake_prefix() + build_cmakelist = os.path.join(build_directory, cmake_prefix, 'CMakeLists.txt') + if os.path.exists(build_cmakelist) and (not node.has_custom_script(source_dir)): + with open(build_cmakelist, 'rt') as f: + content_cmakelists_package = f.read() + with open(build_cmakelist, 'wt') as f: + f.write('%s\n' % content_cmakelists) +
f.write('%s\n' % content_cmakelists_package) + + if parameters.fast: + logging.debug('skipping for because is in fast mode: "prepare"') + break + + # finish well + return True + diff --git a/node_modules/npm-mas-mas/cmaki_generator/purge.py b/node_modules/npm-mas-mas/cmaki_generator/purge.py new file mode 100644 index 0000000..2349465 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/purge.py @@ -0,0 +1,36 @@ +import os +import utils +import logging +from third_party import platforms + +def purge(node, parameters): + + package = node.get_package_name() + + logging.debug("Cleaning headers and cmakefiles %s" % package) + node.remove_scripts_headers() + node.remove_cmakefiles() + + logging.debug("Cleaning download %s" % package) + uncompress_directory = node.get_download_directory() + utils.tryremove_dir(uncompress_directory) + + original_directory = node.get_original_directory() + utils.tryremove_dir(original_directory) + + for plat in platforms: + + if not node.get_exclude_from_clean(): + logging.debug("Cleaning install %s" % package) + utils.tryremove_dir(node.get_install_directory(plat)) + + build_modes = node.get_build_modes() + for build_mode in build_modes: + + logging.debug("Cleaning build %s" % package) + build_directory = node.get_build_directory(plat, build_mode) + utils.tryremove_dir(build_directory) + + # finish well + return True + diff --git a/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt new file mode 100644 index 0000000..618b3f8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt @@ -0,0 +1,34 @@ +cmake_minimum_required(VERSION 2.6) +project(RakNetLibStatic) + +FILE(GLOB ALL_HEADER_SRCS ${RakNet_SOURCE_DIR}/Source/*.h) +FILE(GLOB ALL_CPP_SRCS ${RakNet_SOURCE_DIR}/Source/*.cpp) + +include_directories( ${RAKNET_INTERNAL_INCLUDE_DIRS} ) + +add_library(RakNetLibStatic STATIC ${ALL_CPP_SRCS} ${ALL_HEADER_SRCS} readme.txt) + +IF(WIN32 AND NOT UNIX) + SET( CMAKE_CXX_FLAGS "/D WIN32 /D _RAKNET_LIB /D _CRT_NONSTDC_NO_DEPRECATE /D _CRT_SECURE_NO_DEPRECATE /GS- /GR- ") +ENDIF(WIN32 AND NOT UNIX) + +IF(WIN32 AND NOT UNIX) + target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) + + IF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") + + IF( MSVC10 OR MSVC11 OR MSVC12 OR MSVC14 ) + set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:\"LIBCD.lib LIBCMTD.lib MSVCRT.lib\"" ) + ELSE() + set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:"LIBCD.lib LIBCMTD.lib MSVCRT.lib"" ) + ENDIF() + + ENDIF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") + +ELSE(WIN32 AND NOT UNIX) + target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) + INSTALL(TARGETS RakNetLibStatic DESTINATION ${RakNet_SOURCE_DIR}/Lib/RakNetLibStatic) + INSTALL(FILES ${ALL_HEADER_SRCS} DESTINATION ${RakNet_SOURCE_DIR}/include/raknet) +ENDIF(WIN32 AND NOT UNIX) + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp b/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp new file mode 100644 index 0000000..8f20dfa --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp @@ -0,0 +1,372 @@ +/* + * Copyright (c) 2014, Oculus VR, Inc. + * All rights reserved. 
+ * + * This source code is licensed under the BSD-style license found in the + * LICENSE file in the root directory of this source tree. An additional grant + * of patent rights can be found in the PATENTS file in the same directory. + * + */ + +#include "CCRakNetSlidingWindow.h" + +#if USE_SLIDING_WINDOW_CONGESTION_CONTROL==1 + +static const double UNSET_TIME_US=-1; + +#if CC_TIME_TYPE_BYTES==4 +static const CCTimeType SYN=10; +#else +static const CCTimeType SYN=10000; +#endif + +#include "MTUSize.h" +#include +#include +#include +#include "RakAssert.h" +#include "RakAlloca.h" + +using namespace RakNet; + +// ****************************************************** PUBLIC METHODS ****************************************************** + +CCRakNetSlidingWindow::CCRakNetSlidingWindow() +{ +} +// ---------------------------------------------------------------------------------------------------------------------------- +CCRakNetSlidingWindow::~CCRakNetSlidingWindow() +{ + +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::Init(CCTimeType curTime, uint32_t maxDatagramPayload) +{ + (void) curTime; + + lastRtt=estimatedRTT=deviationRtt=UNSET_TIME_US; + RakAssert(maxDatagramPayload <= MAXIMUM_MTU_SIZE); + MAXIMUM_MTU_INCLUDING_UDP_HEADER=maxDatagramPayload; + cwnd=maxDatagramPayload; + ssThresh=0.0; + oldestUnsentAck=0; + nextDatagramSequenceNumber=0; + nextCongestionControlBlock=0; + backoffThisBlock=speedUpThisBlock=false; + expectedNextSequenceNumber=0; + _isContinuousSend=false; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::Update(CCTimeType curTime, bool hasDataToSendOrResend) +{ + (void) curTime; + (void) hasDataToSendOrResend; +} +// ---------------------------------------------------------------------------------------------------------------------------- +int CCRakNetSlidingWindow::GetRetransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) +{ + (void) curTime; + (void) isContinuousSend; + (void) timeSinceLastTick; + + return unacknowledgedBytes; +} +// ---------------------------------------------------------------------------------------------------------------------------- +int CCRakNetSlidingWindow::GetTransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) +{ + (void) curTime; + (void) timeSinceLastTick; + + _isContinuousSend=isContinuousSend; + + if (unacknowledgedBytes<=cwnd) + return (int) (cwnd-unacknowledgedBytes); + else + return 0; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::ShouldSendACKs(CCTimeType curTime, CCTimeType estimatedTimeToNextTick) +{ + CCTimeType rto = GetSenderRTOForACK(); + (void) estimatedTimeToNextTick; + + // iphone crashes on comparison between double and int64 http://www.jenkinssoftware.com/forum/index.php?topic=2717.0 + if (rto==(CCTimeType) UNSET_TIME_US) + { + // Unknown how long until the remote system will retransmit, so better send right away + return true; + } + + return curTime >= oldestUnsentAck + SYN; +} +// ---------------------------------------------------------------------------------------------------------------------------- +DatagramSequenceNumberType 
CCRakNetSlidingWindow::GetNextDatagramSequenceNumber(void) +{ + return nextDatagramSequenceNumber; +} +// ---------------------------------------------------------------------------------------------------------------------------- +DatagramSequenceNumberType CCRakNetSlidingWindow::GetAndIncrementNextDatagramSequenceNumber(void) +{ + DatagramSequenceNumberType dsnt=nextDatagramSequenceNumber; + nextDatagramSequenceNumber++; + return dsnt; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendBytes(CCTimeType curTime, uint32_t numBytes) +{ + (void) curTime; + (void) numBytes; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnGotPacketPair(DatagramSequenceNumberType datagramSequenceNumber, uint32_t sizeInBytes, CCTimeType curTime) +{ + (void) curTime; + (void) sizeInBytes; + (void) datagramSequenceNumber; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::OnGotPacket(DatagramSequenceNumberType datagramSequenceNumber, bool isContinuousSend, CCTimeType curTime, uint32_t sizeInBytes, uint32_t *skippedMessageCount) +{ + (void) curTime; + (void) sizeInBytes; + (void) isContinuousSend; + + if (oldestUnsentAck==0) + oldestUnsentAck=curTime; + + if (datagramSequenceNumber==expectedNextSequenceNumber) + { + *skippedMessageCount=0; + expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; + } + else if (GreaterThan(datagramSequenceNumber, expectedNextSequenceNumber)) + { + *skippedMessageCount=datagramSequenceNumber-expectedNextSequenceNumber; + // Sanity check, just use timeout resend if this was really valid + if (*skippedMessageCount>1000) + { + // During testing, the nat punchthrough server got 51200 on the first packet. 
I have no idea where this comes from, but has happened twice + if (*skippedMessageCount>(uint32_t)50000) + return false; + *skippedMessageCount=1000; + } + expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; + } + else + { + *skippedMessageCount=0; + } + + return true; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnResend(CCTimeType curTime, RakNet::TimeUS nextActionTime) +{ + (void) curTime; + (void) nextActionTime; + + if (_isContinuousSend && backoffThisBlock==false && cwnd>MAXIMUM_MTU_INCLUDING_UDP_HEADER*2) + { + // Spec says 1/2 cwnd, but it never recovers because cwnd increases too slowly + //ssThresh=cwnd-8.0 * (MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd); + ssThresh=cwnd/2; + if (ssThresh ssThresh && ssThresh!=0) + cwnd = ssThresh + MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; + + // CC PRINTF + // printf("++ %.0f Slow start increase.\n", cwnd); + + } + else if (isNewCongestionControlPeriod) + { + cwnd+=MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; + + // CC PRINTF + // printf("+ %.0f Congestion avoidance increase.\n", cwnd); + } +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnDuplicateAck( CCTimeType curTime, DatagramSequenceNumberType sequenceNumber ) +{ + (void) curTime; + (void) sequenceNumber; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendAckGetBAndAS(CCTimeType curTime, bool *hasBAndAS, BytesPerMicrosecond *_B, BytesPerMicrosecond *_AS) +{ + (void) curTime; + (void) _B; + (void) _AS; + + *hasBAndAS=false; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendAck(CCTimeType curTime, uint32_t numBytes) +{ + (void) curTime; + (void) numBytes; + + oldestUnsentAck=0; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendNACK(CCTimeType curTime, uint32_t numBytes) +{ + (void) curTime; + (void) numBytes; + +} +// ---------------------------------------------------------------------------------------------------------------------------- +CCTimeType CCRakNetSlidingWindow::GetRTOForRetransmission(unsigned char timesSent) const +{ + (void) timesSent; + +#if CC_TIME_TYPE_BYTES==4 + const CCTimeType maxThreshold=2000; + //const CCTimeType minThreshold=100; + const CCTimeType additionalVariance=30; +#else + const CCTimeType maxThreshold=2000000; + //const CCTimeType minThreshold=100000; + const CCTimeType additionalVariance=30000; +#endif + + + if (estimatedRTT==UNSET_TIME_US) + return maxThreshold; + + //double u=1.0f; + double u=2.0f; + double q=4.0f; + + CCTimeType threshhold = (CCTimeType) (u * estimatedRTT + q * deviationRtt) + additionalVariance; + if (threshhold > maxThreshold) + return maxThreshold; + return threshhold; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::SetMTU(uint32_t bytes) +{ + RakAssert(bytes < MAXIMUM_MTU_SIZE); + MAXIMUM_MTU_INCLUDING_UDP_HEADER=bytes; +} +// 
---------------------------------------------------------------------------------------------------------------------------- +uint32_t CCRakNetSlidingWindow::GetMTU(void) const +{ + return MAXIMUM_MTU_INCLUDING_UDP_HEADER; +} +// ---------------------------------------------------------------------------------------------------------------------------- +BytesPerMicrosecond CCRakNetSlidingWindow::GetLocalReceiveRate(CCTimeType currentTime) const +{ + (void) currentTime; + + return 0; // TODO +} +// ---------------------------------------------------------------------------------------------------------------------------- +double CCRakNetSlidingWindow::GetRTT(void) const +{ + if (lastRtt==UNSET_TIME_US) + return 0.0; + return lastRtt; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::GreaterThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) +{ + // a > b? + const DatagramSequenceNumberType halfSpan =(DatagramSequenceNumberType) (((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2); + return b!=a && b-a>halfSpan; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::LessThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) +{ + // a < b? + const DatagramSequenceNumberType halfSpan = ((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2; + return b!=a && b-aGetNetworkID() < data->replica->GetNetworkID()) + return -1; + if (replica3->GetNetworkID() > data->replica->GetNetworkID()) + return 1; + */ + + // 7/28/2013 - If GetNetworkID chagned during runtime, the list would be out of order and lookup would always fail or go out of bounds + // I remember before that I could not directly compare + if (replica3->referenceIndex < data->replica->referenceIndex) + return -1; + if (replica3->referenceIndex > data->replica->referenceIndex) + return 1; + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +LastSerializationResult::LastSerializationResult() +{ + replica=0; + lastSerializationResultBS=0; + whenLastSerialized = RakNet::GetTime(); +} +LastSerializationResult::~LastSerializationResult() +{ + if (lastSerializationResultBS) + RakNet::OP_DELETE(lastSerializationResultBS,_FILE_AND_LINE_); +} +void LastSerializationResult::AllocBS(void) +{ + if (lastSerializationResultBS==0) + { + lastSerializationResultBS=RakNet::OP_NEW(_FILE_AND_LINE_); + } +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +ReplicaManager3::ReplicaManager3() +{ + defaultSendParameters.orderingChannel=0; + defaultSendParameters.priority=HIGH_PRIORITY; + defaultSendParameters.reliability=RELIABLE_ORDERED; + defaultSendParameters.sendReceipt=0; + autoSerializeInterval=30; + lastAutoSerializeOccurance=0; + autoCreateConnections=true; + autoDestroyConnections=true; + currentlyDeallocatingReplica=0; + + for (unsigned int i=0; i < 255; i++) + worldsArray[i]=0; + + AddWorld(0); +} + +// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +ReplicaManager3::~ReplicaManager3() +{ + if (autoDestroyConnections) + { + for (unsigned int i=0; i < worldsList.Size(); i++) + { + RakAssert(worldsList[i]->connectionList.Size()==0); + } + } + Clear(true); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetAutoManageConnections(bool autoCreate, bool autoDestroy) +{ + autoCreateConnections=autoCreate; + autoDestroyConnections=autoDestroy; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::GetAutoCreateConnections(void) const +{ + return autoCreateConnections; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::GetAutoDestroyConnections(void) const +{ + return autoDestroyConnections; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::AutoCreateConnectionList( + DataStructures::List &participantListIn, + DataStructures::List &participantListOut, + WorldId worldId) +{ + for (unsigned int index=0; index < participantListIn.Size(); index++) + { + if (GetConnectionByGUID(participantListIn[index], worldId)) + { + Connection_RM3 *connection = AllocConnection(rakPeerInterface->GetSystemAddressFromGuid(participantListIn[index]), participantListIn[index]); + if (connection) + { + PushConnection(connection); + participantListOut.Push(connection, _FILE_AND_LINE_); + } + } + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::PushConnection(RakNet::Connection_RM3 *newConnection, WorldId worldId) +{ + if (newConnection==0) + return false; + if (GetConnectionByGUID(newConnection->GetRakNetGUID(), worldId)) + return false; + // Was this intended? 
+ RakAssert(newConnection->GetRakNetGUID()!=rakPeerInterface->GetMyGUID()); + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index = world->connectionList.GetIndexOf(newConnection); + if (index==(unsigned int)-1) + { + world->connectionList.Push(newConnection,_FILE_AND_LINE_); + + // Send message to validate the connection + newConnection->SendValidation(rakPeerInterface, worldId); + + Connection_RM3::ConstructionMode constructionMode = newConnection->QueryConstructionMode(); + if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int pushIdx; + for (pushIdx=0; pushIdx < world->userReplicaList.Size(); pushIdx++) + newConnection->OnLocalReference(world->userReplicaList[pushIdx], this); + } + } + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::DeallocReplicaNoBroadcastDestruction(RakNet::Connection_RM3 *connection, RakNet::Replica3 *replica3) +{ + currentlyDeallocatingReplica=replica3; + replica3->DeallocReplica(connection); + currentlyDeallocatingReplica=0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RakNet::Connection_RM3 * ReplicaManager3::PopConnection(unsigned int index, WorldId worldId) +{ + DataStructures::List replicaList; + DataStructures::List destructionList; + DataStructures::List broadcastList; + RakNet::Connection_RM3 *connection; + unsigned int index2; + RM3ActionOnPopConnection action; + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + connection=world->connectionList[index]; + + // Clear out downloadGroup + connection->ClearDownloadGroup(rakPeerInterface); + + RakNetGUID guid = connection->GetRakNetGUID(); + // This might be wrong, I am relying on the variable creatingSystemGuid which is transmitted + // automatically from the first system to reference the object. However, if an object changes + // owners then it is not going to be returned here, and therefore QueryActionOnPopConnection() + // will not be called for the new owner. 
+ GetReplicasCreatedByGuid(guid, replicaList); + + for (index2=0; index2 < replicaList.Size(); index2++) + { + action = replicaList[index2]->QueryActionOnPopConnection(connection); + replicaList[index2]->OnPoppedConnection(connection); + if (action==RM3AOPC_DELETE_REPLICA) + { + if (replicaList[index2]->GetNetworkIDManager()) + destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); + } + else if (action==RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION) + { + if (replicaList[index2]->GetNetworkIDManager()) + destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); + + broadcastList.Push( replicaList[index2], _FILE_AND_LINE_ ); + } + else if (action==RM3AOPC_DO_NOTHING) + { + for (unsigned int index3 = 0; index3 < connection->queryToSerializeReplicaList.Size(); index3++) + { + LastSerializationResult *lsr = connection->queryToSerializeReplicaList[index3]; + lsr->whenLastSerialized=0; + if (lsr->lastSerializationResultBS) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + lsr->lastSerializationResultBS->bitStream[z].Reset(); + } + } + } + } + + BroadcastDestructionList(broadcastList, connection->GetSystemAddress()); + for (index2=0; index2 < destructionList.Size(); index2++) + { + // Do lookup in case DeallocReplica destroyed one of of the later Replica3 instances in the list + Replica3* replicaToDestroy = world->networkIDManager->GET_OBJECT_FROM_ID(destructionList[index2]); + if (replicaToDestroy) + { + replicaToDestroy->PreDestruction(connection); + replicaToDestroy->DeallocReplica(connection); + } + } + + world->connectionList.RemoveAtIndex(index); + return connection; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RakNet::Connection_RM3 * ReplicaManager3::PopConnection(RakNetGUID guid, WorldId worldId) +{ + unsigned int index; + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetRakNetGUID()==guid) + { + return PopConnection(index, worldId); + } + } + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::Reference(RakNet::Replica3 *replica3, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index = ReferenceInternal(replica3, worldId); + + if (index!=(unsigned int)-1) + { + unsigned int pushIdx; + for (pushIdx=0; pushIdx < world->connectionList.Size(); pushIdx++) + { + Connection_RM3::ConstructionMode constructionMode = world->connectionList[pushIdx]->QueryConstructionMode(); + if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + world->connectionList[pushIdx]->OnLocalReference(replica3, this); + } + } + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +unsigned int ReplicaManager3::ReferenceInternal(RakNet::Replica3 *replica3, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && 
"World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + index = world->userReplicaList.GetIndexOf(replica3); + if (index==(unsigned int)-1) + { + RakAssert(world->networkIDManager); + replica3->SetNetworkIDManager(world->networkIDManager); + // If it crashes on rakPeerInterface==0 then you didn't call RakPeerInterface::AttachPlugin() + if (replica3->creatingSystemGUID==UNASSIGNED_RAKNET_GUID) + replica3->creatingSystemGUID=rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + replica3->replicaManager=this; + if (replica3->referenceIndex==(uint32_t)-1) + { + replica3->referenceIndex=nextReferenceIndex++; + } + world->userReplicaList.Push(replica3,_FILE_AND_LINE_); + return world->userReplicaList.Size()-1; + } + return (unsigned int) -1; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::Dereference(RakNet::Replica3 *replica3, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index, index2; + for (index=0; index < world->userReplicaList.Size(); index++) + { + if (world->userReplicaList[index]==replica3) + { + world->userReplicaList.RemoveAtIndex(index); + break; + } + } + + // Remove from all connections + for (index2=0; index2 < world->connectionList.Size(); index2++) + { + world->connectionList[index2]->OnDereference(replica3, this); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::DereferenceList(DataStructures::List &replicaListIn, WorldId worldId) +{ + unsigned int index; + for (index=0; index < replicaListIn.Size(); index++) + Dereference(replicaListIn[index], worldId); +} + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetReplicasCreatedByMe(DataStructures::List &replicaListOut, WorldId worldId) +{ + //RakNetGUID myGuid = rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + GetReplicasCreatedByGuid(rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS), replicaListOut, worldId); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetReferencedReplicaList(DataStructures::List &replicaListOut, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + replicaListOut=world->userReplicaList; +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetReplicasCreatedByGuid(RakNetGUID guid, DataStructures::List &replicaListOut, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + replicaListOut.Clear(false,_FILE_AND_LINE_); + unsigned int index; + for (index=0; index < world->userReplicaList.Size(); index++) 
+ { + if (world->userReplicaList[index]->creatingSystemGUID==guid) + replicaListOut.Push(world->userReplicaList[index],_FILE_AND_LINE_); + } +} + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +unsigned ReplicaManager3::GetReplicaCount(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->userReplicaList.Size(); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3 *ReplicaManager3::GetReplicaAtIndex(unsigned index, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->userReplicaList[index]; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +unsigned int ReplicaManager3::GetConnectionCount(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->connectionList.Size(); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3* ReplicaManager3::GetConnectionAtIndex(unsigned index, WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->connectionList[index]; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3* ReplicaManager3::GetConnectionBySystemAddress(const SystemAddress &sa, WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetSystemAddress()==sa) + { + return world->connectionList[index]; + } + } + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3* ReplicaManager3::GetConnectionByGUID(RakNetGUID guid, WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetRakNetGUID()==guid) + { + return world->connectionList[index]; + } + } + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetDefaultOrderingChannel(char def) +{ + defaultSendParameters.orderingChannel=def; +} + +// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetDefaultPacketPriority(PacketPriority def) +{ + defaultSendParameters.priority=def; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetDefaultPacketReliability(PacketReliability def) +{ + defaultSendParameters.reliability=def; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetAutoSerializeInterval(RakNet::Time intervalMS) +{ + autoSerializeInterval=intervalMS; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetConnectionsThatHaveReplicaConstructed(Replica3 *replica, DataStructures::List &connectionsThatHaveConstructedThisReplica, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + connectionsThatHaveConstructedThisReplica.Clear(false,_FILE_AND_LINE_); + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->HasReplicaConstructed(replica)) + connectionsThatHaveConstructedThisReplica.Push(world->connectionList[index],_FILE_AND_LINE_); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::GetAllConnectionDownloadsCompleted(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetDownloadWasCompleted()==false) + return false; + } + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::Clear(bool deleteWorlds) +{ + for (unsigned int i=0; i < worldsList.Size(); i++) + { + worldsList[i]->Clear(this); + if (deleteWorlds) + { + worldsArray[worldsList[i]->worldId]=0; + delete worldsList[i]; + } + } + if (deleteWorlds) + worldsList.Clear(false, _FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +ReplicaManager3::RM3World::RM3World() +{ + networkIDManager=0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::RM3World::Clear(ReplicaManager3 *replicaManager3) +{ + if (replicaManager3->GetAutoDestroyConnections()) + { + for (unsigned int i=0; i < connectionList.Size(); i++) + 
replicaManager3->DeallocConnection(connectionList[i]); + } + else + { + // Clear out downloadGroup even if not auto destroying the connection, since the packets need to go back to RakPeer + for (unsigned int i=0; i < connectionList.Size(); i++) + connectionList[i]->ClearDownloadGroup(replicaManager3->GetRakPeerInterface()); + } + + for (unsigned int i=0; i < userReplicaList.Size(); i++) + { + userReplicaList[i]->replicaManager=0; + userReplicaList[i]->SetNetworkIDManager(0); + } + connectionList.Clear(true,_FILE_AND_LINE_); + userReplicaList.Clear(true,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PRO ReplicaManager3::GetDefaultSendParameters(void) const +{ + return defaultSendParameters; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::AddWorld(WorldId worldId) +{ + RakAssert(worldsArray[worldId]==0 && "World already in use"); + + RM3World *newWorld = RakNet::OP_NEW(_FILE_AND_LINE_); + newWorld->worldId=worldId; + worldsArray[worldId]=newWorld; + worldsList.Push(newWorld,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::RemoveWorld(WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + for (unsigned int i=0; i < worldsList.Size(); i++) + { + if (worldsList[i]==worldsArray[worldId]) + { + RakNet::OP_DELETE(worldsList[i],_FILE_AND_LINE_); + worldsList.RemoveAtIndexFast(i); + break; + } + } + worldsArray[worldId]=0; + +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +NetworkIDManager *ReplicaManager3::GetNetworkIDManager(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->networkIDManager; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetNetworkIDManager(NetworkIDManager *_networkIDManager, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + world->networkIDManager=_networkIDManager; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnReceive(Packet *packet) +{ + if (packet->length<2) + return RR_CONTINUE_PROCESSING; + + WorldId incomingWorldId; + + RakNet::Time timestamp=0; + unsigned char packetIdentifier, packetDataOffset; + if ( ( unsigned char ) packet->data[ 0 ] == ID_TIMESTAMP ) + { + if ( packet->length > sizeof( unsigned char ) + sizeof( RakNet::Time ) ) + { + packetIdentifier = ( unsigned char ) packet->data[ sizeof( unsigned char ) + sizeof( RakNet::Time ) ]; + // Required for proper endian swapping + RakNet::BitStream 
tsBs(packet->data+sizeof(MessageID),packet->length-1,false); + tsBs.Read(timestamp); + // Next line assumes worldId is only 1 byte + RakAssert(sizeof(WorldId)==1); + incomingWorldId=packet->data[sizeof( unsigned char )*2 + sizeof( RakNet::Time )]; + packetDataOffset=sizeof( unsigned char )*3 + sizeof( RakNet::Time ); + } + else + return RR_STOP_PROCESSING_AND_DEALLOCATE; + } + else + { + packetIdentifier = ( unsigned char ) packet->data[ 0 ]; + // Next line assumes worldId is only 1 byte + RakAssert(sizeof(WorldId)==1); + incomingWorldId=packet->data[sizeof( unsigned char )]; + packetDataOffset=sizeof( unsigned char )*2; + } + + if (worldsArray[incomingWorldId]==0) + return RR_CONTINUE_PROCESSING; + + switch (packetIdentifier) + { + case ID_REPLICA_MANAGER_CONSTRUCTION: + return OnConstruction(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); + case ID_REPLICA_MANAGER_SERIALIZE: + return OnSerialize(packet, packet->data, packet->length, packet->guid, timestamp, packetDataOffset, incomingWorldId); + case ID_REPLICA_MANAGER_DOWNLOAD_STARTED: + if (packet->wasGeneratedLocally==false) + { + return OnDownloadStarted(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); + } + else + break; + case ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE: + if (packet->wasGeneratedLocally==false) + { + return OnDownloadComplete(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); + } + else + break; + case ID_REPLICA_MANAGER_SCOPE_CHANGE: + { + Connection_RM3 *connection = GetConnectionByGUID(packet->guid, incomingWorldId); + if (connection && connection->isValidated==false) + { + // This connection is now confirmed bidirectional + connection->isValidated=true; + // Reply back on validation + connection->SendValidation(rakPeerInterface,incomingWorldId); + } + } + } + + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::AutoConstructByQuery(ReplicaManager3 *replicaManager3, WorldId worldId) +{ + ValidateLists(replicaManager3); + + ConstructionMode constructionMode = QueryConstructionMode(); + + unsigned int index; + RM3ConstructionState constructionState; + LastSerializationResult *lsr; + index=0; + + constructedReplicasCulled.Clear(false,_FILE_AND_LINE_); + destroyedReplicasCulled.Clear(false,_FILE_AND_LINE_); + + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + while (index < queryToConstructReplicaList.Size()) + { + lsr=queryToConstructReplicaList[index]; + constructionState=lsr->replica->QueryConstruction(this, replicaManager3); + if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY || constructionState==RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT) + { + OnReplicaAlreadyExists(index, replicaManager3); + if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) + constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); + + /* + if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) + { + // Serialize construction data to this connection + RakNet::BitStream bsOut; + bsOut.Write((MessageID)ID_REPLICA_MANAGER_3_SERIALIZE_CONSTRUCTION_EXISTING); + bsOut.Write(replicaManager3->GetWorldID()); + NetworkID networkId; + networkId=lsr->replica->GetNetworkID(); + bsOut.Write(networkId); + BitSize_t bitsWritten = 
bsOut.GetNumberOfBitsUsed(); + lsr->replica->SerializeConstructionExisting(&bsOut, this); + if (bsOut.GetNumberOfBitsUsed()!=bitsWritten) + replicaManager3->SendUnified(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,GetSystemAddress(), false); + } + + // Serialize first serialization to this connection. + // This is done here, as it isn't done in PushConstruction + SerializeParameters sp; + RakNet::BitStream emptyBs; + for (index=0; index < (unsigned int) RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) + { + sp.lastSentBitstream[index]=&emptyBs; + sp.pro[index]=replicaManager3->GetDefaultSendParameters(); + } + sp.bitsWrittenSoFar=0; + sp.destinationConnection=this; + sp.messageTimestamp=0; + sp.whenLastSerialized=0; + + RakNet::Replica3 *replica = lsr->replica; + + RM3SerializationResult res = replica->Serialize(&sp); + if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && + res!=RM3SR_DO_NOT_SERIALIZE && + res!=RM3SR_SERIALIZED_UNIQUELY) + { + bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); + allIndices[z]=true; + } + if (SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, replicaManager3->GetRakPeerInterface(), replicaManager3->GetWorldID())==SSICR_SENT_DATA) + lsr->replica->whenLastSerialized=RakNet::GetTimeMS(); + } + */ + } + else if (constructionState==RM3CS_SEND_CONSTRUCTION) + { + OnConstructToThisConnection(index, replicaManager3); + RakAssert(lsr->replica); + constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); + } + else if (constructionState==RM3CS_NEVER_CONSTRUCT) + { + OnNeverConstruct(index, replicaManager3); + } + else// if (constructionState==RM3CS_NO_ACTION) + { + // Do nothing + index++; + } + } + + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + RM3DestructionState destructionState; + index=0; + while (index < queryToDestructReplicaList.Size()) + { + lsr=queryToDestructReplicaList[index]; + destructionState=lsr->replica->QueryDestruction(this, replicaManager3); + if (destructionState==RM3DS_SEND_DESTRUCTION) + { + OnSendDestructionFromQuery(index, replicaManager3); + destroyedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); + } + else if (destructionState==RM3DS_DO_NOT_QUERY_DESTRUCTION) + { + OnDoNotQueryDestruction(index, replicaManager3); + } + else// if (destructionState==RM3CS_NO_ACTION) + { + // Do nothing + index++; + } + } + } + } + else if (constructionMode==QUERY_CONNECTION_FOR_REPLICA_LIST) + { + QueryReplicaList(constructedReplicasCulled,destroyedReplicasCulled); + + unsigned int idx1, idx2; + + // Create new + for (idx2=0; idx2 < constructedReplicasCulled.Size(); idx2++) + OnConstructToThisConnection(constructedReplicasCulled[idx2], replicaManager3); + + bool exists; + for (idx2=0; idx2 < destroyedReplicasCulled.Size(); idx2++) + { + exists=false; + bool objectExists; + idx1=constructedReplicaList.GetIndexFromKey(destroyedReplicasCulled[idx2], &objectExists); + if (objectExists) + { + constructedReplicaList.RemoveAtIndex(idx1); + + unsigned int j; + for (j=0; j < queryToSerializeReplicaList.Size(); j++) + { + if (queryToSerializeReplicaList[j]->replica==destroyedReplicasCulled[idx2] ) + { + queryToSerializeReplicaList.RemoveAtIndex(j); + break; + } + } + } + } + } + + SendConstruction(constructedReplicasCulled,destroyedReplicasCulled,replicaManager3->defaultSendParameters,replicaManager3->rakPeerInterface,worldId,replicaManager3); +} +void 
ReplicaManager3::Update(void) +{ + unsigned int index,index2,index3; + + WorldId worldId; + RM3World *world; + RakNet::Time time = RakNet::GetTime(); + + for (index3=0; index3 < worldsList.Size(); index3++) + { + world = worldsList[index3]; + worldId = world->worldId; + + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->isValidated==false) + continue; + world->connectionList[index]->AutoConstructByQuery(this, worldId); + } + } + + if (time - lastAutoSerializeOccurance >= autoSerializeInterval) + { + for (index3=0; index3 < worldsList.Size(); index3++) + { + world = worldsList[index3]; + worldId = world->worldId; + + for (index=0; index < world->userReplicaList.Size(); index++) + { + world->userReplicaList[index]->forceSendUntilNextUpdate=false; + world->userReplicaList[index]->OnUserReplicaPreSerializeTick(); + } + + unsigned int index; + SerializeParameters sp; + sp.curTime=time; + Connection_RM3 *connection; + SendSerializeIfChangedResult ssicr; + LastSerializationResult *lsr; + + sp.messageTimestamp=0; + for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) + sp.pro[i]=defaultSendParameters; + index2=0; + for (index=0; index < world->connectionList.Size(); index++) + { + connection = world->connectionList[index]; + sp.bitsWrittenSoFar=0; + index2=0; + sp.destinationConnection=connection; + + DataStructures::List replicasToSerialize; + replicasToSerialize.Clear(true, _FILE_AND_LINE_); + if (connection->QuerySerializationList(replicasToSerialize)) + { + // Update replica->lsr so we can lookup in the next block + // lsr is per connection / per replica + while (index2 < connection->queryToSerializeReplicaList.Size()) + { + connection->queryToSerializeReplicaList[index2]->replica->lsr=connection->queryToSerializeReplicaList[index2]; + index2++; + } + + + // User is manually specifying list of replicas to serialize + index2=0; + while (index2 < replicasToSerialize.Size()) + { + lsr=replicasToSerialize[index2]->lsr; + RakAssert(lsr->replica==replicasToSerialize[index2]); + + sp.whenLastSerialized=lsr->whenLastSerialized; + ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); + if (ssicr==SSICR_SENT_DATA) + lsr->whenLastSerialized=time; + index2++; + } + } + else + { + while (index2 < connection->queryToSerializeReplicaList.Size()) + { + lsr=connection->queryToSerializeReplicaList[index2]; + + sp.destinationConnection=connection; + sp.whenLastSerialized=lsr->whenLastSerialized; + ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); + if (ssicr==SSICR_SENT_DATA) + { + lsr->whenLastSerialized=time; + index2++; + } + else if (ssicr==SSICR_NEVER_SERIALIZE) + { + // Removed from the middle of the list + } + else + index2++; + } + } + } + } + + lastAutoSerializeOccurance=time; + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnClosedConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, PI2_LostConnectionReason lostConnectionReason ) +{ + (void) lostConnectionReason; + (void) systemAddress; + if (autoDestroyConnections) + { + Connection_RM3 *connection = PopConnection(rakNetGUID); + if (connection) + DeallocConnection(connection); + } +} + +// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnNewConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, bool isIncoming) +{ + (void) isIncoming; + if (autoCreateConnections) + { + Connection_RM3 *connection = AllocConnection(systemAddress, rakNetGUID); + if (connection) + PushConnection(connection); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnRakPeerShutdown(void) +{ + if (autoDestroyConnections) + { + RM3World *world; + unsigned int index3; + for (index3=0; index3 < worldsList.Size(); index3++) + { + world = worldsList[index3]; + + while (world->connectionList.Size()) + { + Connection_RM3 *connection = PopConnection(world->connectionList.Size()-1, world->worldId); + if (connection) + DeallocConnection(connection); + } + } + } + + + Clear(false); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnDetach(void) +{ + OnRakPeerShutdown(); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnConstruction(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) +{ + RM3World *world = worldsArray[worldId]; + + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + { + // Almost certainly a bug + RakAssert("Got OnConstruction but no connection yet" && 0); + return RR_CONTINUE_PROCESSING; + } + if (connection->groupConstructionAndSerialize) + { + connection->downloadGroup.Push(packet, __FILE__, __LINE__); + return RR_STOP_PROCESSING; + } + + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + uint16_t constructionObjectListSize, destructionObjectListSize, index, index2; + BitSize_t streamEnd, writeAllocationIDEnd; + Replica3 *replica; + NetworkID networkId; + RakNetGUID creatingSystemGuid; + bool actuallyCreateObject=false; + + DataStructures::List actuallyCreateObjectList; + DataStructures::List constructionTickStack; + + RakAssert(world->networkIDManager); + + bsIn.Read(constructionObjectListSize); + for (index=0; index < constructionObjectListSize; index++) + { + bsIn.Read(streamEnd); + bsIn.Read(networkId); + Replica3* existingReplica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); + bsIn.Read(actuallyCreateObject); + actuallyCreateObjectList.Push(actuallyCreateObject, _FILE_AND_LINE_); + bsIn.AlignReadToByteBoundary(); + + if (actuallyCreateObject) + { + bsIn.Read(creatingSystemGuid); + bsIn.Read(writeAllocationIDEnd); + + //printf("OnConstruction: %i\n",networkId.guid.g); // Removeme + if (existingReplica) + { + existingReplica->replicaManager=this; + + // Network ID already in use + connection->OnDownloadExisting(existingReplica, this); + + constructionTickStack.Push(0, _FILE_AND_LINE_); + bsIn.SetReadOffset(streamEnd); + continue; + } + + bsIn.AlignReadToByteBoundary(); + 
replica = connection->AllocReplica(&bsIn, this); + if (replica==0) + { + constructionTickStack.Push(0, _FILE_AND_LINE_); + bsIn.SetReadOffset(streamEnd); + continue; + } + + // Go past the bitStream written to with WriteAllocationID(). Necessary in case the user didn't read out the bitStream the same way it was written + // bitOffset2 is already aligned + bsIn.SetReadOffset(writeAllocationIDEnd); + + replica->SetNetworkIDManager(world->networkIDManager); + replica->SetNetworkID(networkId); + + replica->replicaManager=this; + replica->creatingSystemGUID=creatingSystemGuid; + + if (!replica->QueryRemoteConstruction(connection) || + !replica->DeserializeConstruction(&bsIn, connection)) + { + DeallocReplicaNoBroadcastDestruction(connection, replica); + bsIn.SetReadOffset(streamEnd); + constructionTickStack.Push(0, _FILE_AND_LINE_); + continue; + } + + constructionTickStack.Push(replica, _FILE_AND_LINE_); + + // Register the replica + ReferenceInternal(replica, worldId); + } + else + { + if (existingReplica) + { + existingReplica->DeserializeConstructionExisting(&bsIn, connection); + constructionTickStack.Push(existingReplica, _FILE_AND_LINE_); + } + else + { + constructionTickStack.Push(0, _FILE_AND_LINE_); + } + } + + + bsIn.SetReadOffset(streamEnd); + bsIn.AlignReadToByteBoundary(); + } + + RakAssert(constructionTickStack.Size()==constructionObjectListSize); + RakAssert(actuallyCreateObjectList.Size()==constructionObjectListSize); + + RakNet::BitStream empty; + for (index=0; index < constructionObjectListSize; index++) + { + bool pdcWritten=false; + bsIn.Read(pdcWritten); + if (pdcWritten) + { + bsIn.AlignReadToByteBoundary(); + bsIn.Read(streamEnd); + bsIn.Read(networkId); + if (constructionTickStack[index]!=0) + { + bsIn.AlignReadToByteBoundary(); + if (actuallyCreateObjectList[index]) + constructionTickStack[index]->PostDeserializeConstruction(&bsIn, connection); + else + constructionTickStack[index]->PostDeserializeConstructionExisting(&bsIn, connection); + } + bsIn.SetReadOffset(streamEnd); + } + else + { + if (constructionTickStack[index]!=0) + { + if (actuallyCreateObjectList[index]) + constructionTickStack[index]->PostDeserializeConstruction(&empty, connection); + else + constructionTickStack[index]->PostDeserializeConstructionExisting(&empty, connection); + } + } + } + + for (index=0; index < constructionObjectListSize; index++) + { + if (constructionTickStack[index]!=0) + { + if (actuallyCreateObjectList[index]) + { + // Tell the connection(s) that this object exists since they just sent it to us + connection->OnDownloadFromThisSystem(constructionTickStack[index], this); + + for (index2=0; index2 < world->connectionList.Size(); index2++) + { + if (world->connectionList[index2]!=connection) + world->connectionList[index2]->OnDownloadFromOtherSystem(constructionTickStack[index], this); + } + } + } + } + + // Destructions + bool b = bsIn.Read(destructionObjectListSize); + (void) b; + RakAssert(b); + for (index=0; index < destructionObjectListSize; index++) + { + bsIn.Read(networkId); + bsIn.Read(streamEnd); + replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); + if (replica==0) + { + // Unknown object + bsIn.SetReadOffset(streamEnd); + continue; + } + bsIn.Read(replica->deletingSystemGUID); + if (replica->DeserializeDestruction(&bsIn,connection)) + { + // Make sure it wasn't deleted in DeserializeDestruction + if (world->networkIDManager->GET_OBJECT_FROM_ID(networkId)) + { + replica->PreDestruction(connection); + + // Forward deletion by remote system + if 
(replica->QueryRelayDestruction(connection)) + BroadcastDestruction(replica,connection->GetSystemAddress()); + Dereference(replica); + DeallocReplicaNoBroadcastDestruction(connection, replica); + } + } + else + { + replica->PreDestruction(connection); + connection->OnDereference(replica, this); + } + + bsIn.AlignReadToByteBoundary(); + } + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnSerialize(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, RakNet::Time timestamp, unsigned char packetDataOffset, WorldId worldId) +{ + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + return RR_CONTINUE_PROCESSING; + if (connection->groupConstructionAndSerialize) + { + connection->downloadGroup.Push(packet, __FILE__, __LINE__); + return RR_STOP_PROCESSING; + } + + RM3World *world = worldsArray[worldId]; + RakAssert(world->networkIDManager); + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + + struct DeserializeParameters ds; + ds.timeStamp=timestamp; + ds.sourceConnection=connection; + + Replica3 *replica; + NetworkID networkId; + BitSize_t bitsUsed; + bsIn.Read(networkId); + //printf("OnSerialize: %i\n",networkId.guid.g); // Removeme + replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); + if (replica) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + bsIn.Read(ds.bitstreamWrittenTo[z]); + if (ds.bitstreamWrittenTo[z]) + { + bsIn.ReadCompressed(bitsUsed); + bsIn.AlignReadToByteBoundary(); + bsIn.Read(ds.serializationBitstream[z], bitsUsed); + } + } + replica->Deserialize(&ds); + } + return RR_CONTINUE_PROCESSING; +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnDownloadStarted(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) +{ + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + return RR_CONTINUE_PROCESSING; + if (connection->QueryGroupDownloadMessages() && + // ID_DOWNLOAD_STARTED will be processed twice, being processed the second time once ID_DOWNLOAD_COMPLETE arrives. 
+ // However, the second time groupConstructionAndSerialize will be set to true so it won't be processed a third time + connection->groupConstructionAndSerialize==false + ) + { + // These messages will be held by the plugin and returned when the download is complete + connection->groupConstructionAndSerialize=true; + RakAssert(connection->downloadGroup.Size()==0); + connection->downloadGroup.Push(packet, __FILE__, __LINE__); + return RR_STOP_PROCESSING; + } + + connection->groupConstructionAndSerialize=false; + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + connection->DeserializeOnDownloadStarted(&bsIn); + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnDownloadComplete(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) +{ + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + return RR_CONTINUE_PROCESSING; + + if (connection->groupConstructionAndSerialize==true && connection->downloadGroup.Size()>0) + { + // Push back buffered packets in front of this one + unsigned int i; + for (i=0; i < connection->downloadGroup.Size(); i++) + rakPeerInterface->PushBackPacket(connection->downloadGroup[i],false); + + // Push this one to be last too. It will be processed again, but the second time + // groupConstructionAndSerialize will be false and downloadGroup will be empty, so it will go past this block + connection->downloadGroup.Clear(__FILE__,__LINE__); + rakPeerInterface->PushBackPacket(packet,false); + + return RR_STOP_PROCESSING; + } + + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + connection->gotDownloadComplete=true; + connection->DeserializeOnDownloadComplete(&bsIn); + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3* ReplicaManager3::GetReplicaByNetworkID(NetworkID networkId, WorldId worldId) +{ + RM3World *world = worldsArray[worldId]; + + unsigned int i; + for (i=0; i < world->userReplicaList.Size(); i++) + { + if (world->userReplicaList[i]->GetNetworkID()==networkId) + return world->userReplicaList[i]; + } + return 0; +} + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + +void ReplicaManager3::BroadcastDestructionList(DataStructures::List &replicaListSource, const SystemAddress &exclusionAddress, WorldId worldId) +{ + RakNet::BitStream bsOut; + unsigned int i,j; + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + DataStructures::List replicaList; + + for (i=0; i < replicaListSource.Size(); i++) + { + if (replicaListSource[i]==currentlyDeallocatingReplica) + continue; + replicaList.Push(replicaListSource[i], __FILE__, __LINE__); + } + + if (replicaList.Size()==0) + return; + + for (i=0; i < replicaList.Size(); i++) + { + if (replicaList[i]->deletingSystemGUID==UNASSIGNED_RAKNET_GUID) + 
replicaList[i]->deletingSystemGUID=GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS);
+ }
+
+ for (j=0; j < world->connectionList.Size(); j++)
+ {
+ if (world->connectionList[j]->GetSystemAddress()==exclusionAddress)
+ continue;
+
+ bsOut.Reset();
+ bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION);
+ bsOut.Write(worldId);
+ uint16_t cnt=0;
+ bsOut.Write(cnt); // No construction
+ cnt=(uint16_t) replicaList.Size();
+ BitSize_t cntOffset=bsOut.GetWriteOffset();
+ bsOut.Write(cnt); // Overwritten at send call
+ cnt=0;
+
+ for (i=0; i < replicaList.Size(); i++)
+ {
+ if (world->connectionList[j]->HasReplicaConstructed(replicaList[i])==false)
+ continue;
+ cnt++;
+
+ NetworkID networkId;
+ networkId=replicaList[i]->GetNetworkID();
+ bsOut.Write(networkId);
+ BitSize_t offsetStart, offsetEnd;
+ offsetStart=bsOut.GetWriteOffset();
+ bsOut.Write(offsetStart);
+ bsOut.Write(replicaList[i]->deletingSystemGUID);
+ replicaList[i]->SerializeDestruction(&bsOut, world->connectionList[j]);
+ bsOut.AlignWriteToByteBoundary();
+ offsetEnd=bsOut.GetWriteOffset();
+ bsOut.SetWriteOffset(offsetStart);
+ bsOut.Write(offsetEnd);
+ bsOut.SetWriteOffset(offsetEnd);
+ }
+
+ if (cnt>0)
+ {
+ BitSize_t curOffset=bsOut.GetWriteOffset();
+ bsOut.SetWriteOffset(cntOffset);
+ bsOut.Write(cnt);
+ bsOut.SetWriteOffset(curOffset);
+ rakPeerInterface->Send(&bsOut,defaultSendParameters.priority,defaultSendParameters.reliability,defaultSendParameters.orderingChannel,world->connectionList[j]->GetSystemAddress(),false, defaultSendParameters.sendReceipt);
+ }
+ }
+}
+
+// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+
+void ReplicaManager3::BroadcastDestruction(Replica3 *replica, const SystemAddress &exclusionAddress)
+{
+ DataStructures::List<Replica3*> replicaList;
+ replicaList.Push(replica, _FILE_AND_LINE_ );
+ BroadcastDestructionList(replicaList,exclusionAddress);
+}
+
+// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+
+// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+Connection_RM3::Connection_RM3(const SystemAddress &_systemAddress, RakNetGUID _guid)
+: systemAddress(_systemAddress), guid(_guid)
+{
+ isValidated=false;
+ isFirstConstruction=true;
+ groupConstructionAndSerialize=false;
+ gotDownloadComplete=false;
+}
+
+// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+Connection_RM3::~Connection_RM3()
+{
+ unsigned int i;
+ for (i=0; i < constructedReplicaList.Size(); i++)
+ RakNet::OP_DELETE(constructedReplicaList[i], _FILE_AND_LINE_);
+ for (i=0; i < queryToConstructReplicaList.Size(); i++)
+ RakNet::OP_DELETE(queryToConstructReplicaList[i], _FILE_AND_LINE_);
+}
+
+// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+
+void Connection_RM3::GetConstructedReplicas(DataStructures::List<Replica3*> &objectsTheyDoHave)
+{
+ objectsTheyDoHave.Clear(true,_FILE_AND_LINE_);
+ for
(unsigned int idx=0; idx < constructedReplicaList.Size(); idx++) + { + objectsTheyDoHave.Push(constructedReplicaList[idx]->replica, _FILE_AND_LINE_ ); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Connection_RM3::HasReplicaConstructed(RakNet::Replica3 *replica) +{ + bool objectExists; + constructedReplicaList.GetIndexFromKey(replica, &objectExists); + return objectExists; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +void Connection_RM3::SendSerializeHeader(RakNet::Replica3 *replica, RakNet::Time timestamp, RakNet::BitStream *bs, WorldId worldId) +{ + bs->Reset(); + + if (timestamp!=0) + { + bs->Write((MessageID)ID_TIMESTAMP); + bs->Write(timestamp); + } + bs->Write((MessageID)ID_REPLICA_MANAGER_SERIALIZE); + bs->Write(worldId); + bs->Write(replica->GetNetworkID()); +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +void Connection_RM3::ClearDownloadGroup(RakPeerInterface *rakPeerInterface) +{ + unsigned int i; + for (i=0; i < downloadGroup.Size(); i++) + rakPeerInterface->DeallocatePacket(downloadGroup[i]); + downloadGroup.Clear(__FILE__,__LINE__); +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +SendSerializeIfChangedResult Connection_RM3::SendSerialize(RakNet::Replica3 *replica, bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::BitStream serializationData[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::Time timestamp, PRO sendParameters[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakPeerInterface *rakPeer, unsigned char worldId, RakNet::Time curTime) +{ + bool channelHasData; + BitSize_t sum=0; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (indicesToSend[z]) + sum+=serializationData[z].GetNumberOfBitsUsed(); + } + + RakNet::BitStream out; + BitSize_t bitsPerChannel[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + + if (sum==0) + { + memset(bitsPerChannel, 0, sizeof(bitsPerChannel)); + replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); + return SSICR_DID_NOT_SEND_DATA; + } + + RakAssert(replica->GetNetworkID()!=UNASSIGNED_NETWORK_ID); + + BitSize_t bitsUsed; + + int channelIndex; + PRO lastPro=sendParameters[0]; + + for (channelIndex=0; channelIndex < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex++) + { + if (channelIndex==0) + { + SendSerializeHeader(replica, timestamp, &out, worldId); + } + else if (lastPro!=sendParameters[channelIndex]) + { + // Write out remainder + for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) + { + bitsPerChannel[channelIndex2]=0; + out.Write(false); + } + + // Send remainder + replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); + rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); + + // If no data left to send, quit out + bool anyData=false; + for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) + { + if 
(serializationData[channelIndex2].GetNumberOfBitsUsed()>0) + { + anyData=true; + break; + } + } + if (anyData==false) + return SSICR_SENT_DATA; + + // Restart stream + SendSerializeHeader(replica, timestamp, &out, worldId); + + for (int channelIndex2=0; channelIndex2 < channelIndex; channelIndex2++) + { + bitsPerChannel[channelIndex2]=0; + out.Write(false); + } + lastPro=sendParameters[channelIndex]; + } + + bitsUsed=serializationData[channelIndex].GetNumberOfBitsUsed(); + channelHasData = indicesToSend[channelIndex]==true && bitsUsed>0; + out.Write(channelHasData); + if (channelHasData) + { + bitsPerChannel[channelIndex] = bitsUsed; + out.WriteCompressed(bitsUsed); + out.AlignWriteToByteBoundary(); + out.Write(serializationData[channelIndex]); + // Crap, forgot this line, was a huge bug in that I'd only send to the first 3 systems + serializationData[channelIndex].ResetReadPointer(); + } + else + { + bitsPerChannel[channelIndex] = 0; + } + } + replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); + rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); + return SSICR_SENT_DATA; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +SendSerializeIfChangedResult Connection_RM3::SendSerializeIfChanged(LastSerializationResult *lsr, SerializeParameters *sp, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager, RakNet::Time curTime) +{ + RakNet::Replica3 *replica = lsr->replica; + + if (replica->GetNetworkID()==UNASSIGNED_NETWORK_ID) + return SSICR_DID_NOT_SEND_DATA; + + RM3QuerySerializationResult rm3qsr = replica->QuerySerialization(this); + if (rm3qsr==RM3QSR_NEVER_CALL_SERIALIZE) + { + // Never again for this connection and replica pair + OnNeverSerialize(lsr, replicaManager); + return SSICR_NEVER_SERIALIZE; + } + + if (rm3qsr==RM3QSR_DO_NOT_CALL_SERIALIZE) + return SSICR_DID_NOT_SEND_DATA; + + if (replica->forceSendUntilNextUpdate) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (replica->lastSentSerialization.indicesToSend[z]) + sp->bitsWrittenSoFar+=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed(); + } + return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, replica->lastSentSerialization.bitStream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); + } + + for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) + { + sp->outputBitstream[i].Reset(); + if (lsr->lastSerializationResultBS) + sp->lastSentBitstream[i]=&lsr->lastSerializationResultBS->bitStream[i]; + else + sp->lastSentBitstream[i]=&replica->lastSentSerialization.bitStream[i]; + } + + RM3SerializationResult serializationResult = replica->Serialize(sp); + + if (serializationResult==RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION) + { + // Never again for this connection and replica pair + OnNeverSerialize(lsr, replicaManager); + return SSICR_NEVER_SERIALIZE; + } + + if (serializationResult==RM3SR_DO_NOT_SERIALIZE) + { + // Don't serialize this tick only + return SSICR_DID_NOT_SEND_DATA; + } + + // This is necessary in case the user in the Serialize() function for some reason read the bitstream they also wrote + // WIthout this code, the Write calls to another bitstream would not write the entire bitstream + BitSize_t sum=0; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + 
sp->outputBitstream[z].ResetReadPointer(); + sum+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + } + + if (sum==0) + { + // Don't serialize this tick only + return SSICR_DID_NOT_SEND_DATA; + } + + if (serializationResult==RM3SR_SERIALIZED_ALWAYS) + { + bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + allIndices[z]=true; + + lsr->AllocBS(); + lsr->lastSerializationResultBS->bitStream[z].Reset(); + lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + } + return SendSerialize(replica, allIndices, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); + } + + if (serializationResult==RM3SR_SERIALIZED_ALWAYS_IDENTICALLY) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + replica->lastSentSerialization.indicesToSend[z]=sp->outputBitstream[z].GetNumberOfBitsUsed()>0; + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + replica->lastSentSerialization.bitStream[z].Reset(); + replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + replica->forceSendUntilNextUpdate=true; + } + return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); + } + + bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && + (serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION || + ((sp->outputBitstream[z].GetNumberOfBitsUsed()!=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed() || + memcmp(sp->outputBitstream[z].GetData(), replica->lastSentSerialization.bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0)))) + { + indicesToSend[z]=true; + replica->lastSentSerialization.indicesToSend[z]=true; + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + replica->lastSentSerialization.bitStream[z].Reset(); + replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + replica->forceSendUntilNextUpdate=true; + } + else + { + indicesToSend[z]=false; + replica->lastSentSerialization.indicesToSend[z]=false; + } + } + } + else + { + lsr->AllocBS(); + + // RM3SR_SERIALIZED_UNIQUELY + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && + (sp->outputBitstream[z].GetNumberOfBitsUsed()!=lsr->lastSerializationResultBS->bitStream[z].GetNumberOfBitsUsed() || + memcmp(sp->outputBitstream[z].GetData(), lsr->lastSerializationResultBS->bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0) + ) + { + indicesToSend[z]=true; + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + lsr->lastSerializationResultBS->bitStream[z].Reset(); + lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + } + else + { + indicesToSend[z]=false; + } + } + } + + + if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) + 
replica->forceSendUntilNextUpdate=true; + + // Send out the data + return SendSerialize(replica, indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +void Connection_RM3::OnLocalReference(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + RakAssert(replica3); + (void) replicaManager; + (void) constructionMode; + +#ifdef _DEBUG + for (unsigned int i=0; i < queryToConstructReplicaList.Size(); i++) + { + if (queryToConstructReplicaList[i]->replica==replica3) + { + RakAssert("replica added twice to queryToConstructReplicaList" && 0); + } + } + + if (constructedReplicaList.HasData(replica3)==true) + { + RakAssert("replica added to queryToConstructReplicaList when already in constructedReplicaList" && 0); + } +#endif + + LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); + lsr->replica=replica3; + queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDereference(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + + if (replica3->GetNetworkIDManager() == 0) + return; + + LastSerializationResult* lsr=0; + unsigned int idx; + + bool objectExists; + idx=constructedReplicaList.GetIndexFromKey(replica3, &objectExists); + if (objectExists) + { + lsr=constructedReplicaList[idx]; + constructedReplicaList.RemoveAtIndex(idx); + } + + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + if (queryToConstructReplicaList[idx]->replica==replica3) + { + lsr=queryToConstructReplicaList[idx]; + queryToConstructReplicaList.RemoveAtIndex(idx); + break; + } + } + + for (idx=0; idx < queryToSerializeReplicaList.Size(); idx++) + { + if (queryToSerializeReplicaList[idx]->replica==replica3) + { + lsr=queryToSerializeReplicaList[idx]; + queryToSerializeReplicaList.RemoveAtIndex(idx); + break; + } + } + + for (idx=0; idx < queryToDestructReplicaList.Size(); idx++) + { + if (queryToDestructReplicaList[idx]->replica==replica3) + { + lsr=queryToDestructReplicaList[idx]; + queryToDestructReplicaList.RemoveAtIndex(idx); + break; + } + } + + ValidateLists(replicaManager); + + if (lsr) + RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); + + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDownloadFromThisSystem(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + RakAssert(replica3); + + ValidateLists(replicaManager); + LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); + lsr->replica=replica3; + + ConstructionMode constructionMode = QueryConstructionMode(); + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int j; + for (j=0; j < queryToConstructReplicaList.Size(); j++) + { + if 
(queryToConstructReplicaList[j]->replica==replica3 ) + { + queryToConstructReplicaList.RemoveAtIndex(j); + break; + } + } + + queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); + } + + if (constructedReplicaList.Insert(lsr->replica, lsr, true, _FILE_AND_LINE_) != (unsigned) -1) + { + //assert(queryToSerializeReplicaList.GetIndexOf(replica3)==(unsigned int)-1); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); + } + + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDownloadFromOtherSystem(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int j; + for (j=0; j < queryToConstructReplicaList.Size(); j++) + { + if (queryToConstructReplicaList[j]->replica==replica3 ) + { + return; + } + } + + OnLocalReference(replica3, replicaManager); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnNeverConstruct(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; + queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); + RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnConstructToThisConnection(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; + queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); + //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); + //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); + //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnConstructToThisConnection(Replica3 *replica, ReplicaManager3 *replicaManager) +{ + RakAssert(replica); + 
RakAssert(QueryConstructionMode()==QUERY_CONNECTION_FOR_REPLICA_LIST); + (void) replicaManager; + + LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); + lsr->replica=replica; + constructedReplicaList.Insert(replica,lsr,true,_FILE_AND_LINE_); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnNeverSerialize(LastSerializationResult *lsr, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + + unsigned int j; + for (j=0; j < queryToSerializeReplicaList.Size(); j++) + { + if (queryToSerializeReplicaList[j]==lsr ) + { + queryToSerializeReplicaList.RemoveAtIndex(j); + break; + } + } + + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnReplicaAlreadyExists(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; + queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); + //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); + //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); + //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDownloadExisting(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + + ConstructionMode constructionMode = QueryConstructionMode(); + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int idx; + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + if (queryToConstructReplicaList[idx]->replica==replica3) + { + OnConstructToThisConnection(idx, replicaManager); + return; + } + } + } + else + { + OnConstructToThisConnection(replica3, replicaManager); + } +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnSendDestructionFromQuery(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = 
queryToDestructReplicaList[queryToDestructIdx]; + queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); + unsigned int j; + for (j=0; j < queryToSerializeReplicaList.Size(); j++) + { + if (queryToSerializeReplicaList[j]->replica==lsr->replica ) + { + queryToSerializeReplicaList.RemoveAtIndex(j); + break; + } + } + for (j=0; j < constructedReplicaList.Size(); j++) + { + if (constructedReplicaList[j]->replica==lsr->replica ) + { + constructedReplicaList.RemoveAtIndex(j); + break; + } + } + //assert(queryToConstructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDoNotQueryDestruction(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::ValidateLists(ReplicaManager3 *replicaManager) const +{ + (void) replicaManager; + /* +#ifdef _DEBUG + // Each object should exist only once in either constructedReplicaList or queryToConstructReplicaList + // replicaPointer from LastSerializationResult should be same among all lists + unsigned int idx, idx2; + for (idx=0; idx < constructedReplicaList.Size(); idx++) + { + idx2=queryToConstructReplicaList.GetIndexOf(constructedReplicaList[idx]->replica); + if (idx2!=(unsigned int)-1) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + idx2=constructedReplicaList.GetIndexOf(queryToConstructReplicaList[idx]->replica); + if (idx2!=(unsigned int)-1) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + LastSerializationResult *lsr, *lsr2; + for (idx=0; idx < constructedReplicaList.Size(); idx++) + { + lsr=constructedReplicaList[idx]; + + idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToSerializeReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToDestructReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + } + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + lsr=queryToConstructReplicaList[idx]; + + idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToSerializeReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToDestructReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + } + + // Verify pointer integrity + for (idx=0; idx < constructedReplicaList.Size(); idx++) + { + if (constructedReplicaList[idx]->replica->replicaManager!=replicaManager) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + // Verify pointer integrity + for (idx=0; idx < 
queryToConstructReplicaList.Size(); idx++) + { + if (queryToConstructReplicaList[idx]->replica->replicaManager!=replicaManager) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } +#endif + */ +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::SendConstruction(DataStructures::List &newObjects, DataStructures::List &deletedObjects, PRO sendParameters, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager3) +{ + if (newObjects.Size()==0 && deletedObjects.Size()==0) + return; + + // All construction and destruction takes place in the same network message + // Otherwise, if objects rely on each other being created the same tick to be valid, this won't always be true + // DataStructures::List serializedObjects; + BitSize_t offsetStart, offsetStart2, offsetEnd; + unsigned int newListIndex, oldListIndex; + RakNet::BitStream bsOut; + NetworkID networkId; + if (isFirstConstruction) + { + bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_STARTED); + bsOut.Write(worldId); + SerializeOnDownloadStarted(&bsOut); + rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); + } + + // LastSerializationResult* lsr; + bsOut.Reset(); + bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); + bsOut.Write(worldId); + uint16_t objectSize = (uint16_t) newObjects.Size(); + bsOut.Write(objectSize); + + // Construction + for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) + { + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); // overwritten to point to the end of the stream + networkId=newObjects[newListIndex]->GetNetworkID(); + bsOut.Write(networkId); + + RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); + bool actuallyCreateObject = cs==RM3CS_SEND_CONSTRUCTION; + bsOut.Write(actuallyCreateObject); + bsOut.AlignWriteToByteBoundary(); + + if (actuallyCreateObject) + { + // Actually create the object + bsOut.Write(newObjects[newListIndex]->creatingSystemGUID); + offsetStart2=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart2); // overwritten to point to after the call to WriteAllocationID + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + newObjects[newListIndex]->WriteAllocationID(this, &bsOut); + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart2); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + newObjects[newListIndex]->SerializeConstruction(&bsOut, this); + } + else + { + newObjects[newListIndex]->SerializeConstructionExisting(&bsOut, this); + } + + bsOut.AlignWriteToByteBoundary(); + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + + RakNet::BitStream bsOut2; + for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) + { + bsOut2.Reset(); + RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); + if (cs==RM3CS_SEND_CONSTRUCTION) + { + newObjects[newListIndex]->PostSerializeConstruction(&bsOut2, this); + } + else + { + RakAssert(cs==RM3CS_ALREADY_EXISTS_REMOTELY); + 
newObjects[newListIndex]->PostSerializeConstructionExisting(&bsOut2, this); + } + if (bsOut2.GetNumberOfBitsUsed()>0) + { + bsOut.Write(true); + bsOut.AlignWriteToByteBoundary(); + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); // overwritten to point to the end of the stream + networkId=newObjects[newListIndex]->GetNetworkID(); + bsOut.Write(networkId); + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + bsOut.Write(&bsOut2); + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + else + bsOut.Write(false); + } + bsOut.AlignWriteToByteBoundary(); + + // Destruction + objectSize = (uint16_t) deletedObjects.Size(); + bsOut.Write(objectSize); + for (oldListIndex=0; oldListIndex < deletedObjects.Size(); oldListIndex++) + { + networkId=deletedObjects[oldListIndex]->GetNetworkID(); + bsOut.Write(networkId); + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); + deletedObjects[oldListIndex]->deletingSystemGUID=rakPeer->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + bsOut.Write(deletedObjects[oldListIndex]->deletingSystemGUID); + deletedObjects[oldListIndex]->SerializeDestruction(&bsOut, this); + bsOut.AlignWriteToByteBoundary(); + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); + + // TODO - shouldn't this be part of construction? + + // Initial Download serialize to a new system + // Immediately send serialize after construction if the replica object already has saved data + // If the object was serialized identically, and does not change later on, then the new connection never gets the data + SerializeParameters sp; + sp.whenLastSerialized=0; + RakNet::BitStream emptyBs; + for (int index=0; index < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) + { + sp.lastSentBitstream[index]=&emptyBs; + sp.pro[index]=sendParameters; + sp.pro[index].reliability=RELIABLE_ORDERED; + } + + sp.bitsWrittenSoFar=0; +// RakNet::Time t = RakNet::GetTimeMS(); + for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) + { + sp.destinationConnection=this; + sp.messageTimestamp=0; + RakNet::Replica3 *replica = newObjects[newListIndex]; + // 8/22/09 Forgot ResetWritePointer + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp.outputBitstream[z].ResetWritePointer(); + } + + RM3SerializationResult res = replica->Serialize(&sp); + if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && + res!=RM3SR_DO_NOT_SERIALIZE && + res!=RM3SR_SERIALIZED_UNIQUELY) + { + bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); + allIndices[z]=true; + } + SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, rakPeer, worldId, GetTime()); +/// newObjects[newListIndex]->whenLastSerialized=t; + + } + // else wait for construction request accepted before serializing + } + + if (isFirstConstruction) + { + bsOut.Reset(); + bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE); + bsOut.Write(worldId); + SerializeOnDownloadComplete(&bsOut); + 
rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); + } + + isFirstConstruction=false; + +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::SendValidation(RakNet::RakPeerInterface *rakPeer, WorldId worldId) +{ + // Hijack to mean sendValidation + RakNet::BitStream bsOut; + bsOut.Write((MessageID)ID_REPLICA_MANAGER_SCOPE_CHANGE); + bsOut.Write(worldId); + rakPeer->Send(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,systemAddress,false); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3::Replica3() +{ + creatingSystemGUID=UNASSIGNED_RAKNET_GUID; + deletingSystemGUID=UNASSIGNED_RAKNET_GUID; + replicaManager=0; + forceSendUntilNextUpdate=false; + lsr=0; + referenceIndex = (uint32_t)-1; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3::~Replica3() +{ + if (replicaManager) + { + replicaManager->Dereference(this); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Replica3::BroadcastDestruction(void) +{ + replicaManager->BroadcastDestruction(this,UNASSIGNED_SYSTEM_ADDRESS); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RakNetGUID Replica3::GetCreatingSystemGUID(void) const +{ + return creatingSystemGUID; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ConstructionState Replica3::QueryConstruction_ClientConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + (void) destinationConnection; + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3CS_SEND_CONSTRUCTION; + // Send back to the owner client too, because they couldn't assign the network ID + if (isThisTheServer) + return RM3CS_SEND_CONSTRUCTION; + return RM3CS_NEVER_CONSTRUCT; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Replica3::QueryRemoteConstruction_ClientConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) +{ + (void) sourceConnection; + (void) isThisTheServer; + + // OK to create + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ConstructionState Replica3::QueryConstruction_ServerConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + (void) destinationConnection; + + if 
(isThisTheServer) + return RM3CS_SEND_CONSTRUCTION; + return RM3CS_NEVER_CONSTRUCT; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Replica3::QueryRemoteConstruction_ServerConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) +{ + (void) sourceConnection; + if (isThisTheServer) + return false; + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ConstructionState Replica3::QueryConstruction_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) +{ + (void) destinationConnection; + + if (p2pMode==R3P2PM_SINGLE_OWNER) + { + // We send to all, others do nothing + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3CS_SEND_CONSTRUCTION; + + // RM3CS_NEVER_CONSTRUCT will not send the object, and will not Serialize() it + return RM3CS_NEVER_CONSTRUCT; + } + else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) + { + return RM3CS_SEND_CONSTRUCTION; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) + { + return RM3CS_ALREADY_EXISTS_REMOTELY; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) + { + return RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT; + } + else + { + RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); + + // RM3CS_ALREADY_EXISTS_REMOTELY will not send the object, but WILL call QuerySerialization() and Serialize() on it. 
+ return RM3CS_ALREADY_EXISTS_REMOTELY; + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Replica3::QueryRemoteConstruction_PeerToPeer(RakNet::Connection_RM3 *sourceConnection) +{ + (void) sourceConnection; + + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3QuerySerializationResult Replica3::QuerySerialization_ClientSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + // Owner client sends to all + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3QSR_CALL_SERIALIZE; + // Server sends to all but owner client + if (isThisTheServer && destinationConnection->GetRakNetGUID()!=creatingSystemGUID) + return RM3QSR_CALL_SERIALIZE; + // Remote clients do not send + return RM3QSR_NEVER_CALL_SERIALIZE; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3QuerySerializationResult Replica3::QuerySerialization_ServerSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + (void) destinationConnection; + // Server sends to all + if (isThisTheServer) + return RM3QSR_CALL_SERIALIZE; + + // Clients do not send + return RM3QSR_NEVER_CALL_SERIALIZE; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3QuerySerializationResult Replica3::QuerySerialization_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) +{ + (void) destinationConnection; + + if (p2pMode==R3P2PM_SINGLE_OWNER) + { + // Owner peer sends to all + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3QSR_CALL_SERIALIZE; + + // Remote peers do not send + return RM3QSR_NEVER_CALL_SERIALIZE; + } + else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) + { + return RM3QSR_CALL_SERIALIZE; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) + { + return RM3QSR_CALL_SERIALIZE; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) + { + return RM3QSR_DO_NOT_CALL_SERIALIZE; + } + else + { + RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); + return RM3QSR_DO_NOT_CALL_SERIALIZE; + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Client(RakNet::Connection_RM3 *droppedConnection) const +{ + (void) droppedConnection; + return RM3AOPC_DELETE_REPLICA; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Server(RakNet::Connection_RM3 *droppedConnection) const +{ + (void) 
droppedConnection; + return RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_PeerToPeer(RakNet::Connection_RM3 *droppedConnection) const +{ + (void) droppedConnection; + return RM3AOPC_DELETE_REPLICA; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +#endif // _RAKNET_SUPPORT_* diff --git a/node_modules/npm-mas-mas/cmaki_generator/run.sh b/node_modules/npm-mas-mas/cmaki_generator/run.sh new file mode 100644 index 0000000..72b0a36 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/run.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -e +MODE=${1} +echo running in mode ${MODE} ... +mkdir -p build/${MODE} +pushd build/${MODE} +cmake ../.. -DCMAKE_BUILD_TYPE=$MODE -DCMAKE_MODULE_PATH=$(pwd)/../../cmaki -DFIRST_ERROR=1 +cmake --build . --config $MODE --target install -- -j8 -k || cmake --build . --config ${MODE} --target install -- -j1 +ctest . --no-compress-output --output-on-failure -T Test -C ${MODE} -V +popd diff --git a/node_modules/npm-mas-mas/cmaki_generator/run_test.sh b/node_modules/npm-mas-mas/cmaki_generator/run_test.sh new file mode 100644 index 0000000..967bf29 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/run_test.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +function print_if_has_content() +{ + file=$1 + minimumsize=400 + actualsize=$(wc -c <"$file") + if [ $actualsize -ge $minimumsize ]; + then + cat $file + fi +} + +echo Running test: $1 +export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer-3.6) +export ASAN_OPTIONS="check_initialization_order=1" +rm $1.coverage 2> /dev/null +rm $1.gcno 2> /dev/null +rm default.profraw 2> /dev/null +./$1 +ret=$? 
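+# The test's exit status is captured in $ret above, before the coverage
+# post-processing below; the script exits with $ret at the end, so a failing
+# test still fails this script even if the llvm coverage steps succeed.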
+llvm-profdata-3.6 merge -o $1.gcno default.profraw 2> /dev/null +llvm-cov-3.6 show ./$1 -instr-profile=$1.gcno > $1.coverage +cat $1.coverage | ansi2html > $1.html +print_if_has_content $1.html +exit $ret + diff --git a/node_modules/npm-mas-mas/cmaki_generator/run_tests.py b/node_modules/npm-mas-mas/cmaki_generator/run_tests.py new file mode 100644 index 0000000..66f01d7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/run_tests.py @@ -0,0 +1,175 @@ +import os +import utils +import logging +from third_party import platforms +from third_party import build_unittests_foldername +from itertools import product +from third_party import get_identifier + +def run_tests(node, parameters, compiler_replace_maps, unittests): + + old_cwd = os.getcwd() + + rootdir = parameters.rootdir + rootdir = utils.get_norm_path(rootdir) + rootdir = rootdir.replace('\\', '/') + + cmakelib_dir = parameters.cmakefiles + cmakelib_dir = utils.get_norm_path(cmakelib_dir) + cmakelib_dir = cmakelib_dir.replace('\\', '/') + + cmake3p_dir = parameters.prefix + cmake3p_dir = utils.get_norm_path(cmake3p_dir) + cmake3p_dir = cmake3p_dir.replace('\\', '/') + + cmake_prefix = parameters.prefix + cmake_prefix = utils.get_norm_path(cmake_prefix) + cmake_prefix = cmake_prefix.replace('\\', '/') + + cmake_third_party_dir = parameters.third_party_dir + cmake_third_party_dir = utils.get_norm_path(cmake_third_party_dir) + cmake_third_party_dir = cmake_third_party_dir.replace('\\', '/') + + package = node.get_package_name() + package_upper = node.get_package_name_norm_upper() + version = node.get_version() + packing = node.is_packing() + if not packing: + logging.warning("No need run_tests, because wasn't generated a package") + return 0 + + # prepare unittests + # can be a file or content + unittest_value = node.get_unittest() + if unittest_value is not None: + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + builddir = node.get_build_directory(plat, build_mode) + path_test = os.path.join(builddir, build_unittests_foldername) + utils.trymkdir(path_test) + + # is is a file + unittest_path = os.path.join(builddir, unittest_value) + if os.path.isfile(unittest_path): + with open(unittest_path, 'rt') as f: + unittest_value = f.read() + + with open(os.path.join(path_test, 'main.cpp'), 'wt') as f: + f.write(unittest_value) + + if parameters.fast: + logging.debug('skipping for because is in fast mode: "prepare"') + break + else: + logging.warning('[%s] No test present.' 
% package) + + folder_3rdparty = parameters.third_party_dir + output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) + + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, reversed(build_modes)): + for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): + # verify md5sum + install_directory = node.get_install_directory(plat) + workspace = node.get_workspace(plat) + utils.trymkdir(install_directory) + with utils.working_directory(install_directory): + prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) + prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) + if os.path.exists(prefix_package) and os.path.exists(prefix_package_md5): + with open(prefix_package_md5, 'rt') as f: + md5sum = f.read().strip() + + try: + logging.debug("expected md5: %s" % md5sum) + for line in utils.get_stdout('cmake -E md5sum %s' % prefix_package, env_modified, 'cmake'): + if len(line) > 0: + # md5sum filename + chunks = line.split(' ') + chunks = list(filter(None, chunks)) + assert(len(chunks) > 0) + md5sum_real = chunks[0] + logging.debug("real md5: %s" % md5sum_real) + + if (md5sum != md5sum_real): + logging.error('Error en generated md5sum file!!!') + logging.error('Expected: %s' % md5sum) + logging.error('Found: %s' % md5sum_real) + # add error to node + node.ret += 1 + except utils.NotFoundProgram: + logging.info('can\'t verify md5 because not found cmake') + else: + logging.warning('Skipping verification md5 because don\'t exists package or md5') + + logging.info('running unittests. Build mode: %s Platform: %s' % (build_mode, plat)) + + # OJO con borrar cmake3p, se borra la marca + # node.remove_cmake3p( cmake3p_dir ) + + builddir = os.path.join(old_cwd, node.get_build_directory(plat, build_mode)) + logging.info('Using builddir %s' % builddir) + unittest_folder = os.path.join(builddir, build_unittests_foldername) + unittest_found = os.path.join(unittest_folder, 'main.cpp') + unittest_found = unittest_found.replace('\\', '/') + unittest_root = os.path.join(old_cwd, build_unittests_foldername) + + if os.path.exists(unittest_found): + + logging.info('Search cmakelib in %s' % cmakelib_dir) + if os.path.isdir(os.path.join(cmakelib_dir)): + + with utils.working_directory(unittest_folder): + + generator_extra = '' + if generator is not None: + generator_extra = '-G"%s"' % generator + + find_packages = [] + find_packages.append(package) + for dep in node.get_depends_raw(): + package_name = dep.get_package_name() + find_packages.append(package_name) + find_packages_str = ';'.join(find_packages) + + # remove CMakeCache.txt for avoid problems when + # change of generator + utils.tryremove('CMakeCache.txt') + utils.tryremove('cmake_install.cmake') + utils.tryremove('install_manifest.txt') + utils.tryremove_dir('CMakeFiles') + + cmd = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKI_COMPILER="%s" -DCMAKI_PLATFORM="%s" -DCMAKE_MODULE_PATH="%s" -DPACKAGE="%s" -DPACKAGE_UPPER="%s" -DCMAKE_BUILD_TYPE="%s" -DCMAKE_PREFIX_PATH="%s" -DUNITTEST_PATH="%s" -DDEPENDS_PATH="%s" -DFIND_PACKAGES="%s" && cmake --build . --config %s --target install && ctest . 
-C %s --output-on-failure -VV' % ( + unittest_root, + generator_extra, + cmake_prefix, + get_identifier('COMPILER'), + get_identifier('ALL'), + cmakelib_dir, + package, + package_upper, + build_mode, + cmake_third_party_dir, + unittest_found, + cmake_prefix, + find_packages_str, + build_mode, + build_mode) + ret = utils.safe_system(cmd, env=env_modified) + node.ret += abs(ret) + if ret != 0: + unittests[ '%s - %s' % (package, version) ] = 'ERROR: Fail test' + else: + unittests[ '%s - %s' % (package, version) ] = 'OK: Pass test' + else: + unittests[ '%s - %s' % (package, version) ] = 'WARN: No cmakelib available' + else: + unittests[ '%s - %s' % (package, version) ] = 'WARN: No unittest found' + + if node.ret != 0: + logging.warning('Cleaning packages because tests are failed.') + node.remove_packages() + + # successful + return True + diff --git a/node_modules/npm-mas-mas/cmaki_generator/save_package.py b/node_modules/npm-mas-mas/cmaki_generator/save_package.py new file mode 100755 index 0000000..57fd37a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/save_package.py @@ -0,0 +1,31 @@ +import os +import sys +import logging +import argparse +import urllib +import csv +import utils +import subprocess + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--name', required=True, dest='name', help='name package', default=None) + parser.add_argument('--version', required=True, dest='version', help='version package fixed', default=None) + parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) + parameters = parser.parse_args() + + depends_file = parameters.depends + if os.path.exists(depends_file): + data = utils.deserialize(depends_file) + else: + data = {} + # serialize if is new data + if parameters.name not in data: + data[parameters.name] = parameters.version + logging.info('serialize data = %s' % data) + depends_file_tmp = depends_file + '.tmp' + utils.serialize(data, depends_file_tmp) + ret = subprocess.call('python -m json.tool %s > %s' % (depends_file_tmp, depends_file), shell=True) + os.remove(depends_file_tmp) + sys.exit(ret) + diff --git a/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt new file mode 100644 index 0000000..6683d9c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt @@ -0,0 +1,1366 @@ +cmake_minimum_required(VERSION 2.8) +project(SDL2 C) +include(CheckFunctionExists) +include(CheckLibraryExists) +include(CheckIncludeFiles) +include(CheckIncludeFile) +include(CheckSymbolExists) +include(CheckCSourceRuns) +include(CheckCCompilerFlag) +include(CheckTypeSize) +include(CheckStructHasMember) +include(CMakeDependentOption) +include(FindPkgConfig) +set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") +include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) +include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) + +# General settings +# Edit include/SDL_version.h and change the version, then: +# SDL_MICRO_VERSION += 1; +# SDL_INTERFACE_AGE += 1; +# SDL_BINARY_AGE += 1; +# if any functions have been added, set SDL_INTERFACE_AGE to 0. +# if backwards compatibility has been broken, +# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. 
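+# Illustrative example (not from upstream SDL): with the values set below
+# (MICRO=3, INTERFACE_AGE=1, BINARY_AGE=3), the libtool-style math gives
+# LT_CURRENT=2, LT_AGE=2, LT_MAJOR=0 and LT_REVISION=1, so LT_VERSION is "0.2.1".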
+set(SDL_MAJOR_VERSION 2) +set(SDL_MINOR_VERSION 0) +set(SDL_MICRO_VERSION 3) +set(SDL_INTERFACE_AGE 1) +set(SDL_BINARY_AGE 3) +set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") + +# Calculate a libtool-like version number +math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") +set(LT_REVISION "${SDL_INTERFACE_AGE}") +set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") +set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") + +message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") + +# General settings & flags +set(LIBRARY_OUTPUT_DIRECTORY "build") +# Check for 64 or 32 bit +set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) +if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(ARCH_64 TRUE) + set(PROCESSOR_ARCH "x64") +else() + set(ARCH_64 FALSE) + set(PROCESSOR_ARCH "x86") +endif() +set(LIBNAME SDL2) +if(NOT LIBTYPE) + set(LIBTYPE SHARED) +endif() + +# Get the platform +if(WIN32) + if(NOT WINDOWS) + set(WINDOWS TRUE) + endif() +elseif(UNIX AND NOT APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") + set(LINUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") + set(NETBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") + set(OPENBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") + set(GNU TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") + set(BSDI TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") + set(SYSV5 TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") + set(SOLARIS TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") + set(HPUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") + set(AIX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") + set(MINIX TRUE) + endif() +elseif(APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") + set(DARWIN TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") + set(MACOSX TRUE) + endif() + # TODO: iOS? +elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") + message_error("BeOS support has been removed as of SDL 2.0.2.") +elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") + set(HAIKU TRUE) +endif() + +# Don't mistake osx for unix +if(UNIX AND NOT APPLE) + set(UNIX_SYS ON) +else() + set(UNIX_SYS OFF) +endif() + +if(UNIX OR APPLE) + set(UNIX_OR_MAC_SYS ON) +else() + set(UNIX_OR_MAC_SYS OFF) +endif() + +if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. 
+ set(PTHREADS_ENABLED_BY_DEFAULT ON) +else() + set(PTHREADS_ENABLED_BY_DEFAULT OFF) +endif() + +# Default option knobs +if(APPLE OR ARCH_64) + set(OPT_DEF_SSEMATH ON) +endif() +if(UNIX OR MINGW OR MSYS) + set(OPT_DEF_LIBC ON) +endif() + +# Compiler info +if(CMAKE_COMPILER_IS_GNUCC) + set(USE_GCC TRUE) + set(OPT_DEF_ASM TRUE) +elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") + set(USE_CLANG TRUE) + set(OPT_DEF_ASM TRUE) +elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ + set(OPT_DEF_ASM TRUE) + #set(CMAKE_C_FLAGS "/ZI /WX- / +else() + set(OPT_DEF_ASM FALSE) +endif() + +# Default flags, if not set otherwise +if("$ENV{CFLAGS}" STREQUAL "") + if(USE_GCC OR USE_CLANG) + set(CMAKE_C_FLAGS "-g -O3") + endif() +else() + set(CMAKE_C_FLAGS "$ENV{CFLAGS}") + list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") +endif() +if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 + list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") +endif() + +if(MSVC) + option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) + if(FORCE_STATIC_VCRT) + foreach(flag_var + CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif() + endforeach() + endif() +endif() + +# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, +# etc. are created correctly. +set(SDL_LIBS "-lSDL2") +set(SDL_CFLAGS "") + +# Emscripten toolchain has a nonempty default value for this, and the checks +# in this file need to change that, so remember the original value, and +# restore back to that afterwards. For check_function_exists() to work in +# Emscripten, this value must be at its default value. +set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) + +if(CYGWIN) + # We build SDL on cygwin without the UNIX emulation layer + include_directories("-I/usr/include/mingw") + set(CMAKE_REQUIRED_FLAGS "-mno-cygwin") + check_c_source_compiles("int main(int argc, char **argv) {}" + HAVE_GCC_NO_CYGWIN) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_GCC_NO_CYGWIN) + list(APPEND EXTRA_LDFLAGS "-mno-cygwin") + list(APPEND SDL_LIBS "-mno-cygwin") + endif() + set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") +endif() + +add_definitions(-DUSING_GENERATED_CONFIG_H) +# General includes +include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) + +if(EMSCRIPTEN) + # Set up default values for the currently supported set of subsystems: + # Emscripten/Javascript does not have assembly support, a dynamic library + # loading architecture, low-level CPU inspection or multithreading. 
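+  # These *_ENABLED_BY_DEFAULT / OPT_DEF_* values only seed the defaults of the
+  # corresponding options declared further down (the SDL_SUBSYSTEMS loop,
+  # SDL_DLOPEN, SDL_SHARED); they do not disable anything by themselves.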
+ set(OPT_DEF_ASM FALSE) + set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) + set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) + set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) + set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) + set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) + set(DLOPEN_ENABLED_BY_DEFAULT OFF) +else() + set(SDL_SHARED_ENABLED_BY_DEFAULT ON) + set(SDL_ATOMIC_ENABLED_BY_DEFAULT ON) + set(SDL_THREADS_ENABLED_BY_DEFAULT ON) + set(SDL_LOADSO_ENABLED_BY_DEFAULT ON) + set(SDL_CPUINFO_ENABLED_BY_DEFAULT ON) + set(DLOPEN_ENABLED_BY_DEFAULT ON) +endif() + +set(SDL_SUBSYSTEMS + Atomic Audio Video Render Events Joystick Haptic Power Threads Timers + File Loadso CPUinfo Filesystem) +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) + set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) + endif() + option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) +endforeach() + +option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") +#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) +set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) +set_option(GCC_ATOMICS "Use gcc builtin atomics" ${USE_GCC}) +set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) +set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) +set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) +set_option(3DNOW "Use 3Dnow! MMX assembly routines" ${OPT_DEF_ASM}) +set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) +set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) +set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) +set_option(DISKAUDIO "Support the disk writer audio driver" ON) +set_option(DUMMYAUDIO "Support the dummy audio driver" ON) +set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) +dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) +set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) +dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND_SHARED" OFF) +set_option(VIDEO_DUMMY "Use dummy video driver" ON) +set_option(VIDEO_OPENGL "Include OpenGL support" ON) +set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) +set_option(PTHREADS "Use POSIX threads for multi-threading" ${PTHREADS_ENABLED_BY_DEFAULT}) +dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) +set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${DLOPEN_ENABLED_BY_DEFAULT}) +set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) +set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) +dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) +set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) +dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) +set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) +dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) +set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) +dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) +set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) +set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) +set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) +set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) +set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) +set_option(INPUT_TSLIB "Use the 
Touchscreen library for input" ${UNIX_SYS}) +set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) +set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) +set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) +dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) +set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) +foreach(_SUB ${SDL_X11_OPTIONS}) + string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) + dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) +endforeach() +set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) +set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) +set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) + +# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here +# The options below are for compatibility to configure's default behaviour. +set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") +set(SDL_STATIC ON CACHE BOOL "Build a static version of the library") + +# General source files +file(GLOB SOURCE_FILES + ${SDL2_SOURCE_DIR}/src/*.c + ${SDL2_SOURCE_DIR}/src/atomic/*.c + ${SDL2_SOURCE_DIR}/src/audio/*.c + ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c + ${SDL2_SOURCE_DIR}/src/dynapi/*.c + ${SDL2_SOURCE_DIR}/src/events/*.c + ${SDL2_SOURCE_DIR}/src/file/*.c + ${SDL2_SOURCE_DIR}/src/libm/*.c + ${SDL2_SOURCE_DIR}/src/render/*.c + ${SDL2_SOURCE_DIR}/src/render/*/*.c + ${SDL2_SOURCE_DIR}/src/stdlib/*.c + ${SDL2_SOURCE_DIR}/src/thread/*.c + ${SDL2_SOURCE_DIR}/src/timer/*.c + ${SDL2_SOURCE_DIR}/src/video/*.c) + + +if(ASSERTIONS STREQUAL "auto") + # Do nada - use optimization settings to determine the assertion level +elseif(ASSERTIONS STREQUAL "disabled") + set(SDL_DEFAULT_ASSERT_LEVEL 0) +elseif(ASSERTIONS STREQUAL "release") + set(SDL_DEFAULT_ASSERT_LEVEL 1) +elseif(ASSERTIONS STREQUAL "enabled") + set(SDL_DEFAULT_ASSERT_LEVEL 2) +elseif(ASSERTIONS STREQUAL "paranoid") + set(SDL_DEFAULT_ASSERT_LEVEL 3) +else() + message_error("unknown assertion level") +endif() +set(HAVE_ASSERTIONS ${ASSERTIONS}) + +# Compiler option evaluation +if(USE_GCC OR USE_CLANG) + if(DEPENDENCY_TRACKING) + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 3 + #error Dependency tracking requires GCC 3.0 or newer + #endif + int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) + endif() + + if(GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + void *x, *y, *z; + __sync_lock_test_and_set(&a, 4); + __sync_lock_test_and_set(&x, y); + __sync_fetch_and_add(&a, 1); + __sync_bool_compare_and_swap(&a, 5, 10); + __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) + if(NOT HAVE_GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + __sync_lock_test_and_set(&a, 1); + __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) + endif() + endif() + + set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") + check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" + HAVE_GCC_PREFERRED_STACK_BOUNDARY) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror -Wno-error=implicit-function-declaration") + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 4 + #error SDL only uses visibility attributes in GCC 4 or newer + #endif + int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) + if(HAVE_GCC_FVISIBILITY) + list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") + endif() + 
set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + check_c_compiler_flag(-Wall HAVE_GCC_WALL) + if(HAVE_GCC_WALL) + if(HAIKU) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") + endif() + endif() +endif() + +if(ASSEMBLY) + if(USE_GCC OR USE_CLANG) + set(SDL_ASSEMBLY_ROUTINES 1) + # TODO: Those all seem to be quite GCC specific - needs to be + # reworked for better compiler support + set(HAVE_ASSEMBLY TRUE) + if(MMX) + set(CMAKE_REQUIRED_FLAGS "-mmmx") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __MMX__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_MMX) + if(HAVE_MMX) + list(APPEND EXTRA_CFLAGS "-mmmx") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(3DNOW) + set(CMAKE_REQUIRED_FLAGS "-m3dnow") + check_c_source_compiles(" + #include + #ifndef __3dNOW__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { + void *p = 0; + _m_prefetch(p); + }" HAVE_3DNOW) + if(HAVE_3DNOW) + list(APPEND EXTRA_CFLAGS "-m3dnow") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE) + set(CMAKE_REQUIRED_FLAGS "-msse") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE) + if(HAVE_SSE) + list(APPEND EXTRA_CFLAGS "-msse") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE2) + set(CMAKE_REQUIRED_FLAGS "-msse2") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE2__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE2) + if(HAVE_SSE2) + list(APPEND EXTRA_CFLAGS "-msse2") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSEMATH) + if(SSE OR SSE2) + if(USE_GCC) + list(APPEND EXTRA_CFLAGS "-mfpmath=387") + endif() + set(HAVE_SSEMATH TRUE) + endif() + endif() + + if(ALTIVEC) + set(CMAKE_REQUIRED_FLAGS "-maltivec") + check_c_source_compiles(" + #include + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) + check_c_source_compiles(" + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set + list(APPEND EXTRA_CFLAGS "-maltivec") + set(SDL_ALTIVEC_BLITTERS 1) + if(HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC_H 1) + endif() + endif() + endif() + elseif(MSVC_VERSION GREATER 1500) + # TODO: SDL_cpuinfo.h needs to support the user's configuration wish + # for MSVC - right now it is always activated + if(NOT ARCH_64) + set(HAVE_MMX TRUE) + set(HAVE_3DNOW TRUE) + endif() + set(HAVE_SSE TRUE) + set(HAVE_SSE2 TRUE) + set(SDL_ASSEMBLY_ROUTINES 1) + endif() +# TODO: +#else() +# if(USE_GCC OR USE_CLANG) +# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-mmx") +# endif() +endif() + +# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define +# anything. 
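+# The LIBC branch below probes (or, on MSVC, simply assumes) the host C
+# library: headers, functions, libm, iconv. The resulting HAVE_* variables are
+# later baked into SDL_config.h by the configure_file() call near the end.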
+if(LIBC) + if(WINDOWS AND NOT MINGW) + set(HAVE_LIBC TRUE) + foreach(_HEADER stdio.h string.h ctype.h math.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." "_" _HAVE_H ${_UPPER}) + set(${_HAVE_H} 1) + endforeach() + set(HAVE_SIGNAL_H 1) + foreach(_FN + malloc calloc realloc free qsort abs memset memcpy memmove memcmp + strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp + _stricmp _strnicmp sscanf atan atan2 acos asin ceil copysign cos + cosf fabs floor log pow scalbn sin sinf sqrt sqrtf tan tanf) + string(TOUPPER ${_FN} _UPPER) + set(HAVE_${_UPPER} 1) + endforeach() + if(NOT CYGWIN AND NOT MINGW) + set(HAVE_ALLOCA 1) + endif() + set(HAVE_M_PI 1) + add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI + set(STDC_HEADERS 1) + else() + set(HAVE_LIBC TRUE) + check_include_file(sys/types.h HAVE_SYS_TYPES_H) + foreach(_HEADER + stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h + strings.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." "_" _HAVE_H ${_UPPER}) + check_include_file("${_HEADER}" ${_HAVE_H}) + endforeach() + + check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) + check_type_size("size_t" SIZEOF_SIZE_T) + check_symbol_exists(M_PI math.h HAVE_M_PI) + # TODO: refine the mprotect check + check_c_source_compiles("#include + #include + int main() { }" HAVE_MPROTECT) + foreach(_FN + strtod malloc calloc realloc free getenv setenv putenv unsetenv + qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat + strdup _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull + atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp + vsscanf vsnprintf fseeko fseeko64 sigaction setjmp + nanosleep sysconf sysctlbyname + ) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + + check_library_exists(m pow "" HAVE_LIBM) + if(HAVE_LIBM) + set(CMAKE_REQUIRED_LIBRARIES m) + foreach(_FN + atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin + sinf sqrt sqrtf tan tanf) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + set(CMAKE_REQUIRED_LIBRARIES) + list(APPEND EXTRA_LIBS m) + endif() + + check_library_exists(iconv iconv_open "" HAVE_LIBICONV) + if(HAVE_LIBICONV) + list(APPEND EXTRA_LIBS iconv) + endif() + + check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) + endif() +else() + if(WINDOWS) + set(HAVE_STDARG_H 1) + set(HAVE_STDDEF_H 1) + endif() +endif() + + +# Enable/disable various subsystems of the SDL library +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if(NOT SDL_${_OPT}) + set(SDL_${_OPT}_DISABLED 1) + endif() +endforeach() +if(SDL_JOYSTICK) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) +endif() +if(SDL_HAPTIC) + if(NOT SDL_JOYSTICK) + # Haptic requires some private functions from the joystick subsystem. 
+ message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") + endif() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(SDL_POWER) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) +endif() +# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: +# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set +# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), +# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings + +# General SDL subsystem options, valid for all platforms +if(SDL_AUDIO) + # CheckDummyAudio/CheckDiskAudio - valid for all platforms + if(DUMMYAUDIO) + set(SDL_AUDIO_DRIVER_DUMMY 1) + file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) + set(HAVE_DUMMYAUDIO TRUE) + endif() + if(DISKAUDIO) + set(SDL_AUDIO_DRIVER_DISK 1) + file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) + set(HAVE_DISKAUDIO TRUE) + endif() +endif() + +if(SDL_DLOPEN) + # Relevant for Unix/Darwin only + if(UNIX OR APPLE) + CheckDLOPEN() + endif() +endif() + +if(SDL_VIDEO) + if(VIDEO_DUMMY) + set(SDL_VIDEO_DRIVER_DUMMY 1) + file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) + set(HAVE_VIDEO_DUMMY TRUE) + set(HAVE_SDL_VIDEO TRUE) + endif() +endif() + +# Platform-specific options and settings +if(EMSCRIPTEN) + # Hide noisy warnings that intend to aid mostly during initial stages of porting a new + # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
+ add_definitions(-Wno-warn-absolute-paths) + if(SDL_AUDIO) + file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) + endif() + if(SDL_FILESYSTEM) + file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) + endif() + if(SDL_JOYSTICK) + file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) + endif() + if(SDL_POWER) + file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) + endif() + if(SDL_VIDEO) + file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) + endif() +elseif(UNIX AND NOT APPLE) + if(SDL_AUDIO) + if(SYSV5 OR SOLARIS OR HPUX) + set(SDL_AUDIO_DRIVER_SUNAUDIO 1) + file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(NETBSD OR OPENBSD) + set(SDL_AUDIO_DRIVER_BSD 1) + file(GLOB BSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/bsd/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${BSD_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(AIX) + set(SDL_AUDIO_DRIVER_PAUDIO 1) + file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + CheckOSS() + CheckALSA() + CheckPulseAudio() + CheckESD() + CheckARTS() + CheckNAS() + CheckSNDIO() + CheckFusionSound() + endif() + + if(SDL_VIDEO) + CheckX11() + CheckMir() + CheckDirectFB() + CheckOpenGLX11() + CheckOpenGLESX11() + CheckWayland() + endif() + + if(LINUX) + check_c_source_compiles(" + #include + #ifndef EVIOCGNAME + #error EVIOCGNAME() ioctl not available + #endif + int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS) + + check_c_source_compiles(" + #include + #include + + int main(int argc, char **argv) + { + struct kbentry kbe; + kbe.kb_table = KG_CTRL; + ioctl(0, KDGKBENT, &kbe); + }" HAVE_INPUT_KD) + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(HAVE_INPUT_EVENTS) + set(SDL_INPUT_LINUXEV 1) + endif() + + if(SDL_HAPTIC AND HAVE_INPUT_EVENTS) + set(SDL_HAPTIC_LINUX 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + endif() + + if(HAVE_INPUT_KD) + set(SDL_INPUT_LINUXKD 1) + endif() + + check_include_file("libudev.h" HAVE_LIBUDEV_H) + + # !!! FIXME: this needs pkg-config to find the include path, I think. + check_include_file("dbus/dbus.h" HAVE_DBUS_DBUS_H) + endif() + + if(INPUT_TSLIB) + check_c_source_compiles(" + #include \"tslib.h\" + int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB) + if(HAVE_INPUT_TSLIB) + set(SDL_INPUT_TSLIB 1) + list(APPEND EXTRA_LIBS ts) + endif() + endif() + + if(SDL_JOYSTICK) + CheckUSBHID() # seems to be BSD specific - limit the test to BSD only? 
+ if(LINUX) + set(SDL_JOYSTICK_LINUX 1) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + endif() + + CheckPTHREAD() + + if(CLOCK_GETTIME) + check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + list(APPEND EXTRA_LIBS rt) + set(HAVE_CLOCK_GETTIME 1) + else() + check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + set(HAVE_CLOCK_GETTIME 1) + endif() + endif() + endif() + + check_include_file(linux/version.h HAVE_LINUX_VERSION_H) + if(HAVE_LINUX_VERSION_H) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") + endif() + + if(SDL_POWER) + if(LINUX) + set(SDL_POWER_LINUX 1) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_UNIX 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(RPATH) + set(SDL_RLD_FLAGS "") + if(BSDI OR FREEBSD OR LINUX OR NETBSD) + set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") + elseif(SOLARIS) + set(SDL_RLD_FLAGS "-R\${libdir}") + endif() + set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) + set(HAVE_RPATH TRUE) + endif() + +elseif(WINDOWS) + find_program(WINDRES windres) + + check_c_source_compiles(" + #include + int main(int argc, char **argv) { }" HAVE_WIN32_CC) + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + # Check for DirectX + if(DIRECTX) + if("$ENV{DXSDK_DIR}" STREQUAL "") + message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") + endif() + set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") + check_include_file(d3d9.h HAVE_D3D_H) + check_include_file(d3d11_1.h HAVE_D3D11_H) + check_include_file(ddraw.h HAVE_DDRAW_H) + check_include_file(dsound.h HAVE_DSOUND_H) + check_include_file(dinput.h HAVE_DINPUT_H) + check_include_file(xaudio2.h HAVE_XAUDIO2_H) + check_include_file(dxgi.h HAVE_DXGI_H) + if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H OR HAVE_XAUDIO2_H) + set(HAVE_DIRECTX TRUE) + # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks + link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) + include_directories($ENV{DXSDK_DIR}\\Include) + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_WINMM 1) + file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + + if(HAVE_DSOUND_H) + set(SDL_AUDIO_DRIVER_DSOUND 1) + file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) + endif() + + if(HAVE_XAUDIO2_H) + set(SDL_AUDIO_DRIVER_XAUDIO2 1) + file(GLOB XAUDIO2_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/xaudio2/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${XAUDIO2_AUDIO_SOURCES}) + endif() + endif() + + if(SDL_VIDEO) + # requires SDL_LOADSO on Windows (IME, DX, etc.) 
+ if(NOT SDL_LOADSO) + message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") + endif() + set(SDL_VIDEO_DRIVER_WINDOWS 1) + file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) + + if(RENDER_D3D AND HAVE_D3D_H) + set(SDL_VIDEO_RENDER_D3D 1) + set(HAVE_RENDER_D3D TRUE) + endif() + if(RENDER_D3D AND HAVE_D3D11_H) + set(SDL_VIDEO_RENDER_D3D11 1) + set(HAVE_RENDER_D3D TRUE) + endif() + set(HAVE_SDL_VIDEO TRUE) + endif() + + if(SDL_THREADS) + set(SDL_THREAD_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c + ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) + set(HAVE_SDL_THREADS TRUE) + endif() + + if(SDL_POWER) + set(SDL_POWER_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) + set(HAVE_SDL_POWER TRUE) + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_WINDOWS 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Libraries for Win32 native and MinGW + list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) + + # TODO: in configure.in the check for timers is set on + # cygwin | mingw32* - does this include mingw32CE? + if(SDL_TIMERS) + set(SDL_TIMER_WINDOWS 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(SDL_LOADSO) + set(SDL_LOADSO_WINDOWS 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) + set(HAVE_SDL_LOADSO TRUE) + endif() + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(SDL_VIDEO) + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_WGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + if(SDL_JOYSTICK) + if(HAVE_DINPUT_H) + set(SDL_JOYSTICK_DINPUT 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_dxjoystick.c) + list(APPEND EXTRA_LIBS dinput8 dxguid dxerr) + else() + set(SDL_JOYSTICK_WINMM 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_mmjoystick.c) + endif() + set(HAVE_SDL_JOYSTICK TRUE) + endif() + + if(SDL_HAPTIC AND HAVE_DINPUT_H) + set(SDL_HAPTIC_DINPUT 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/haptic/windows/SDL_syshaptic.c) + set(HAVE_SDL_HAPTIC TRUE) + endif() + + file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) + if(MINGW OR CYGWIN) + list(APPEND EXTRA_LIBS mingw32) + list(APPEND EXTRA_LDFLAGS "-mwindows") + set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") + list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") + endif() +elseif(APPLE) + # TODO: rework this for proper MacOS X, iOS and Darwin support + + # Requires the darwin file implementation + if(SDL_FILE) + file(GLOB EXTRA_SOURCES ${PROJECT_SOURCE_DIR}/src/file/cocoa/*.m) + set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) + set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) + set(HAVE_SDL_FILE TRUE) + 
set(SDL_FRAMEWORK_COCOA 1) + else() + message_error("SDL_FILE must be enabled to build on MacOS X") + endif() + + if(SDL_AUDIO) + set(MACOSX_COREAUDIO 1) + file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + set(SDL_FRAMEWORK_COREAUDIO 1) + set(SDL_FRAMEWORK_AUDIOUNIT 1) + endif() + + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_IOKIT 1) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + endif() + + if(SDL_HAPTIC) + set(SDL_HAPTIC_IOKIT 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + if(NOT SDL_JOYSTICK) + message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") + endif() + endif() + + if(SDL_POWER) + set(SDL_POWER_MACOSX 1) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + set(SDL_FRAMEWORK_CARBON 1) + set(SDL_FRAMEWORK_IOKIT 1) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_COCOA 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) + set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Actually load the frameworks at the end so we don't duplicate include. + if(SDL_FRAMEWORK_COCOA) + find_library(COCOA_LIBRARY Cocoa) + list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) + endif() + if(SDL_FRAMEWORK_IOKIT) + find_library(IOKIT IOKit) + list(APPEND EXTRA_LIBS ${IOKIT}) + endif() + if(SDL_FRAMEWORK_FF) + find_library(FORCEFEEDBACK ForceFeedback) + list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) + endif() + if(SDL_FRAMEWORK_CARBON) + find_library(CARBON_LIBRARY Carbon) + list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) + endif() + if(SDL_FRAMEWORK_COREAUDIO) + find_library(COREAUDIO CoreAudio) + list(APPEND EXTRA_LIBS ${COREAUDIO}) + endif() + if(SDL_FRAMEWORK_AUDIOUNIT) + find_library(AUDIOUNIT AudioUnit) + list(APPEND EXTRA_LIBS ${AUDIOUNIT}) + endif() + + # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
+ if(SDL_VIDEO) + CheckCOCOA() + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_CGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + if(DARWIN) + find_library(OpenGL_LIBRARY OpenGL) + list(APPEND EXTRA_LIBRARIES ${OpenGL_LIBRARY}) + endif() + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + CheckPTHREAD() +elseif(HAIKU) + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_HAIKU 1) + file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + set(SDL_FILESYSTEM_HAIKU 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + + if(SDL_TIMERS) + set(SDL_TIMER_HAIKU 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(VIDEO_OPENGL) + # TODO: Use FIND_PACKAGE(OpenGL) instead + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_BGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + list(APPEND EXTRA_LIBS GL) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + CheckPTHREAD() +endif() + +# Dummies +# configure.in does it differently: +# if not have X +# if enable_X { SDL_X_DISABLED = 1 } +# [add dummy sources] +# so it always adds a dummy, without checking, if it was actually requested. +# This leads to missing internal references on building, since the +# src/X/*.c does not get included. +if(NOT HAVE_SDL_JOYSTICK) + set(SDL_JOYSTICK_DISABLED 1) + if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX + + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + endif() +endif() +if(NOT HAVE_SDL_HAPTIC) + set(SDL_HAPTIC_DISABLED 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(NOT HAVE_SDL_LOADSO) + set(SDL_LOADSO_DISABLED 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) +endif() +if(NOT HAVE_SDL_FILESYSTEM) + set(SDL_FILESYSTEM_DISABLED 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) +endif() + +# We always need to have threads and timers around +if(NOT HAVE_SDL_THREADS) + set(SDL_THREADS_DISABLED 1) + file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) +endif() +if(NOT HAVE_SDL_TIMERS) + set(SDL_TIMERS_DISABLED 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) +endif() + +if(NOT SDLMAIN_SOURCES) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) +endif() + +# Append the -MMD -MT flags +# if(DEPENDENCY_TRACKING) +# if(COMPILER_IS_GNUCC) +# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") +# endif() +# endif() + +configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" + "${SDL2_BINARY_DIR}/include/SDL_config.h") + +# Prepare the flags and remove duplicates +if(EXTRA_LDFLAGS) + list(REMOVE_DUPLICATES EXTRA_LDFLAGS) +endif() +if(EXTRA_LIBS) + list(REMOVE_DUPLICATES EXTRA_LIBS) +endif() +if(EXTRA_CFLAGS) + list(REMOVE_DUPLICATES EXTRA_CFLAGS) +endif() +listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) +set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) + +# Compat helpers for the configuration files +if(NOT WINDOWS OR 
CYGWIN) + # TODO: we need a Windows script, too + execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) + + set(prefix ${CMAKE_INSTALL_PREFIX}) + set(exec_prefix "\${prefix}") + set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") + set(bindir "\${exec_prefix}/bin") + set(includedir "\${prefix}/include") + if(SDL_STATIC) + set(ENABLE_STATIC_TRUE "") + set(ENABLE_STATIC_FALSE "#") + else() + set(ENABLE_STATIC_TRUE "#") + set(ENABLE_STATIC_FALSE "") + endif() + if(SDL_SHARED) + set(ENABLE_SHARED_TRUE "") + set(ENABLE_SHARED_FALSE "#") + else() + set(ENABLE_SHARED_TRUE "#") + set(ENABLE_SHARED_FALSE "") + endif() + + # Clean up the different lists + listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") + set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) + list(REMOVE_DUPLICATES SDL_STATIC_LIBS) + listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) + set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) + listtostr(SDL_LIBS _SDL_LIBS) + set(SDL_LIBS ${_SDL_LIBS}) + + # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") + # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") + + configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" + "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config") + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" + "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) +endif() + +##### Info output ##### +message(STATUS "") +message(STATUS "SDL2 was configured with the following options:") +message(STATUS "") +message(STATUS "Platform: ${CMAKE_SYSTEM}") +message(STATUS "64-bit: ${ARCH_64}") +message(STATUS "Compiler: ${CMAKE_C_COMPILER}") +message(STATUS "") +message(STATUS "Subsystems:") +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + message_bool_option(${_SUB} SDL_${_OPT}) +endforeach() +message(STATUS "") +message(STATUS "Options:") +list(SORT ALLOPTIONS) +foreach(_OPT ${ALLOPTIONS}) + # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters + # Get the padding + string(LENGTH ${_OPT} _OPTLEN) + math(EXPR _PADLEN "23 - ${_OPTLEN}") + string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) + message_tested_option(${_OPT} ${_PADDING}) +endforeach() +message(STATUS "") +message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") +message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") +message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") +message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") +message(STATUS "") +message(STATUS " Build Shared Library: ${SDL_SHARED}") +message(STATUS " Build Static Library: ${SDL_STATIC}") +message(STATUS "") +if(UNIX) + message(STATUS "If something was not detected, although the libraries") + message(STATUS "were installed, then make sure you have set the") + message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") + message(STATUS "") +endif() + +# Ensure that the extra cflags are used at compile time +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") + +# Always build SDLmain +add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) +set(_INSTALL_LIBS "SDL2main") + +if(SDL_SHARED) + add_library(SDL2 SHARED ${SOURCE_FILES}) + if(UNIX) + set_target_properties(SDL2 PROPERTIES + VERSION ${LT_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2-${LT_RELEASE}") + else() + set_target_properties(SDL2 PROPERTIES + VERSION ${SDL_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2") + endif() + set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) + target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) +endif() + +if(SDL_STATIC) 
+ set (BUILD_SHARED_LIBS FALSE) + add_library(SDL2-static STATIC ${SOURCE_FILES}) + set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") + if(MSVC) + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") + endif() + # TODO: Win32 platforms keep the same suffix .lib for import and static + # libraries - do we need to consider this? + set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) + target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) +endif() + +##### Installation targets ##### +install(TARGETS ${_INSTALL_LIBS} + LIBRARY DESTINATION "lib${LIB_SUFFIX}" + ARCHIVE DESTINATION "lib${LIB_SUFFIX}") + +file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) +file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) +foreach(_FNAME ${BIN_INCLUDE_FILES}) + get_filename_component(_INCNAME ${_FNAME} NAME) + list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) +endforeach() +list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) +install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) + +if(NOT WINDOWS OR CYGWIN) + if(SDL_SHARED) + install(CODE " + execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink + \"libSDL2-2.0.so\" \"libSDL2.so\")") + install(FILES ${SDL2_BINARY_DIR}/libSDL2.so DESTINATION "lib${LIB_SUFFIX}") + endif() + if(FREEBSD) + # FreeBSD uses ${PREFIX}/libdata/pkgconfig + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") + else() + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc + DESTINATION "lib${LIB_SUFFIX}/pkgconfig") + endif() + install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) + # TODO: what about the .spec file? Is it only needed for RPM creation? + install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "share/aclocal") +else() + install(TARGETS SDL2 RUNTIME DESTINATION bin) +endif() + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt new file mode 100644 index 0000000..bbad766 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt @@ -0,0 +1,1849 @@ + + +cmake_minimum_required(VERSION 2.8.11) +project(SDL2 C) + +# !!! FIXME: this should probably do "MACOSX_RPATH ON" as a target property +# !!! FIXME: for the SDL2 shared library (so you get an +# !!! FIXME: install_name ("soname") of "@rpath/libSDL-whatever.dylib" +# !!! FIXME: instead of "/usr/local/lib/libSDL-whatever.dylib"), but I'm +# !!! FIXME: punting for now and leaving the existing behavior. Until this +# !!! FIXME: properly resolved, this line silences a warning in CMake 3.0+. +# !!! FIXME: remove it and this comment entirely once the problem is +# !!! FIXME: properly resolved. 
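+# What the FIXME above describes would roughly look like the following, kept
+# commented out here to preserve the existing behavior (SDL2 is the shared
+# library target defined further down in this file):
+#   set_target_properties(SDL2 PROPERTIES MACOSX_RPATH ON)
+# giving the dylib an @rpath-based install_name instead of an absolute
+# /usr/local/lib path.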
+#cmake_policy(SET CMP0042 OLD) + +include(CheckFunctionExists) +include(CheckLibraryExists) +include(CheckIncludeFiles) +include(CheckIncludeFile) +include(CheckSymbolExists) +include(CheckCSourceCompiles) +include(CheckCSourceRuns) +include(CheckCCompilerFlag) +include(CheckTypeSize) +include(CheckStructHasMember) +include(CMakeDependentOption) +include(FindPkgConfig) +include(GNUInstallDirs) +set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") +include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) +include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) + +# General settings +# Edit include/SDL_version.h and change the version, then: +# SDL_MICRO_VERSION += 1; +# SDL_INTERFACE_AGE += 1; +# SDL_BINARY_AGE += 1; +# if any functions have been added, set SDL_INTERFACE_AGE to 0. +# if backwards compatibility has been broken, +# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. +set(SDL_MAJOR_VERSION 2) +set(SDL_MINOR_VERSION 0) +set(SDL_MICRO_VERSION 8) +set(SDL_INTERFACE_AGE 0) +set(SDL_BINARY_AGE 8) +set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") + +# Set defaults preventing destination file conflicts +set(SDL_CMAKE_DEBUG_POSTFIX "d" + CACHE STRING "Name suffix for debug builds") + +mark_as_advanced(CMAKE_IMPORT_LIBRARY_SUFFIX SDL_CMAKE_DEBUG_POSTFIX) + +# Calculate a libtool-like version number +math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") +set(LT_REVISION "${SDL_INTERFACE_AGE}") +set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") +set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") + +message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") + +# General settings & flags +set(LIBRARY_OUTPUT_DIRECTORY "build") +# Check for 64 or 32 bit +set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) +if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(ARCH_64 TRUE) + set(PROCESSOR_ARCH "x64") +else() + set(ARCH_64 FALSE) + set(PROCESSOR_ARCH "x86") +endif() +set(LIBNAME SDL2) +if(NOT LIBTYPE) + set(LIBTYPE SHARED) +endif() + +# Get the platform +if(WIN32) + if(NOT WINDOWS) + set(WINDOWS TRUE) + endif() +elseif(UNIX AND NOT APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") + set(LINUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") + set(NETBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") + set(OPENBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") + set(GNU TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") + set(BSDI TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") + set(SYSV5 TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") + set(SOLARIS TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") + set(HPUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") + set(AIX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") + set(MINIX TRUE) + endif() +elseif(APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") + set(DARWIN TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") + set(MACOSX TRUE) + endif() + # TODO: iOS? 
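+ # TODO: the IOS variable tested by the Apple-specific blocks further down is
+ # expected to come from the toolchain file; a rough sketch of detecting it
+ # here (assumes a toolchain that sets CMAKE_SYSTEM_NAME accordingly) would be:
+ #   if(CMAKE_SYSTEM_NAME STREQUAL "iOS")
+ #     set(IOS TRUE)
+ #   endif()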
+elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") + message_error("BeOS support has been removed as of SDL 2.0.2.") +elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") + set(HAIKU TRUE) +endif() + +# Don't mistake osx for unix +if(UNIX AND NOT APPLE) + set(UNIX_SYS ON) +else() + set(UNIX_SYS OFF) +endif() + +if(UNIX OR APPLE) + set(UNIX_OR_MAC_SYS ON) +else() + set(UNIX_OR_MAC_SYS OFF) +endif() + +if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. + set(SDL_PTHREADS_ENABLED_BY_DEFAULT ON) +else() + set(SDL_PTHREADS_ENABLED_BY_DEFAULT OFF) +endif() + +# Default option knobs +if(APPLE OR ARCH_64) + if(NOT "${CMAKE_OSX_ARCHITECTURES}" MATCHES "arm") + set(OPT_DEF_SSEMATH ON) + endif() +endif() +if(UNIX OR MINGW OR MSYS) + set(OPT_DEF_LIBC ON) +endif() + +# Compiler info +if(CMAKE_COMPILER_IS_GNUCC) + set(USE_GCC TRUE) + set(OPT_DEF_ASM TRUE) +elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") + set(USE_CLANG TRUE) + set(OPT_DEF_ASM TRUE) +elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ + set(OPT_DEF_ASM TRUE) + #set(CMAKE_C_FLAGS "/ZI /WX- / +else() + set(OPT_DEF_ASM FALSE) +endif() + +if(USE_GCC OR USE_CLANG) + set(OPT_DEF_GCC_ATOMICS ON) +endif() + +# Default flags, if not set otherwise +if("$ENV{CFLAGS}" STREQUAL "") + if(CMAKE_BUILD_TYPE STREQUAL "") + if(USE_GCC OR USE_CLANG) + set(CMAKE_C_FLAGS "-g -O3") + endif() + endif() +else() + set(CMAKE_C_FLAGS "$ENV{CFLAGS}") + list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") +endif() +if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 + list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") +endif() + +if(MSVC) + option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) + if(FORCE_STATIC_VCRT) + foreach(flag_var + CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif() + endforeach() + endif() + + # Make sure /RTC1 is disabled, otherwise it will use functions from the CRT + foreach(flag_var + CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) + string(REGEX REPLACE "/RTC(su|[1su])" "" ${flag_var} "${${flag_var}}") + endforeach(flag_var) +endif() + +# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, +# etc. are created correctly. +set(SDL_LIBS "-lSDL2") +set(SDL_CFLAGS "") + +# Emscripten toolchain has a nonempty default value for this, and the checks +# in this file need to change that, so remember the original value, and +# restore back to that afterwards. For check_function_exists() to work in +# Emscripten, this value must be at its default value. 
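+# The save/restore pattern used throughout this file looks like:
+#   set(CMAKE_REQUIRED_FLAGS "-some-flag")                  # affects the next probe only
+#   check_c_source_compiles("..." HAVE_SOMETHING)
+#   set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS})  # restore the default
+# ORIG_CMAKE_REQUIRED_FLAGS is captured once, directly below.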
+set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) + +if(CYGWIN) + # We build SDL on cygwin without the UNIX emulation layer + include_directories("-I/usr/include/mingw") + set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mno-cygwin") + check_c_source_compiles("int main(int argc, char **argv) {}" + HAVE_GCC_NO_CYGWIN) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_GCC_NO_CYGWIN) + list(APPEND EXTRA_LDFLAGS "-mno-cygwin") + list(APPEND SDL_LIBS "-mno-cygwin") + endif() + set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") +endif() + +add_definitions(-DUSING_GENERATED_CONFIG_H) +# General includes +include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) +if(USE_GCC OR USE_CLANG) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -idirafter ${SDL2_SOURCE_DIR}/src/video/khronos") +else() + include_directories(${SDL2_SOURCE_DIR}/src/video/khronos) +endif() + +# All these ENABLED_BY_DEFAULT vars will default to ON if not specified, so +# you only need to have a platform override them if they are disabling. +set(OPT_DEF_ASM TRUE) +if(EMSCRIPTEN) + # Set up default values for the currently supported set of subsystems: + # Emscripten/Javascript does not have assembly support, a dynamic library + # loading architecture, low-level CPU inspection or multithreading. + set(OPT_DEF_ASM FALSE) + set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) + set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) + set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) + set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) + set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) + set(SDL_DLOPEN_ENABLED_BY_DEFAULT OFF) +endif() + +if (NOT DEFINED SDL_SHARED_ENABLED_BY_DEFAULT) + set(SDL_SHARED_ENABLED_BY_DEFAULT ON) +endif() + +set(SDL_SUBSYSTEMS + Atomic Audio Video Render Events Joystick Haptic Power Threads Timers + File Loadso CPUinfo Filesystem Dlopen) +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) + set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) + endif() + option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) +endforeach() + +option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") +#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) +set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) +set_option(GCC_ATOMICS "Use gcc builtin atomics" ${OPT_DEF_GCC_ATOMICS}) +set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) +set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) +set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) +set_option(3DNOW "Use 3Dnow! 
MMX assembly routines" ${OPT_DEF_ASM}) +set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) +set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) +set_option(SSE3 "Use SSE3 assembly routines" ${OPT_DEF_SSEMATH}) +set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) +set_option(DISKAUDIO "Support the disk writer audio driver" ON) +set_option(DUMMYAUDIO "Support the dummy audio driver" ON) +set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) +dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) +set_option(VIDEO_DUMMY "Use dummy video driver" ON) +set_option(VIDEO_OPENGL "Include OpenGL support" ON) +set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) +set_option(PTHREADS "Use POSIX threads for multi-threading" ${SDL_PTHREADS_ENABLED_BY_DEFAULT}) +dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) +set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${SDL_DLOPEN_ENABLED_BY_DEFAULT}) +set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) +set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) +dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) +set_option(JACK "Support the JACK audio API" ${UNIX_SYS}) +dep_option(JACK_SHARED "Dynamically load JACK audio support" ON "JACK" OFF) +set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) +dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) +set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) +dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) +set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) +dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) +set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) +set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) +set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) +set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) +dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND" OFF) +set_option(LIBSAMPLERATE "Use libsamplerate for audio rate conversion" ${UNIX_SYS}) +dep_option(LIBSAMPLERATE_SHARED "Dynamically load libsamplerate" ON "LIBSAMPLERATE" OFF) +set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) +set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) +set_option(INPUT_TSLIB "Use the Touchscreen library for input" ${UNIX_SYS}) +set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) +set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) +dep_option(WAYLAND_SHARED "Dynamically load Wayland support" ON "VIDEO_WAYLAND" OFF) +dep_option(VIDEO_WAYLAND_QT_TOUCH "QtWayland server support for Wayland video driver" ON "VIDEO_WAYLAND" OFF) +set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) +dep_option(MIR_SHARED "Dynamically load Mir support" ON "VIDEO_MIR" OFF) +set_option(VIDEO_RPI "Use Raspberry Pi video driver" ${UNIX_SYS}) +dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) +set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) +foreach(_SUB ${SDL_X11_OPTIONS}) + string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) + dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) +endforeach() +set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) +set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) +set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) +set_option(VIDEO_VIVANTE 
"Use Vivante EGL video driver" ${UNIX_SYS}) +dep_option(VIDEO_VULKAN "Enable Vulkan support" ON "ANDROID OR APPLE OR LINUX OR WINDOWS" OFF) +set_option(VIDEO_KMSDRM "Use KMS DRM video driver" ${UNIX_SYS}) +dep_option(KMSDRM_SHARED "Dynamically load KMS DRM support" ON "VIDEO_KMSDRM" OFF) + +# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here +# The options below are for compatibility to configure's default behaviour. +# set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") +set(SDL_SHARED ON CACHE BOOL "Build a shared version of the library") +set(SDL_STATIC OFF CACHE BOOL "Build a static version of the library") + +dep_option(SDL_STATIC_PIC "Static version of the library should be built with Position Independent Code" OFF "SDL_STATIC" OFF) +set_option(SDL_TEST "Build the test directory" OFF) + + +# General source files +file(GLOB SOURCE_FILES + ${SDL2_SOURCE_DIR}/src/*.c + ${SDL2_SOURCE_DIR}/src/atomic/*.c + ${SDL2_SOURCE_DIR}/src/audio/*.c + ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c + ${SDL2_SOURCE_DIR}/src/dynapi/*.c + ${SDL2_SOURCE_DIR}/src/events/*.c + ${SDL2_SOURCE_DIR}/src/file/*.c + ${SDL2_SOURCE_DIR}/src/libm/*.c + ${SDL2_SOURCE_DIR}/src/render/*.c + ${SDL2_SOURCE_DIR}/src/render/*/*.c + ${SDL2_SOURCE_DIR}/src/stdlib/*.c + ${SDL2_SOURCE_DIR}/src/thread/*.c + ${SDL2_SOURCE_DIR}/src/timer/*.c + ${SDL2_SOURCE_DIR}/src/video/*.c + ${SDL2_SOURCE_DIR}/src/video/yuv2rgb/*.c) + + +if(ASSERTIONS STREQUAL "auto") + # Do nada - use optimization settings to determine the assertion level +elseif(ASSERTIONS STREQUAL "disabled") + set(SDL_DEFAULT_ASSERT_LEVEL 0) +elseif(ASSERTIONS STREQUAL "release") + set(SDL_DEFAULT_ASSERT_LEVEL 1) +elseif(ASSERTIONS STREQUAL "enabled") + set(SDL_DEFAULT_ASSERT_LEVEL 2) +elseif(ASSERTIONS STREQUAL "paranoid") + set(SDL_DEFAULT_ASSERT_LEVEL 3) +else() + message_error("unknown assertion level") +endif() +set(HAVE_ASSERTIONS ${ASSERTIONS}) + +# Compiler option evaluation +if(USE_GCC OR USE_CLANG) + # Check for -Wall first, so later things can override pieces of it. 
+ check_c_compiler_flag(-Wall HAVE_GCC_WALL) + if(HAVE_GCC_WALL) + list(APPEND EXTRA_CFLAGS "-Wall") + if(HAIKU) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") + endif() + endif() + + check_c_compiler_flag(-Wdeclaration-after-statement HAVE_GCC_WDECLARATION_AFTER_STATEMENT) + if(HAVE_GCC_WDECLARATION_AFTER_STATEMENT) + check_c_compiler_flag(-Werror=declaration-after-statement HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) + if(HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) + list(APPEND EXTRA_CFLAGS "-Werror=declaration-after-statement") + endif() + list(APPEND EXTRA_CFLAGS "-Wdeclaration-after-statement") + endif() + + if(DEPENDENCY_TRACKING) + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 3 + #error Dependency tracking requires GCC 3.0 or newer + #endif + int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) + endif() + + if(GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + void *x, *y, *z; + __sync_lock_test_and_set(&a, 4); + __sync_lock_test_and_set(&x, y); + __sync_fetch_and_add(&a, 1); + __sync_bool_compare_and_swap(&a, 5, 10); + __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) + if(NOT HAVE_GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + __sync_lock_test_and_set(&a, 1); + __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) + endif() + endif() + + set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") + check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" + HAVE_GCC_PREFERRED_STACK_BOUNDARY) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror") + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 4 + #error SDL only uses visibility attributes in GCC 4 or newer + #endif + int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) + if(HAVE_GCC_FVISIBILITY) + list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + check_c_compiler_flag(-Wshadow HAVE_GCC_WSHADOW) + if(HAVE_GCC_WSHADOW) + list(APPEND EXTRA_CFLAGS "-Wshadow") + endif() + + if(APPLE) + list(APPEND EXTRA_LDFLAGS "-Wl,-undefined,error") + else() + set(CMAKE_REQUIRED_FLAGS "-Wl,--no-undefined") + check_c_compiler_flag("" HAVE_NO_UNDEFINED) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_NO_UNDEFINED) + list(APPEND EXTRA_LDFLAGS "-Wl,--no-undefined") + endif() + endif() +endif() + +if(ASSEMBLY) + if(USE_GCC OR USE_CLANG) + set(SDL_ASSEMBLY_ROUTINES 1) + # TODO: Those all seem to be quite GCC specific - needs to be + # reworked for better compiler support + set(HAVE_ASSEMBLY TRUE) + if(MMX) + set(CMAKE_REQUIRED_FLAGS "-mmmx") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __MMX__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_MMX) + if(HAVE_MMX) + list(APPEND EXTRA_CFLAGS "-mmmx") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(3DNOW) + set(CMAKE_REQUIRED_FLAGS "-m3dnow") + check_c_source_compiles(" + #include + #ifndef __3dNOW__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { + void *p = 0; + _m_prefetch(p); + }" HAVE_3DNOW) + if(HAVE_3DNOW) + list(APPEND EXTRA_CFLAGS "-m3dnow") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE) + 
set(CMAKE_REQUIRED_FLAGS "-msse") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE) + if(HAVE_SSE) + list(APPEND EXTRA_CFLAGS "-msse") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE2) + set(CMAKE_REQUIRED_FLAGS "-msse2") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE2__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE2) + if(HAVE_SSE2) + list(APPEND EXTRA_CFLAGS "-msse2") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE3) + set(CMAKE_REQUIRED_FLAGS "-msse3") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE3__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE3) + if(HAVE_SSE3) + list(APPEND EXTRA_CFLAGS "-msse3") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(NOT SSEMATH) + if(SSE OR SSE2 OR SSE3) + if(USE_GCC) + check_c_compiler_flag(-mfpmath=387 HAVE_FP_387) + if(HAVE_FP_387) + list(APPEND EXTRA_CFLAGS "-mfpmath=387") + endif() + endif() + set(HAVE_SSEMATH TRUE) + endif() + endif() + + check_include_file("immintrin.h" HAVE_IMMINTRIN_H) + + if(ALTIVEC) + set(CMAKE_REQUIRED_FLAGS "-maltivec") + check_c_source_compiles(" + #include + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) + check_c_source_compiles(" + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set + list(APPEND EXTRA_CFLAGS "-maltivec") + set(SDL_ALTIVEC_BLITTERS 1) + if(HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC_H 1) + endif() + endif() + endif() + elseif(MSVC_VERSION GREATER 1500) + # TODO: SDL_cpuinfo.h needs to support the user's configuration wish + # for MSVC - right now it is always activated + if(NOT ARCH_64) + set(HAVE_MMX TRUE) + set(HAVE_3DNOW TRUE) + endif() + set(HAVE_SSE TRUE) + set(HAVE_SSE2 TRUE) + set(HAVE_SSE3 TRUE) + set(SDL_ASSEMBLY_ROUTINES 1) + endif() +# TODO: +#else() +# if(USE_GCC OR USE_CLANG) +# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-sse3" "-mno-mmx") +# endif() +endif() + +# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define +# anything. +if(LIBC) + if(WINDOWS AND NOT MINGW) + set(HAVE_LIBC TRUE) + foreach(_HEADER stdio.h string.h wchar.h ctype.h math.h limits.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." 
"_" _HAVE_H ${_UPPER}) + set(${_HAVE_H} 1) + endforeach() + set(HAVE_SIGNAL_H 1) + foreach(_FN + malloc calloc realloc free qsort abs memset memcpy memmove memcmp + wcslen wcscmp + strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp + _stricmp _strnicmp sscanf + acos acosf asin asinf atan atanf atan2 atan2f ceil ceilf + copysign copysignf cos cosf fabs fabsf floor floorf fmod fmodf + log logf log10 log10f pow powf scalbn scalbnf sin sinf sqrt sqrtf tan tanf) + string(TOUPPER ${_FN} _UPPER) + set(HAVE_${_UPPER} 1) + endforeach() + if(NOT CYGWIN AND NOT MINGW) + set(HAVE_ALLOCA 1) + endif() + set(HAVE_M_PI 1) + add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI + set(STDC_HEADERS 1) + else() + set(HAVE_LIBC TRUE) + check_include_file(sys/types.h HAVE_SYS_TYPES_H) + foreach(_HEADER + stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h limits.h + strings.h wchar.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h libunwind.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." "_" _HAVE_H ${_UPPER}) + check_include_file("${_HEADER}" ${_HAVE_H}) + endforeach() + + check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) + check_type_size("size_t" SIZEOF_SIZE_T) + check_symbol_exists(M_PI math.h HAVE_M_PI) + # TODO: refine the mprotect check + check_c_source_compiles("#include + #include + int main() { }" HAVE_MPROTECT) + foreach(_FN + strtod malloc calloc realloc free getenv setenv putenv unsetenv + qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat + _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull + atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp + vsscanf vsnprintf fopen64 fseeko fseeko64 sigaction setjmp + nanosleep sysconf sysctlbyname getauxval poll + ) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + + check_library_exists(m pow "" HAVE_LIBM) + if(HAVE_LIBM) + set(CMAKE_REQUIRED_LIBRARIES m) + foreach(_FN + atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin + sinf sqrt sqrtf tan tanf acos asin) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + set(CMAKE_REQUIRED_LIBRARIES) + list(APPEND EXTRA_LIBS m) + endif() + + check_library_exists(iconv iconv_open "" HAVE_LIBICONV) + if(HAVE_LIBICONV) + list(APPEND EXTRA_LIBS iconv) + set(HAVE_ICONV 1) + endif() + + if(NOT APPLE) + check_include_file(alloca.h HAVE_ALLOCA_H) + check_function_exists(alloca HAVE_ALLOCA) + else() + set(HAVE_ALLOCA_H 1) + set(HAVE_ALLOCA 1) + endif() + + check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) + endif() +else() + if(WINDOWS) + set(HAVE_STDARG_H 1) + set(HAVE_STDDEF_H 1) + endif() +endif() + + +# Enable/disable various subsystems of the SDL library +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if(NOT SDL_${_OPT}) + set(SDL_${_OPT}_DISABLED 1) + endif() +endforeach() +if(SDL_JOYSTICK) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) +endif() +if(SDL_HAPTIC) + if(NOT SDL_JOYSTICK) + # Haptic requires some private functions from the joystick subsystem. 
+ message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") + endif() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(SDL_POWER) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) +endif() +# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: +# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set +# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), +# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings + +# General SDL subsystem options, valid for all platforms +if(SDL_AUDIO) + # CheckDummyAudio/CheckDiskAudio - valid for all platforms + if(DUMMYAUDIO) + set(SDL_AUDIO_DRIVER_DUMMY 1) + file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) + set(HAVE_DUMMYAUDIO TRUE) + endif() + if(DISKAUDIO) + set(SDL_AUDIO_DRIVER_DISK 1) + file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) + set(HAVE_DISKAUDIO TRUE) + endif() +endif() + +if(SDL_DLOPEN) + # Relevant for Unix/Darwin only + if(UNIX OR APPLE) + CheckDLOPEN() + endif() +endif() + +if(SDL_VIDEO) + if(VIDEO_DUMMY) + set(SDL_VIDEO_DRIVER_DUMMY 1) + file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) + set(HAVE_VIDEO_DUMMY TRUE) + set(HAVE_SDL_VIDEO TRUE) + endif() +endif() + +if(ANDROID) + file(GLOB ANDROID_CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_CORE_SOURCES}) + + # SDL_spinlock.c Needs to be compiled in ARM mode. + # There seems to be no better way currently to set the ARM mode. 
+ # see: https://issuetracker.google.com/issues/62264618 + # Another option would be to set ARM mode to all compiled files + check_c_compiler_flag(-marm HAVE_ARM_MODE) + if(HAVE_ARM_MODE) + set_source_files_properties(${SDL2_SOURCE_DIR}/src/atomic/SDL_spinlock.c PROPERTIES COMPILE_FLAGS -marm) + endif() + + file(GLOB ANDROID_MAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/android/*.c) + set(SDLMAIN_SOURCES ${SDLMAIN_SOURCES} ${ANDROID_MAIN_SOURCES}) + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_ANDROID 1) + file(GLOB ANDROID_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_ANDROID 1) + file(GLOB ANDROID_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + if(SDL_HAPTIC) + set(SDL_HAPTIC_ANDROID 1) + file(GLOB ANDROID_HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + endif() + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_ANDROID 1) + file(GLOB ANDROID_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/android/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + if(SDL_LOADSO) + set(SDL_LOADSO_DLOPEN 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dlopen/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) + set(HAVE_SDL_LOADSO TRUE) + endif() + if(SDL_POWER) + set(SDL_POWER_ANDROID 1) + file(GLOB ANDROID_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_ANDROID 1) + file(GLOB ANDROID_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_VIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + # Core stuff + find_library(ANDROID_DL_LIBRARY dl) + find_library(ANDROID_LOG_LIBRARY log) + find_library(ANDROID_LIBRARY_LIBRARY android) + list(APPEND EXTRA_LIBS ${ANDROID_DL_LIBRARY} ${ANDROID_LOG_LIBRARY} ${ANDROID_LIBRARY_LIBRARY}) + add_definitions(-DGL_GLEXT_PROTOTYPES) + + #enable gles + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(HAVE_VIDEO_OPENGLES TRUE) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + + find_library(OpenGLES1_LIBRARY GLESv1_CM) + find_library(OpenGLES2_LIBRARY GLESv2) + list(APPEND EXTRA_LIBS ${OpenGLES1_LIBRARY} ${OpenGLES2_LIBRARY}) + endif() + + CHECK_C_SOURCE_COMPILES(" + #if defined(__ARM_ARCH) && __ARM_ARCH < 7 + #error Vulkan doesn't work on this configuration + #endif + int main() + { + return 0; + } + " VULKAN_PASSED_ANDROID_CHECKS) + if(NOT VULKAN_PASSED_ANDROID_CHECKS) + set(VIDEO_VULKAN OFF) + message(STATUS "Vulkan doesn't work on this configuration") + endif() + endif() + + CheckPTHREAD() + +endif() + +# Platform-specific options and settings +if(EMSCRIPTEN) + # Hide noisy warnings that intend to aid mostly during initial stages of porting a new + # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
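+ # (-Wno-warn-absolute-paths below is an Emscripten/clang-specific switch; it
+ # is only added in this EMSCRIPTEN-only branch.)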
+ add_definitions(-Wno-warn-absolute-paths) + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_EMSCRIPTEN 1) + file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_EMSCRIPTEN 1) + file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_EMSCRIPTEN 1) + file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + if(SDL_POWER) + set(SDL_POWER_EMSCRIPTEN 1) + file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + + if(CLOCK_GETTIME) + set(HAVE_CLOCK_GETTIME 1) + endif() + endif() + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_EMSCRIPTEN 1) + file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + #enable gles + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(HAVE_VIDEO_OPENGLES TRUE) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + endif() + endif() +elseif(UNIX AND NOT APPLE AND NOT ANDROID) + if(SDL_AUDIO) + if(SYSV5 OR SOLARIS OR HPUX) + set(SDL_AUDIO_DRIVER_SUNAUDIO 1) + file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(NETBSD) + set(SDL_AUDIO_DRIVER_NETBSD 1) + file(GLOB NETBSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/netbsd/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${NETBSD_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(AIX) + set(SDL_AUDIO_DRIVER_PAUDIO 1) + file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + CheckOSS() + CheckALSA() + CheckJACK() + CheckPulseAudio() + CheckESD() + CheckARTS() + CheckNAS() + CheckSNDIO() + CheckFusionSound() + CheckLibSampleRate() + endif() + + if(SDL_VIDEO) + # Need to check for Raspberry PI first and add platform specific compiler flags, otherwise the test for GLES fails! 
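+ # The Check*() helpers called below (CheckRPI, CheckX11, CheckWayland, ...)
+ # are macros from cmake/sdlchecks.cmake, included near the top of this file;
+ # each one probes for its video backend and appends the corresponding driver
+ # sources and libraries when the backend is usable.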
+ CheckRPI()
+ CheckX11()
+ CheckMir()
+ CheckDirectFB()
+ CheckOpenGLX11()
+ CheckOpenGLESX11()
+ CheckWayland()
+ CheckVivante()
+ CheckKMSDRM()
+ endif()
+
+ if(UNIX)
+ file(GLOB CORE_UNIX_SOURCES ${SDL2_SOURCE_DIR}/src/core/unix/*.c)
+ set(SOURCE_FILES ${SOURCE_FILES} ${CORE_UNIX_SOURCES})
+ endif()
+
+ if(LINUX)
+ check_c_source_compiles("
+ #include <linux/input.h>
+ #ifndef EVIOCGNAME
+ #error EVIOCGNAME() ioctl not available
+ #endif
+ int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS)
+
+ check_c_source_compiles("
+ #include <linux/kd.h>
+ #include <linux/keyboard.h>
+
+ int main(int argc, char **argv)
+ {
+ struct kbentry kbe;
+ kbe.kb_table = KG_CTRL;
+ ioctl(0, KDGKBENT, &kbe);
+ }" HAVE_INPUT_KD)
+
+ file(GLOB CORE_LINUX_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c)
+ set(SOURCE_FILES ${SOURCE_FILES} ${CORE_LINUX_SOURCES})
+
+ if(HAVE_INPUT_EVENTS)
+ set(SDL_INPUT_LINUXEV 1)
+ endif()
+
+ if(SDL_HAPTIC AND HAVE_INPUT_EVENTS)
+ set(SDL_HAPTIC_LINUX 1)
+ file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c)
+ set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES})
+ set(HAVE_SDL_HAPTIC TRUE)
+ endif()
+
+ if(HAVE_INPUT_KD)
+ set(SDL_INPUT_LINUXKD 1)
+ endif()
+
+ check_include_file("libudev.h" HAVE_LIBUDEV_H)
+
+ if(PKG_CONFIG_FOUND)
+ pkg_search_module(DBUS dbus-1 dbus)
+ if(DBUS_FOUND)
+ set(HAVE_DBUS_DBUS_H TRUE)
+ include_directories(${DBUS_INCLUDE_DIRS})
+ list(APPEND EXTRA_LIBS ${DBUS_LIBRARIES})
+ endif()
+
+ pkg_search_module(IBUS ibus-1.0 ibus)
+ if(IBUS_FOUND)
+ set(HAVE_IBUS_IBUS_H TRUE)
+ include_directories(${IBUS_INCLUDE_DIRS})
+ list(APPEND EXTRA_LIBS ${IBUS_LIBRARIES})
+ endif()
+ endif()
+
+ check_include_file("fcitx/frontend.h" HAVE_FCITX_FRONTEND_H)
+ endif()
+
+ if(INPUT_TSLIB)
+ check_c_source_compiles("
+ #include \"tslib.h\"
+ int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB)
+ if(HAVE_INPUT_TSLIB)
+ set(SDL_INPUT_TSLIB 1)
+ list(APPEND EXTRA_LIBS ts)
+ endif()
+ endif()
+
+ if(SDL_JOYSTICK)
+ CheckUSBHID() # seems to be BSD specific - limit the test to BSD only? 
+ if(LINUX AND NOT ANDROID) + set(SDL_JOYSTICK_LINUX 1) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + endif() + + CheckPTHREAD() + + if(CLOCK_GETTIME) + check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + list(APPEND EXTRA_LIBS rt) + set(HAVE_CLOCK_GETTIME 1) + else() + check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + set(HAVE_CLOCK_GETTIME 1) + endif() + endif() + endif() + + check_include_file(linux/version.h HAVE_LINUX_VERSION_H) + if(HAVE_LINUX_VERSION_H) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") + endif() + + if(SDL_POWER) + if(LINUX) + set(SDL_POWER_LINUX 1) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_UNIX 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(RPATH) + set(SDL_RLD_FLAGS "") + if(BSDI OR FREEBSD OR LINUX OR NETBSD) + set(CMAKE_REQUIRED_FLAGS "-Wl,--enable-new-dtags") + check_c_compiler_flag("" HAVE_ENABLE_NEW_DTAGS) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_ENABLE_NEW_DTAGS) + set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir} -Wl,--enable-new-dtags") + else() + set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") + endif() + elseif(SOLARIS) + set(SDL_RLD_FLAGS "-R\${libdir}") + endif() + set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) + set(HAVE_RPATH TRUE) + endif() + +elseif(WINDOWS) + find_program(WINDRES windres) + + check_c_source_compiles(" + #include + int main(int argc, char **argv) { }" HAVE_WIN32_CC) + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(MSVC) + # Prevent codegen that would use the VC runtime libraries. + set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/GS-") + if(NOT ARCH_64) + set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/arch:SSE") + endif() + endif() + + # Check for DirectX + if(DIRECTX) + if(DEFINED MSVC_VERSION AND NOT ${MSVC_VERSION} LESS 1700) + set(USE_WINSDK_DIRECTX TRUE) + endif() + if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) + if("$ENV{DXSDK_DIR}" STREQUAL "") + message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") + endif() + set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") + endif() + + if(HAVE_WIN32_CC) + # xinput.h may need windows.h, but doesn't include it itself. 
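+ # The three probes below detect plain xinput.h plus the optional
+ # XINPUT_GAMEPAD_EX / XINPUT_STATE_EX structures, which not every DirectX /
+ # Windows SDK version provides; the resulting HAVE_* values drive the
+ # joystick and haptic sections further down.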
+ check_c_source_compiles(" + #include + #include + int main(int argc, char **argv) { }" HAVE_XINPUT_H) + check_c_source_compiles(" + #include + #include + XINPUT_GAMEPAD_EX x1; + int main(int argc, char **argv) { }" HAVE_XINPUT_GAMEPAD_EX) + check_c_source_compiles(" + #include + #include + XINPUT_STATE_EX s1; + int main(int argc, char **argv) { }" HAVE_XINPUT_STATE_EX) + else() + check_include_file(xinput.h HAVE_XINPUT_H) + endif() + + check_include_file(d3d9.h HAVE_D3D_H) + check_include_file(d3d11_1.h HAVE_D3D11_H) + check_include_file(ddraw.h HAVE_DDRAW_H) + check_include_file(dsound.h HAVE_DSOUND_H) + check_include_file(dinput.h HAVE_DINPUT_H) + check_include_file(mmdeviceapi.h HAVE_MMDEVICEAPI_H) + check_include_file(audioclient.h HAVE_AUDIOCLIENT_H) + check_include_file(dxgi.h HAVE_DXGI_H) + if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H) + set(HAVE_DIRECTX TRUE) + if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) + # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks + link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) + include_directories($ENV{DXSDK_DIR}\\Include) + endif() + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_WINMM 1) + file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + + if(HAVE_DSOUND_H) + set(SDL_AUDIO_DRIVER_DSOUND 1) + file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) + endif() + + if(HAVE_AUDIOCLIENT_H AND HAVE_MMDEVICEAPI_H) + set(SDL_AUDIO_DRIVER_WASAPI 1) + file(GLOB WASAPI_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/wasapi/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WASAPI_AUDIO_SOURCES}) + endif() + endif() + + if(SDL_VIDEO) + # requires SDL_LOADSO on Windows (IME, DX, etc.) + if(NOT SDL_LOADSO) + message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") + endif() + set(SDL_VIDEO_DRIVER_WINDOWS 1) + file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) + + if(RENDER_D3D AND HAVE_D3D_H) + set(SDL_VIDEO_RENDER_D3D 1) + set(HAVE_RENDER_D3D TRUE) + endif() + if(RENDER_D3D AND HAVE_D3D11_H) + set(SDL_VIDEO_RENDER_D3D11 1) + set(HAVE_RENDER_D3D TRUE) + endif() + set(HAVE_SDL_VIDEO TRUE) + endif() + + if(SDL_THREADS) + set(SDL_THREAD_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c + ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) + set(HAVE_SDL_THREADS TRUE) + endif() + + if(SDL_POWER) + set(SDL_POWER_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) + set(HAVE_SDL_POWER TRUE) + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_WINDOWS 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Libraries for Win32 native and MinGW + list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) + + # TODO: in configure.in the check for timers is set on + # cygwin | mingw32* - does this include mingw32CE? 
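+ # The subsystems below (timers, loadso) get native Win32 backends here;
+ # anything that stays unimplemented on this platform falls back to the
+ # dummy/generic sources appended near the end of this file.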
+ if(SDL_TIMERS) + set(SDL_TIMER_WINDOWS 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(SDL_LOADSO) + set(SDL_LOADSO_WINDOWS 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) + set(HAVE_SDL_LOADSO TRUE) + endif() + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(SDL_VIDEO) + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_WGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + set(HAVE_VIDEO_OPENGLES TRUE) + endif() + endif() + + if(SDL_JOYSTICK) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + if(HAVE_DINPUT_H) + set(SDL_JOYSTICK_DINPUT 1) + list(APPEND EXTRA_LIBS dinput8) + if(CMAKE_COMPILER_IS_MINGW) + list(APPEND EXTRA_LIBS dxerr8) + elseif (NOT USE_WINSDK_DIRECTX) + list(APPEND EXTRA_LIBS dxerr) + endif() + endif() + if(HAVE_XINPUT_H) + set(SDL_JOYSTICK_XINPUT 1) + endif() + if(NOT HAVE_DINPUT_H AND NOT HAVE_XINPUT_H) + set(SDL_JOYSTICK_WINMM 1) + endif() + set(HAVE_SDL_JOYSTICK TRUE) + + if(SDL_HAPTIC) + if(HAVE_DINPUT_H OR HAVE_XINPUT_H) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/windows/*.c) + if(HAVE_DINPUT_H) + set(SDL_HAPTIC_DINPUT 1) + endif() + if(HAVE_XINPUT_H) + set(SDL_HAPTIC_XINPUT 1) + endif() + else() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SDL_HAPTIC_DUMMY 1) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + endif() + endif() + + file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) + if(MINGW OR CYGWIN) + list(APPEND EXTRA_LIBS mingw32) + list(APPEND EXTRA_LDFLAGS "-mwindows") + set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") + list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") + endif() +elseif(APPLE) + # TODO: rework this all for proper MacOS X, iOS and Darwin support + + # We always need these libs on macOS at the moment. + # !!! FIXME: we need Carbon for some very old API calls in + # !!! FIXME: src/video/cocoa/SDL_cocoakeyboard.c, but we should figure out + # !!! FIXME: how to dump those. + if(NOT IOS) + set(SDL_FRAMEWORK_COCOA 1) + set(SDL_FRAMEWORK_CARBON 1) + endif() + + # Requires the darwin file implementation + if(SDL_FILE) + file(GLOB EXTRA_SOURCES ${SDL2_SOURCE_DIR}/src/file/cocoa/*.m) + set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) + # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. + set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) + set(HAVE_SDL_FILE TRUE) + # !!! FIXME: why is COREVIDEO inside this if() block? + set(SDL_FRAMEWORK_COREVIDEO 1) + else() + message_error("SDL_FILE must be enabled to build on MacOS X") + endif() + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_COREAUDIO 1) + file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.m) + # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. 
+ set_source_files_properties(${AUDIO_SOURCES} PROPERTIES LANGUAGE C) + set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + set(SDL_FRAMEWORK_COREAUDIO 1) + set(SDL_FRAMEWORK_AUDIOTOOLBOX 1) + endif() + + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_IOKIT 1) + if (IOS) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/iphoneos/*.m ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) + else() + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + endif() + + if(SDL_HAPTIC) + set(SDL_HAPTIC_IOKIT 1) + if (IOS) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SDL_HAPTIC_DUMMY 1) + else() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + if(NOT SDL_JOYSTICK) + message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") + endif() + endif() + + if(SDL_POWER) + set(SDL_POWER_MACOSX 1) + if (IOS) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/uikit/*.m) + else() + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_COCOA 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) + # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. + set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Actually load the frameworks at the end so we don't duplicate include. + if(SDL_FRAMEWORK_COREVIDEO) + find_library(COREVIDEO CoreVideo) + list(APPEND EXTRA_LIBS ${COREVIDEO}) + endif() + if(SDL_FRAMEWORK_COCOA) + find_library(COCOA_LIBRARY Cocoa) + list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) + endif() + if(SDL_FRAMEWORK_IOKIT) + find_library(IOKIT IOKit) + list(APPEND EXTRA_LIBS ${IOKIT}) + endif() + if(SDL_FRAMEWORK_FF) + find_library(FORCEFEEDBACK ForceFeedback) + list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) + endif() + if(SDL_FRAMEWORK_CARBON) + find_library(CARBON_LIBRARY Carbon) + list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) + endif() + if(SDL_FRAMEWORK_COREAUDIO) + find_library(COREAUDIO CoreAudio) + list(APPEND EXTRA_LIBS ${COREAUDIO}) + endif() + if(SDL_FRAMEWORK_AUDIOTOOLBOX) + find_library(AUDIOTOOLBOX AudioToolbox) + list(APPEND EXTRA_LIBS ${AUDIOTOOLBOX}) + endif() + + # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
+ if(SDL_VIDEO) + if (IOS) + set(SDL_VIDEO_DRIVER_UIKIT 1) + file(GLOB UIKITVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/uikit/*.m) + set(SOURCE_FILES ${SOURCE_FILES} ${UIKITVIDEO_SOURCES}) + else() + CheckCOCOA() + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_CGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + set(HAVE_VIDEO_OPENGLES TRUE) + endif() + endif() + endif() + + CheckPTHREAD() +elseif(HAIKU) + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_HAIKU 1) + file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + set(SDL_FILESYSTEM_HAIKU 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + + if(SDL_TIMERS) + set(SDL_TIMER_HAIKU 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(VIDEO_OPENGL) + # TODO: Use FIND_PACKAGE(OpenGL) instead + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_BGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + list(APPEND EXTRA_LIBS GL) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + CheckPTHREAD() +endif() + +if(VIDEO_VULKAN) + set(SDL_VIDEO_VULKAN 1) +endif() + +# Dummies +# configure.in does it differently: +# if not have X +# if enable_X { SDL_X_DISABLED = 1 } +# [add dummy sources] +# so it always adds a dummy, without checking, if it was actually requested. +# This leads to missing internal references on building, since the +# src/X/*.c does not get included. 
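+# In this CMake version the joystick dummy below is only added when the
+# subsystem was actually requested (and not on OSX, where it causes unresolved
+# symbols), while the remaining dummy backends are added whenever no real
+# backend was found, so internal references always resolve.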
+if(NOT HAVE_SDL_JOYSTICK) + set(SDL_JOYSTICK_DISABLED 1) + if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX + + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + endif() +endif() +if(NOT HAVE_SDL_HAPTIC) + set(SDL_HAPTIC_DISABLED 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(NOT HAVE_SDL_LOADSO) + set(SDL_LOADSO_DISABLED 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) +endif() +if(NOT HAVE_SDL_FILESYSTEM) + set(SDL_FILESYSTEM_DISABLED 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) +endif() + +# We always need to have threads and timers around +if(NOT HAVE_SDL_THREADS) + set(SDL_THREADS_DISABLED 1) + file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) +endif() +if(NOT HAVE_SDL_TIMERS) + set(SDL_TIMERS_DISABLED 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) +endif() + +if(NOT SDLMAIN_SOURCES) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) +endif() + +# Append the -MMD -MT flags +# if(DEPENDENCY_TRACKING) +# if(COMPILER_IS_GNUCC) +# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") +# endif() +# endif() + +configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" + "${SDL2_BINARY_DIR}/include/SDL_config.h") + +# Prepare the flags and remove duplicates +if(EXTRA_LDFLAGS) + list(REMOVE_DUPLICATES EXTRA_LDFLAGS) +endif() +if(EXTRA_LIBS) + list(REMOVE_DUPLICATES EXTRA_LIBS) +endif() +if(EXTRA_CFLAGS) + list(REMOVE_DUPLICATES EXTRA_CFLAGS) +endif() +listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) +set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) + +# Compat helpers for the configuration files +if(NOT WINDOWS OR CYGWIN) + # TODO: we need a Windows script, too + execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) + + set(prefix ${CMAKE_INSTALL_PREFIX}) + set(exec_prefix "\${prefix}") + set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") + set(bindir "\${exec_prefix}/bin") + set(includedir "\${prefix}/include") + if(SDL_STATIC) + set(ENABLE_STATIC_TRUE "") + set(ENABLE_STATIC_FALSE "#") + else() + set(ENABLE_STATIC_TRUE "#") + set(ENABLE_STATIC_FALSE "") + endif() + if(SDL_SHARED) + set(ENABLE_SHARED_TRUE "") + set(ENABLE_SHARED_FALSE "#") + else() + set(ENABLE_SHARED_TRUE "#") + set(ENABLE_SHARED_FALSE "") + endif() + + # Clean up the different lists + listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") + set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) + list(REMOVE_DUPLICATES SDL_STATIC_LIBS) + listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) + set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) + listtostr(SDL_LIBS _SDL_LIBS) + set(SDL_LIBS ${_SDL_LIBS}) + + # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") + # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") + + configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" + "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config") + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" + "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) +endif() + +##### Info output ##### +message(STATUS "") +message(STATUS "SDL2 was 
configured with the following options:") +message(STATUS "") +message(STATUS "Platform: ${CMAKE_SYSTEM}") +message(STATUS "64-bit: ${ARCH_64}") +message(STATUS "Compiler: ${CMAKE_C_COMPILER}") +message(STATUS "") +message(STATUS "Subsystems:") +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + message_bool_option(${_SUB} SDL_${_OPT}) +endforeach() +message(STATUS "") +message(STATUS "Options:") +list(SORT ALLOPTIONS) +foreach(_OPT ${ALLOPTIONS}) + # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters + # Get the padding + string(LENGTH ${_OPT} _OPTLEN) + math(EXPR _PADLEN "23 - ${_OPTLEN}") + string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) + message_tested_option(${_OPT} ${_PADDING}) +endforeach() +message(STATUS "") +message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") +message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") +message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") +message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") +message(STATUS "") +message(STATUS " Build Shared Library: ${SDL_SHARED}") +message(STATUS " Build Static Library: ${SDL_STATIC}") +if(SDL_STATIC) + message(STATUS " Build Static Library with Position Independent Code: ${SDL_STATIC_PIC}") +endif() +message(STATUS "") +if(UNIX) + message(STATUS "If something was not detected, although the libraries") + message(STATUS "were installed, then make sure you have set the") + message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") + message(STATUS "") +endif() + +# Ensure that the extra cflags are used at compile time +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") + +# Always build SDLmain +add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) +target_include_directories(SDL2main PUBLIC $) +set(_INSTALL_LIBS "SDL2main") +if (NOT ANDROID) + set_target_properties(SDL2main PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) +endif() + +if(SDL_SHARED) + add_library(SDL2 SHARED ${SOURCE_FILES} ${VERSION_SOURCES}) + if(APPLE) + set_target_properties(SDL2 PROPERTIES MACOSX_RPATH 1) + elseif(UNIX AND NOT ANDROID) + set_target_properties(SDL2 PROPERTIES + VERSION ${LT_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2-${LT_RELEASE}") + else() + set_target_properties(SDL2 PROPERTIES + VERSION ${SDL_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2") + endif() + if(MSVC AND NOT LIBC) + # Don't try to link with the default set of libraries. + set_target_properties(SDL2 PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") + set_target_properties(SDL2 PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") + set_target_properties(SDL2 PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") + endif() + set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) + target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) + target_include_directories(SDL2 PUBLIC $) + if (NOT ANDROID) + set_target_properties(SDL2 PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) + endif() +endif() + +if(SDL_STATIC) + set (BUILD_SHARED_LIBS FALSE) + add_library(SDL2-static STATIC ${SOURCE_FILES}) + if (NOT SDL_SHARED OR NOT WIN32) + set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") + # Note: Apparently, OUTPUT_NAME must really be unique; even when + # CMAKE_IMPORT_LIBRARY_SUFFIX or the like are given. Otherwise + # the static build may race with the import lib and one will get + # clobbered, when the suffix is realized via subsequent rename. 
+ endif() + set_target_properties(SDL2-static PROPERTIES POSITION_INDEPENDENT_CODE ${SDL_STATIC_PIC}) + if(MSVC AND NOT LIBC) + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") + endif() + # TODO: Win32 platforms keep the same suffix .lib for import and static + # libraries - do we need to consider this? + set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) + target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) + target_include_directories(SDL2-static PUBLIC $) + if (NOT ANDROID) + set_target_properties(SDL2-static PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) + endif() +endif() + +##### Tests ##### + +if(SDL_TEST) + file(GLOB TEST_SOURCES ${SDL2_SOURCE_DIR}/src/test/*.c) + add_library(SDL2_test STATIC ${TEST_SOURCES}) + + add_subdirectory(test) +endif() + +##### Installation targets ##### +install(TARGETS ${_INSTALL_LIBS} EXPORT SDL2Targets + LIBRARY DESTINATION "lib${LIB_SUFFIX}" + ARCHIVE DESTINATION "lib${LIB_SUFFIX}" + RUNTIME DESTINATION bin) + +##### Export files ##### +if (APPLE) + set(PKG_PREFIX "SDL2.framework/Resources") +elseif (WINDOWS) + set(PKG_PREFIX "cmake") +else () + set(PKG_PREFIX "lib/cmake/SDL2") +endif () + +include(CMakePackageConfigHelpers) +write_basic_package_version_file("${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake" + VERSION ${SDL_VERSION} + COMPATIBILITY AnyNewerVersion +) + +install(EXPORT SDL2Targets + FILE SDL2Targets.cmake + NAMESPACE SDL2:: + DESTINATION ${PKG_PREFIX} +) +install( + FILES + ${CMAKE_CURRENT_SOURCE_DIR}/SDL2Config.cmake + ${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake + DESTINATION ${PKG_PREFIX} + COMPONENT Devel +) + +file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) +file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) +foreach(_FNAME ${BIN_INCLUDE_FILES}) + get_filename_component(_INCNAME ${_FNAME} NAME) + list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) +endforeach() +list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) +install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) + +if(NOT (WINDOWS OR CYGWIN)) + if(SDL_SHARED) + if (APPLE) + set(SOEXT "dylib") + else() + set(SOEXT "so") + endif() + if(NOT ANDROID) + install(CODE " + execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink + \"libSDL2-2.0.${SOEXT}\" \"libSDL2.${SOEXT}\")") + install(FILES ${SDL2_BINARY_DIR}/libSDL2.${SOEXT} DESTINATION "lib${LIB_SUFFIX}") + endif() + endif() + if(FREEBSD) + # FreeBSD uses ${PREFIX}/libdata/pkgconfig + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") + else() + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc + DESTINATION "lib${LIB_SUFFIX}/pkgconfig") + endif() + install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) + # TODO: what about the .spec file? Is it only needed for RPM creation? 
+ install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "${CMAKE_INSTALL_FULL_DATAROOTDIR}/aclocal") +endif() + +##### Uninstall target ##### + +if(NOT TARGET uninstall) + configure_file( + "${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in" + "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" + IMMEDIATE @ONLY) + + add_custom_target(uninstall + COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) +endif() + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/sync.sh b/node_modules/npm-mas-mas/cmaki_generator/sync.sh new file mode 100644 index 0000000..6ad62d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/sync.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +#pattern=*.py CMakeLists.txt +pattern="utils.cmake common.yml CMakeLists.txt *.py" +other_dir=$1 + +for i in $(ls $pattern); do + if [ -f $other_dir/$i ]; then + diff $i $other_dir/$i > /dev/null || meld $i $other_dir/$i + fi +done + diff --git a/node_modules/npm-mas-mas/cmaki_generator/third_party.py b/node_modules/npm-mas-mas/cmaki_generator/third_party.py new file mode 100644 index 0000000..aaad57e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/third_party.py @@ -0,0 +1,1508 @@ +import os +import sys +import utils +import logging +import traceback +import datetime +import hash_version +import copy +import fnmatch + + +class InvalidPlatform(Exception): + def __init__(self, plat): + self._plat = plat + def __str__(self): + return "Invalid platform detected: %s" % self._plat + + +class DontExistsFile(Exception): + def __init__(self, source_filename): + self._source_filename = source_filename + def __str__(self): + return 'File does not exist: %s' % self._source_filename + + +class FailPrepare(Exception): + def __init__(self, node): + self._node = node + def __str__(self): + return 'Failed preparing package: %s' % self._node.get_package_name() + + +class AmbiguationLibs(Exception): + def __init__(self, kind, package, build_mode): + self._kind = kind + self._package = package + self._build_mode = build_mode + def __str__(self): + return "Ambiguity in %s in %s. Mode: %s." % (self._kind, self._package, self._build_mode) + + +class NotFoundInDataset(Exception): + def __init__(self, msg): + self._msg = msg + def __str__(self): + return "%s" % self._msg + + +class FailThirdParty(Exception): + def __init__(self, msg): + self._msg = msg + def __str__(self): + return "%s" % self._msg + + +class Found(Exception): + pass + + +def prepare_cmakefiles(cmake_files): + if not os.path.isdir(cmake_files): + logging.error('Invalid cmake files: {}'.format(cmake_files)) + + +def get_identifier(mode): + env = os.environ.copy() + cmaki_pwd = env['CMAKI_PWD'] + if utils.is_windows(): + script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.exe') + else: + script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.sh') + if not os.path.isfile(script_identifier): + raise Exception("there is no {} script".format(script_identifier)) + env['CMAKI_INFO'] = mode + return list(utils.get_stdout(script_identifier, env=env))[0] + + +def search_fuzzy(data, fuzzy_key, fallback='default'): + for key in data: + if fnmatch.fnmatch(fuzzy_key, key): + return data[key] + else: + if fallback in data: + return data[fallback] + else: + logging.error("no match for '%s' and no '%s' fallback in dataset" % (fuzzy_key, fallback)) + raise Exception("not found '{}'".format(fuzzy_key)) + + +if 'MODE' not in os.environ: + logging.warning('Using Debug by default. 
For explicit use, define environment var MODE') + os.environ['MODE'] = 'Debug' + +if 'CMAKI_INSTALL' not in os.environ: + logging.warning('Using CMAKI_INSTALL by default. For explicit use, define environment var CMAKI_INSTALL') + os.environ['CMAKI_INSTALL'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier', 'bin') + +if 'CMAKI_PWD' not in os.environ: + logging.warning('Using CMAKI_PWD by default. For explicit use, define environment var CMAKI_PWD') + os.environ['CMAKI_PWD'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier') + + +# +# INMUTABLE GLOBALS +# + +CMAKELIB_URL='https://github.com/makiolo/cmaki.git' +prefered = {} +prefered['Debug'] = ['Debug', 'RelWithDebInfo', 'Release'] +prefered['RelWithDebInfo'] = ['RelWithDebInfo', 'Release', 'Debug'] +prefered['Release'] = ['Release', 'RelWithDebInfo', 'Debug'] +magic_invalid_file = '__not_found__' +exceptions_fail_group = (OSError, IOError, ) +exceptions_fail_program = (KeyboardInterrupt, ) +uncompress_strip_default = '.' +uncompress_prefix_default = '.' +priority_default = 50 +build_unittests_foldername = 'unittest' +# detect platform +platform = get_identifier('ALL') +arch = get_identifier('ARCH') +operative_system = get_identifier('OS') +somask_id = operative_system[0] +archs = {platform: arch} +platforms = [platform] +logging.info('Detecting platform from script like: {} / {}'.format(platform, arch)) + +alias_priority_name = { 10: 'minimal', + 20: 'tools', + 30: 'third_party' } +alias_priority_name_inverse = {v: k for k, v in alias_priority_name.items()} + + +def is_valid(package_name, mask): + return (mask.find(somask_id) != -1) and (package_name != 'dummy') + + +def is_blacklisted(blacklist_file, no_blacklist, package_name): + blacklisted = False + if os.path.exists(blacklist_file): + with open(blacklist_file, 'rt') as f: + for line in f.readlines(): + if line.strip() == package_name: + blacklisted = True + break + # --no-blacklist can annular effect of blacklist + if blacklisted and (package_name in no_blacklist): + blacklisted = False + return blacklisted + + +class ThirdParty: + def __init__(self, user_parameters, name, parameters): + self.user_parameters = user_parameters + self.name = name + self.parameters = parameters + self.depends = [] + self.exceptions = [] + self.interrupted = False + self.ret = 0 # Initial return code + self.fail_stage = "" + self.blacklisted = is_blacklisted(self.user_parameters.blacklist, self.user_parameters.no_blacklist, self.get_package_name()) + self.published_invalidation = False + + + def __hash__(self): + return hash((self.get_package_name(), self.get_priority(), self.get_mask())) + + + def __eq__(self, other): + return (self.get_package_name() == other.get_package_name()) and (self.get_priority() == other.get_priority()) and (self.get_mask() == other.get_mask()) + + + def __ne__(self, other): + return not self.__eq__(other) + + + def __repr__(self): + return "%s (%s)" % (self.get_package_name(), self.get_mask()) + + + def __str__(self): + return "%s (%s)" % (self.get_package_name(), self.get_mask()) + + + def get_uncompress_strip(self, pos = 0): + try: + if isinstance(self.parameters['uncompress_strip'], list): + return self.parameters['uncompress_strip'][pos] + else: + return self.parameters['uncompress_strip'] + except KeyError: + # default value + return uncompress_strip_default + + + def get_uncompress_prefix(self, pos = 0): + try: + if isinstance(self.parameters['uncompress_prefix'], list): + return self.parameters['uncompress_prefix'][pos] + else: + return 
self.parameters['uncompress_prefix'] + except KeyError: + # default value + return uncompress_prefix_default + + + def get_uncompress(self, pos = 0): + try: + if self.parameters['uncompress'] is not None: + if isinstance(self.parameters['uncompress'], list): + return self.parameters['uncompress'][pos].find(somask_id) != -1 + else: + return self.parameters['uncompress'].find(somask_id) != -1 + else: + return False + except KeyError: + # default value + return True + + + def get_depends_raw(self): + return self.depends + + + def get_depends(self): + try: + return self.parameters['depends'] + except KeyError: + # default value + return None + + + def get_generate_custom_script(self, source_dir): + path_build = self.get_path_custom_script(source_dir, name='.build') + build_content = self.get_build_script_content() + if build_content is not None: + with open(path_build, 'wt') as f: + f.write(build_content) + + + def get_path_custom_script(self, source_folder, name = 'build'): + if utils.is_windows(): + path_build = os.path.join(source_folder, name + '.cmd') + else: + path_build = os.path.join(source_folder, name + '.sh') + return path_build + + + def has_custom_script(self, source_folder): + script_custom = os.path.exists( self.get_path_custom_script(source_folder) ) + return (self.get_build_script_content() is not None) or script_custom + + + def get_build_script_content(self): + try: + if not utils.is_windows(): + return self.parameters['build'] + else: + return self.parameters['build_windows'] + except KeyError: + # default value + return None + + + def get_source(self): + try: + source = self.parameters['source'] + if source is not None: + if not isinstance(source, list): + return [source] + else: + return source + else: + return [] + except KeyError: + # default value + return [] + + + def get_source_filename(self, position=0): + try: + return self.parameters['source_filename'] + except KeyError: + # default value + source = self.get_source()[position] + filename = source.split('/')[-1] + return filename + + + def get_sources_all(self, position=0): + try: + return self.parameters['sources_all'] + except KeyError: + return False + + + def get_before_copy(self): + try: + return self.parameters['before_copy'] + except KeyError: + # default value + return [] + + + def get_short_path(self): + try: + return self.parameters['short_path'] + except KeyError: + # default value + return False + + + def has_library(self, platform_info): + package = self.get_package_name() + return (('static' in platform_info) and (package != 'dummy')) or (('dynamic' in platform_info) and (package != 'dummy')) + + + def needs(self, node): + if node.is_valid(): + self.depends.append(node) + + + def get_package_name(self): + return self.name + + + def get_package_name_norm(self): + package = self.get_package_name() + for c in '-\\/:*?"<>|': + package = package.replace(c, '_') + return package + + + def get_package_name_norm_upper(self): + package_norm = self.get_package_name_norm() + return package_norm.upper() + + + def set_version(self, newversion): + self.parameters['version'] = newversion + + + def get_version(self): + try: + version = self.parameters['version'] + if version is None: + return '0.0.0.0' + else: + return version + except KeyError: + if self.get_package_name() != 'dummy': + raise Exception('[%s] Version is a mandatory field.' 
% self.get_package_name()) + + + def get_version_manager(self): + try: + version = self.get_version() + if version == '0.0.0.0': + return self.parameters['version_manager'] + else: + # si tiene version -> no usar renombrado git + return None + except KeyError: + return None + + + def get_cmake_target(self): + try: + return self.parameters['cmake_target'] + except KeyError: + return 'install' + + + def get_post_install(self): + try: + return self.parameters['post_install'] + except KeyError: + return [] + + + def get_priority(self): + try: + return int(self.parameters['priority']) + except KeyError: + return priority_default + + + def is_packing(self): + try: + return self.parameters['packing'] + except KeyError: + # default value + return True + + + def get_branch(self): + try: + return self.parameters['branch'] + except KeyError: + # default value + return None + + + def get_build_modes(self): + build_modes = [] + try: + if 'MODE' in os.environ and (os.environ['MODE'] != 'UNDEFINED'): + build_modes.append(os.environ['MODE']) + else: + mode = self.parameters['mode'] + if mode.find('d') != -1: + build_modes.append('Debug') + if mode.find('i') != -1: + build_modes.append('RelWithDebInfo') + if mode.find('r') != -1: + build_modes.append('Release') + except KeyError: + # no mode provided + build_modes.append('Debug') + build_modes.append('RelWithDebInfo') + build_modes.append('Release') + return build_modes + + + def get_mask(self): + try: + return self.parameters['mask'] + except KeyError: + return somask_id + + + def is_valid(self): + if self.blacklisted: + if not self.published_invalidation: + logging.debug('%s is not built because is blacklisted in %s' % (self.get_package_name(), os.path.basename(self.user_parameters.blacklist))) + self.published_invalidation = True + return False + return is_valid(self.get_package_name(), self.get_mask()) + + + def resolver(self, resolved, seen): + seen.append(self) + for edge in self.depends: + if edge not in resolved: + if edge in seen: + raise Exception('Circular reference detected: %s and %s' % (self.get_package_name(), edge.name)) + edge.resolver(resolved, seen) + if self.is_valid(): + resolved.append(self) + seen.remove(self) + + + def get_targets(self): + try: + return self.parameters['targets'] + except KeyError: + # default value + return [] + + + def get_exclude_from_all(self): + try: + return self.parameters['exclude_from_all'] + except KeyError: + # default value + return False + + + def get_exclude_from_clean(self): + try: + return self.parameters['exclude_from_clean'] + except KeyError: + # default value + return False + + + def get_unittest(self): + try: + return self.parameters['unittest'] + except KeyError: + # default value + return None + + + def get_cmake_prefix(self): + try: + cmake_prefix = self.parameters['cmake_prefix'] + if cmake_prefix.endswith('CMakeLists.txt'): + return os.path.dirname(cmake_prefix) + return cmake_prefix + except KeyError: + # default value + return "." 
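+ # The generator helpers below expand the targets declared in the package
+ # parameters: get_generator_targets() matches each target's "info"/"extra"
+ # blocks against the current platform via search_fuzzy(), merges them, and
+ # substitutes the placeholder variables TARGET, PACKAGE, PLATFORM, COMPILER,
+ # ARCH, EXT_STA and EXT_DYN before yielding (target_name, platform_info).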
+ + + def get_generator_targets(self, plat, _, compiler_cpp, ext_sta, ext_dyn): + + package = self.get_package_name_norm() + + for targets in self.get_targets(): + + for target_name in targets: + + platform_info = None + platform_extra = None + + target_info = targets[target_name] + if 'info' in target_info: + outputinfo = search_fuzzy(target_info['info'], plat) + if outputinfo is not None: + platform_info = copy.deepcopy( outputinfo ) + + if 'extra' in target_info: + outputinfo_extra = search_fuzzy(target_info['extra'], plat) + if outputinfo_extra is not None: + platform_extra = copy.deepcopy( outputinfo_extra ) + + if (platform_info is not None) and (platform_extra is not None): + platform_info = utils.smart_merge(platform_info, platform_extra) + + # variables for use in "info" and "extra" + platform_info = utils.apply_replaces_vars(platform_info, { + 'TARGET': target_name, + 'TARGET_UPPER': target_name.upper(), + 'PACKAGE': package, + 'PACKAGE_UPPER': package.upper(), + 'PLATFORM': plat, + 'COMPILER': os.path.basename(compiler_cpp), + 'EXT_DYN': ext_dyn, + 'EXT_STA': ext_sta, + 'ARCH': archs[plat], + }) + + if platform_info is None: + logging.error('No platform info in package %s, platform %s' % (package, plat)) + logging.error("%s" % targets) + sys.exit(1) + + yield (target_name, platform_info) + + + def have_any_in_target(self, plat, key, compiler_replace_maps): + any_static = False + for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): + for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): + if key in platform_info: + any_static = True + return any_static + + + def get_generate_find_package(self): + try: + return self.parameters['generate_find_package'] + except KeyError: + # default value + return True + + + def compiler_iterator(self, plat, compiler_replace_maps): + + plat_parms = search_fuzzy(self.parameters['platforms'], plat) + try: + generator = plat_parms['generator'] + except KeyError: + generator = None + + try: + compilers = plat_parms['compiler'] + except KeyError: + compilers = None + + # resolve map + compiler_replace_resolved = {} + for var, value in compiler_replace_maps.items(): + new_value = value + new_value = new_value.replace('$PLATFORM', plat) + compiler_replace_resolved[var] = new_value + compiler_replace_resolved['$ARCH'] = archs[plat] + compiler_replace_resolved['${ARCH}'] = archs[plat] + + # get compiler info + compiler = get_identifier('COMPILER') + + ext_dyn = plat_parms['ext_dyn'] + ext_sta = plat_parms['ext_sta'] + if compilers is None: + compilers = [('%s, %s' % (compiler, compiler))] + + for compiler in compilers: + compilers_tuple = compiler.split(',') + assert(len(compilers_tuple) == 2) + compiler_c = compilers_tuple[0].strip() + compiler_cpp = compilers_tuple[1].strip() + + compiler_c = utils.apply_replaces(compiler_c, compiler_replace_resolved) + compiler_cpp = utils.apply_replaces(compiler_cpp, compiler_replace_resolved) + + env_new = {} + env_modified = os.environ.copy() + + for env_iter in [env_modified, env_new]: + + env_iter['COMPILER'] = str(compiler) + env_iter['PLATFORM'] = str(plat) + env_iter['PACKAGE'] = str(self.get_package_name()) + env_iter['VERSION'] = str(self.get_version()) + env_iter['ARCH'] = str(archs[plat]) + + try: + environment = plat_parms['environment'] + + try: + environment_remove = environment['remove'] + for key, values in environment_remove.items(): + try: + oldpath = env_iter[key] + except KeyError: + 
oldpath = '' + uniq_values = set() + for v in values: + v = utils.apply_replaces(v, compiler_replace_resolved) + uniq_values.add(v) + for v in uniq_values: + oldpath = oldpath.replace(v, '') + env_iter[key] = oldpath + except KeyError: + pass + + # insert front with seprator = ":" + try: + environment_push_front = environment['push_front'] + for key, values in environment_push_front.items(): + try: + oldpath = env_iter[key] + except KeyError: + oldpath = '' + uniq_values = set() + for v in values: + v = utils.apply_replaces(v, compiler_replace_resolved) + uniq_values.add(v) + for v in uniq_values: + if len(oldpath) == 0: + separator = '' + else: + # -L / -I / -R use space + if v.startswith('-'): + separator = ' ' + else: + separator = ':' + oldpath = str('%s%s%s' % (v, separator, oldpath)) + env_iter[key] = oldpath + except KeyError: + pass + + # insert back with separator " " + try: + environment_flags = environment['flags'] + for key, values in environment_flags.items(): + try: + oldpath = env_iter[key] + except KeyError: + oldpath = '' + uniq_values = set() + for v in values: + v = utils.apply_replaces(v, compiler_replace_resolved) + uniq_values.add(v) + for v in uniq_values: + if len(oldpath) == 0: + separator = '' + else: + separator = ' ' + oldpath = str('%s%s%s' % (oldpath, separator, v)) + env_iter[key] = oldpath + except KeyError: + pass + + # insert new environment variables + try: + environment_assign = environment['assign'] + for key, value in environment_assign.items(): + value = utils.apply_replaces(value, compiler_replace_resolved) + env_iter[key] = value + except KeyError: + pass + + except KeyError: + pass + + yield (compiler_c, compiler_cpp, generator, ext_sta, ext_dyn, env_modified, env_new) + + + def remove_cmake3p(self, cmake3p_dir): + package_cmake3p = os.path.join(cmake3p_dir, self.get_base_folder()) + logging.debug('Removing cmake3p %s' % package_cmake3p) + if os.path.exists(package_cmake3p): + utils.tryremove_dir(package_cmake3p) + for dep in self.get_depends_raw(): + dep.remove_cmake3p(cmake3p_dir) + + + def get_base_folder(self): + package = self.get_package_name() + version = self.get_version() + return '%s-%s' % (package, version) + + + def get_workspace(self, plat): + package = self.get_package_name() + version = self.get_version() + return '%s-%s-%s' % (package, version, plat) + + + def get_build_directory(self, plat, build_mode): + package = self.get_package_name() + version = self.get_version() + if not self.get_short_path(): + return '.build_%s-%s-%s_%s' % (package, version, plat, build_mode) + else: + return '.bs_%s%s%s%s' % (package[:3], version[-1:], plat, build_mode) + + def get_binary_workspace(self, plat): + install_directory = os.path.join(self.user_parameters.prefix, self.get_workspace(plat)) + utils.trymkdir(install_directory) + return install_directory + + + def get_install_directory(self, plat): + install_directory = os.path.join(self.get_binary_workspace(plat), self.get_base_folder(), plat) + return install_directory + + + def get_download_directory(self): + package = self.get_package_name() + return '.download_%s' % package + + + def get_original_directory(self): + package = self.get_package_name() + return '.download_original_%s' % package + + + def apply_replace_maps(self, compiler_replace_maps): + package = self.get_package_name() + package_norm = self.get_package_name_norm() + to_package = os.path.abspath(package) + utils.trymkdir(to_package) + with utils.working_directory(to_package): + basedir = os.path.abspath('..') + 
compiler_replace_maps['$%s_BASE' % package_norm] = os.path.join(basedir, self.get_workspace('$PLATFORM'), self.get_base_folder()) + + + def generate_scripts_headers(self, compiler_replace_maps): + package = self.get_package_name() + package_norm = self.get_package_name_norm() + version = self.get_version() + to_package = os.path.abspath(package) + utils.trymkdir(to_package) + with utils.working_directory(to_package): + basedir = self.user_parameters.prefix + rootdir = self.user_parameters.rootdir + + # generate find.cmake + build_directory = self.get_build_directory(r"${CMAKI_PLATFORM}", r"${GLOBAL_BUILD_MODE}") + with open('find.cmake', 'wt') as f: + f.write("SET(%s_VERSION %s CACHE STRING \"Last version compiled ${PACKAGE}\" FORCE)\n" % (package_norm, version)) + f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}/include\" %s_INCLUDE)\n" % (basedir, package, version, package, version, package_norm)) + f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}\" %s_LIBDIR)\n" % (basedir, package, version, package, version, package_norm)) + f.write("file(TO_NATIVE_PATH \"%s/%s\" %s_BUILD)\n" % (rootdir, build_directory, package_norm)) + f.write("SET(%s_INCLUDE ${%s_INCLUDE} CACHE STRING \"Include dir %s\" FORCE)\n" % (package_norm, package_norm, package)) + f.write("SET(%s_LIBDIR ${%s_LIBDIR} CACHE STRING \"Libs dir %s\" FORCE)\n" % (package_norm, package_norm, package)) + f.write("SET(%s_BUILD ${%s_BUILD} CACHE STRING \"Build dir %s\" FORCE)\n" % (package_norm, package_norm, package)) + + # genereate find.script / cmd + if utils.is_windows(): + build_directory = self.get_build_directory("%PLATFORM%", "%BUILD_MODE%") + with open('find.cmd', 'wt') as f: + f.write("set %s_VERSION=%s\n" % (package_norm, version)) + f.write("set %s_HOME=%s\%s-%s-%%PLATFORM%%\%s-%s\%%PLATFORM%%\n" % (package_norm, basedir, package, version, package, version)) + f.write("set %s_BASE=%s\%s-%s-%%PLATFORM%%\%s-%s\n" % (package_norm, basedir, package, version, package, version)) + f.write("set SELFHOME=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\%%PLATFORM%%\n" % (basedir)) + f.write("set SELFBASE=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\n" % (basedir)) + f.write("set %s_BUILD=%s\%s\n" % (package_norm, rootdir, build_directory)) + f.write(r"md %SELFHOME%") + f.write("\n") + else: + build_directory = self.get_build_directory("${PLATFORM}", "${BUILD_MODE}") + with open('find.script', 'wt') as f: + f.write("#!/bin/bash\n") + f.write("%s_VERSION=%s\n" % (package_norm, version)) + f.write("%s_HOME=%s/%s-%s-$PLATFORM/%s-%s/$PLATFORM\n" % (package_norm, basedir, package, version, package, version)) + f.write("%s_BASE=%s/%s-%s-$PLATFORM/%s-%s\n" % (package_norm, basedir, package, version, package, version)) + f.write("SELFHOME=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION/$PLATFORM\n" % (basedir)) + f.write("SELFBASE=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION\n" % (basedir)) + f.write("%s_BUILD=%s/%s\n" % (package_norm, rootdir, build_directory)) + f.write("mkdir -p $SELFHOME\n") + + + def remove_cmakefiles(self): + utils.tryremove('CMakeCache.txt') + utils.tryremove('cmake_install.cmake') + utils.tryremove('install_manifest.txt') + utils.tryremove_dir('CMakeFiles') + + + def remove_scripts_headers(self): + package = self.get_package_name() + to_package = os.path.abspath(package) + utils.trymkdir(to_package) + with utils.working_directory(to_package): + utils.tryremove('find.cmake') + utils.tryremove('find.script') + 
utils.tryremove('find.cmd') + utils.tryremove('.build.sh') + utils.tryremove('.build.cmd') + utils.tryremove_dir_empty(to_package) + + + def generate_3rdpartyversion(self, output_dir): + package = self.get_package_name() + package_norm_upper = self.get_package_name_norm_upper() + version = self.get_version() + packing = self.is_packing() + if not packing: + logging.debug("package %s, don't need 3rdpartyversion" % package) + return + thirdparty_path = os.path.join(output_dir, '3rdpartyversions') + utils.trymkdir(thirdparty_path) + with utils.working_directory(thirdparty_path): + with open('%s.cmake' % package, 'wt') as f: + f.write('SET(%s_REQUIRED_VERSION %s EXACT)\n' % (package_norm_upper, version)) + + + def _smart_uncompress(self, position, package_file_abs, uncompress_directory, destiny_directory, compiler_replace_maps): + uncompress = self.get_uncompress(position) + uncompress_strip = self.get_uncompress_strip(position) + uncompress_prefix = self.get_uncompress_prefix(position) + if uncompress: + if (uncompress_strip == uncompress_strip_default) and (uncompress_prefix == uncompress_prefix_default): + # case fast (don't need intermediate folder) + ok = utils.extract_file(package_file_abs, destiny_directory, self.get_first_environment(compiler_replace_maps)) + else: + source_with_strip = os.path.join(uncompress_directory, uncompress_strip) + destiny_with_prefix = os.path.join(destiny_directory, uncompress_prefix) + ok = utils.extract_file(package_file_abs, uncompress_directory, self.get_first_environment(compiler_replace_maps)) + utils.move_folder_recursive(source_with_strip, destiny_with_prefix) + utils.tryremove_dir(source_with_strip) + if not ok: + raise Exception('Invalid uncompressed package %s - %s' % (package, package_file_abs)) + + + def _prepare_third_party(self, position, url, build_directory, compiler_replace_maps): + package = self.get_package_name() + source_filename = self.get_source_filename(position) + uncompress_strip = self.get_uncompress_strip(position) + uncompress_prefix = self.get_uncompress_prefix(position) + uncompress = self.get_uncompress(position) + uncompress_directory = self.get_download_directory() + utils.trymkdir(uncompress_directory) + + logging.debug('source_filename = %s' % source_filename) + logging.debug('uncompress_strip = %s' % uncompress_strip) + logging.debug('uncompress_prefix = %s' % uncompress_prefix) + logging.debug('uncompress = %s' % uncompress) + + # resolve url vars + url = url.replace('$NPP_SERVER', os.environ['NPP_SERVER']) + + # files in svn + if(url.startswith('svn://')): + # strip is not implemmented with svn:// + utils.tryremove_dir( build_directory ) + logging.info('Download from svn: %s' % url) + self.safe_system( 'svn co %s %s' % (url, build_directory), compiler_replace_maps ) + # utils.tryremove_dir( os.path.join(build_directory, '.svn') ) + + elif(url.endswith('.git') or (url.find('github') != -1) or (url.find('bitbucket') != -1)) and not ( url.endswith('.zip') or url.endswith('.tar.gz') or url.endswith('.tar.bz2') or url.endswith('.tgz') or url.endswith('.py') ): + # strip is not implemmented with git:// + utils.tryremove_dir( build_directory ) + logging.info('Download from git: %s' % url) + branch = self.get_branch() + extra_cmd = '' + if branch is not None: + logging.info('clonning to branch %s' % branch) + extra_cmd = '%s' % branch + self.safe_system('git clone %s --depth=200 %s %s' % (extra_cmd, url, build_directory), compiler_replace_maps) + # self.safe_system('git clone %s %s' % (url, build_directory), 
compiler_replace_maps) + with utils.working_directory(build_directory): + # self.safe_system('git checkout {}'.format(extra_cmd), compiler_replace_maps) + self.safe_system('git submodule init', compiler_replace_maps) + self.safe_system('git submodule update', compiler_replace_maps) + # depends_file = self.user_parameters.depends + # if depends_file is not None: + # with utils.working_directory(build_directory): + # # leer el fichero de dependencias + # if os.path.exists(depends_file): + # data = utils.deserialize(depends_file) + # else: + # data = {} + # + # # obedecer, si trae algo util + # if package in data: + # logging.debug('data package version is %s' % data[package]) + # try: + # git_version = hash_version.to_git_version(build_directory, data[package]) + # logging.debug('data package in git version is %s' % git_version) + # logging.debug('updating to revision %s' % git_version) + # self.safe_system('git reset --hard %s' % git_version, compiler_replace_maps) + # except AssertionError: + # logging.info('using HEAD') + # + # # actualizar y reescribir + # revision = hash_version.get_last_version(build_directory) + # assert(len(revision) > 0) + # data[package] = revision + # utils.serialize(data, depends_file) + # else: + # logging.warning('not found depends file, using newest changeset') + + # file in http + elif ( url.startswith('http://') + or url.startswith('https://') + or url.endswith('.zip') + or url.endswith('.tar.gz') + or url.endswith('.tar.bz2') + or url.endswith('.tgz') + or url.endswith('.py') ): + + logging.info('Download from url: %s' % url) + # download to source_filename + package_file_abs = os.path.join(uncompress_directory, source_filename) + utils.download_from_url(url, package_file_abs) + if os.path.isfile(package_file_abs): + + # uncompress in download folder for after generate a patch with all changes + if not os.path.isdir( self.get_original_directory() ): + utils.trymkdir( self.get_original_directory() ) + logging.debug('preparing original uncompress') + # uncompress in original + self._smart_uncompress(position, package_file_abs, uncompress_directory, self.get_original_directory(), compiler_replace_maps) + else: + logging.debug('skipping original uncompress (already exists)') + + # uncompress in intermediate build directory + self._smart_uncompress(position, package_file_abs, uncompress_directory, build_directory, compiler_replace_maps) + + else: + raise DontExistsFile(source_filename) + + else: + raise Exception('Invalid source: %s - %s' % (package, url)) + + + def prepare_third_party(self, build_directory, compiler_replace_maps): + utils.trymkdir(build_directory) + package = self.get_package_name() + version = self.get_version() + sources_all = self.get_sources_all() + exceptions = [] + i = 0 + for source_url in self.get_source(): + if (source_url is None) or (len(source_url) <= 0) or (source_url == 'skip'): + logging.warning('[%s %s] Skipping preparation ...' % (package, version)) + else: + logging.warning('[%s %s] trying prepare from %s ...' 
% (package, version, source_url)) + try: + self._prepare_third_party(i, source_url, build_directory, compiler_replace_maps) + if not sources_all: + # sources_all = false ---> any source + # sources_all = Trie ----> all source + break + except exceptions_fail_group + exceptions_fail_program: + raise + except: + exceptions.append(sys.exc_info()) + i += 1 + if len(exceptions) > 0: + i = 0 + for exc_type, exc_value, exc_traceback in exceptions: + print ("---- Exception #%d / %d ----------" % (i+1, len(exceptions))) + traceback.print_exception(exc_type, exc_value, exc_traceback) + print ("----------------------------------") + i += 1 + raise FailPrepare(self) + + + def get_prefered_build_mode(self, prefered_build_mode_list): + build_modes = self.get_build_modes() + assert(len(prefered_build_mode_list) > 0) + prefered_build_mode = prefered_build_mode_list[0] + while (prefered_build_mode not in build_modes) and (len(prefered_build_mode_list)>0): + prefered_build_mode_list.pop(0) + if len(prefered_build_mode_list) > 0: + prefered_build_mode = prefered_build_mode_list[0] + return prefered_build_mode + + + def generate_cmake_condition(self, platforms, compiler_replace_maps): + target_uniques = set() + condition = '' + i = 0 + for plat in platforms: + for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): + for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): + package_lower = package.lower() + if (package_lower not in target_uniques) and (package_lower != 'dummy'): + target_uniques.add(package_lower) + if self.has_library(platform_info): + if i == 0: + condition += '(NOT TARGET %s)' % package_lower + else: + condition += ' OR (NOT TARGET %s)' % package_lower + i += 1 + return condition + + + def _search_library(self, rootdir, special_pattern): + ''' + 3 cases: + string + pattern as special string + list of strings + ''' + logging.debug('-- searching in {} with pattern: {}'.format(rootdir, special_pattern)) + + if special_pattern is None: + logging.debug('Failed searching lib in %s' % rootdir) + return False, None + + package = self.get_package_name() + if isinstance(special_pattern, list): + utils.verbose(self.user_parameters, 'Searching list %s' % special_pattern) + valid_ff = None + for ff in special_pattern: + valid, valid_ff = self._search_library(rootdir, utils.get_norm_path(ff)) + if valid: + break + return valid, valid_ff + + elif special_pattern.startswith('/') and special_pattern.endswith('/'): + pattern = special_pattern[1:-1] + utils.verbose(self.user_parameters, 'Searching rootdir %s, pattern %s' % (rootdir, pattern)) + files_found = utils.rec_glob(rootdir, pattern) + utils.verbose(self.user_parameters, 'Candidates %s' % files_found) + if len(files_found) == 1: + relfile = os.path.relpath(files_found[0], rootdir) + return True, utils.get_norm_path(relfile) + elif len(files_found) == 0: + msg = 'No library found in %s with pattern %s' % (rootdir, pattern) + logging.debug(msg) + return False, None + else: + msg = "Ambiguation in %s" % (package) + logging.debug(msg) + return False, None + else: + pathfull = os.path.join(rootdir, special_pattern) + utils.verbose(self.user_parameters, 'Checking file %s' % pathfull) + if os.path.exists(pathfull): + return True, utils.get_norm_path(special_pattern) + else: + return False, None + + + def search_library(self, workbase, dataset, kind, rootdir=None): + ''' + can throw exception + ''' + build_mode = 
self.get_prefered_build_mode(prefered[os.environ['MODE']]) + if rootdir is None: + rootdir = workbase + utils.verbose(self.user_parameters, 'Searching rootdir %s' % (rootdir)) + if (build_mode.lower() in dataset) and (kind in dataset[build_mode.lower()]): + special_pattern = dataset[build_mode.lower()][kind] + valid, valid_ff = self._search_library(rootdir, special_pattern) + if valid: + return valid_ff + else: + package = self.get_package_name() + raise AmbiguationLibs(kind, package, build_mode) + else: + raise NotFoundInDataset("Not found in dataset, searching %s - %s" % (build_mode.lower(), kind)) + + + def search_library_noexcept(self, workbase, dataset, kind): + try: + rootdir = os.path.abspath(workbase) + finalpath = self.search_library(workbase, dataset, kind, rootdir) + utils.superverbose(self.user_parameters, '[01] path: %s' % finalpath) + return finalpath + except AmbiguationLibs: + finalpath = '%s.%s' % (magic_invalid_file, kind) + utils.superverbose(self.user_parameters, '[02] path: %s' % finalpath) + return finalpath + except NotFoundInDataset: + finalpath = '%s.%s' % (magic_invalid_file, kind) + utils.superverbose(self.user_parameters, '[03] path: %s' % finalpath) + return finalpath + + + def check_parts_exists(self, workbase, package, target, dataset, kindlibs, build_modes=None): + ''' + Asegura que todas las partes del target existen, devuelve True o False si todas las partes existen + + workbase: directorio de instalacion base + package: nombre del paquete + target: nombre del target + dataset: es la estructura que contiene las estrategias de busqueda + {"debug": {"part1": ["*.dll", "*d.dll"]}, "release": {"part1": ["*_release.dll"]}} + kindlibs: tupla de partes a verificar, cada tupla representa (tipo, obligatoriedad) + build_modes: restringuir la busqueda a ciertos build modes + ''' + + all_ok = True + if build_modes is None: + build_modes = self.get_build_modes() + for build_mode in build_modes: + for kind, must in kindlibs: + try: + part_fullpath = os.path.join(workbase, self.search_library_noexcept(workbase, dataset, kind)) + if not os.path.exists(part_fullpath): + if must: + logging.error("[%s] Don't found %s in %s. Mode: %s. Path: %s. Dataset: %s" % (package, kind, target, build_mode, part_fullpath, dataset)) + all_ok = False + else: + msg = "[%s] Don't found %s in %s. Mode: %s. 
Path: %s" % (package, kind, target, build_mode, part_fullpath) + if build_mode != 'Release': + logging.warning(msg) + else: + logging.debug(msg) + except NotFoundInDataset as e: + if must: + logging.error("[ERROR] [NOT FOUND] [%s] %s" % (package, e)) + all_ok = False + return all_ok + + + def is_invalid_lib(self, libpath): + return (libpath is None) or (utils.get_filename_no_ext(os.path.basename(libpath)) == magic_invalid_file) + + + def generate_cmakefiles(self, platforms, folder_output, compiler_replace_maps): + errors = 0 + packing = self.is_packing() + if not packing: + logging.warning("package: %s don't need generate cmakefiles" % self.get_package_name()) + return errors + oldcwd = os.getcwd() + utils.trymkdir(folder_output) + with utils.working_directory(folder_output): + package = self.get_package_name() + package_lower = package.lower() + package_upper = package.upper() + with open('%s-config.cmake' % package_lower, 'wt') as f: + f.write('''CMAKE_POLICY(PUSH) +CMAKE_POLICY(VERSION 3.0) +cmake_minimum_required(VERSION 3.0) +cmake_policy(SET CMP0011 NEW) + ''') + + condition = self.generate_cmake_condition(platforms, compiler_replace_maps) + if len(condition) > 0: + f.write('\nif(%s)\n' % condition) + + f.write('''\ninclude(${CMAKI_PATH}/facts/facts.cmake) +cmaki_download_package() +file(TO_NATIVE_PATH "${_DIR}" %s_HOME) +file(TO_NATIVE_PATH "${_DIR}/${CMAKI_PLATFORM}" %s_PREFIX) +set(%s_HOME "${%s_HOME}" PARENT_SCOPE) +set(%s_PREFIX "${%s_PREFIX}" PARENT_SCOPE) +include(${_MY_DIR}/${CMAKI_PLATFORM}.cmake) + ''' % (package_upper, package_upper, package_upper, package_upper, package_upper, package_upper)) + + if len(condition) > 0: + f.write('\nendif()\n') + + f.write('\nCMAKE_POLICY(POP)') + + with open('%s-config-version.cmake' % package_lower, 'wt') as f: + f.write('''\ +cmake_minimum_required(VERSION 3.0) +cmake_policy(SET CMP0011 NEW) +include(${CMAKI_PATH}/facts/facts.cmake) +cmaki_package_version_check() + ''') + + for plat in platforms: + + workspace = self.get_workspace(plat) + base_folder = self.get_base_folder() + + for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): + + with open('%s.cmake' % (plat), 'wt') as f: + + install_3rdparty_dependencies = True + + includes_set = [] + definitions_set = [] + system_depends_set = [] + depends_set = set() + + for target, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): + + target_lower = target.lower() + target_upper = target.upper() + + if self.has_library(platform_info) and (target != 'dummy'): + f.write('if(NOT TARGET %s)\n\n' % target_lower) + + try: + add_3rdparty_dependencies = platform_info['add_3rdparty_dependencies'] + except KeyError: + add_3rdparty_dependencies = True + + try: + lib_provided = platform_info['lib_provided'] + except KeyError: + lib_provided = True + + if 'include' in platform_info: + include = platform_info['include'] + for d in include: + includes_set.append(d) + + # rename to definitions + if 'definitions' in platform_info: + definitions = platform_info['definitions'] + if definitions is not None: + for d in definitions: + definitions_set.append(d) + + if 'system_depends' in platform_info: + system_depends = platform_info['system_depends'] + if system_depends is not None: + for sd in system_depends: + system_depends_set.append(sd) + + if 'targets_paths' in self.parameters: + targets_paths = self.parameters['targets_paths'] + if targets_paths is not None: + for key, value in 
targets_paths.items(): + f.write('file(TO_NATIVE_PATH "%s" %s)\n' % (value, key)) + + # work_base = os.path.join(oldcwd, workspace, base_folder, plat) + work_base = self.get_install_directory(plat) + + if ('executable' in platform_info) and (target != 'dummy'): + # a target in mode executable, dont need install + install_3rdparty_dependencies = False + + if 'use_run_with_libs' in platform_info: + if utils.is_windows(): + f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.cmd" %s_LAUNCHER)\n' % target_upper) + else: + f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.sh" %s_LAUNCHER)\n' % target_upper) + + executable = platform_info['executable'] + if not self.check_parts_exists(work_base, package, target, executable, [('bin', True)], build_modes=['Release']): + errors += 1 + release_bin = self.search_library_noexcept(work_base, executable, 'bin') + + for suffix in ['', '_EXECUTABLE']: + if 'use_run_with_libs' in platform_info: + f.write('set(%s%s "${%s_LAUNCHER}" "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, target_upper, plat, utils.get_norm_path(release_bin, native=False))) + else: + f.write('set(%s%s "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, plat, utils.get_norm_path(release_bin, native=False))) + f.write('file(TO_NATIVE_PATH "${%s%s}" %s%s)\n' % (target_upper, suffix, target_upper, suffix)) + f.write('\n') + + if ('dynamic' in platform_info) and (target != 'dummy'): + + dynamic = platform_info['dynamic'] + + # add depend + if add_3rdparty_dependencies: + f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) + + if utils.is_windows(): + if not self.check_parts_exists(work_base, package, target, dynamic, [('dll', True), ('lib', lib_provided), ('pdb', False)]): + errors += 1 + + debug_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + release_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + relwithdebinfo_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + minsizerel_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + + debug_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + release_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + relwithdebinfo_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + minsizerel_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + + try: + relwithdebinfo_pdb = self.search_library(work_base, dynamic, 'pdb') + except Exception as e: + logging.debug('exception searching lib: %s' % e) + relwithdebinfo_pdb = None + + try: + debug_pdb = self.search_library(work_base, dynamic, 'pdb') + except Exception as e: + logging.debug('exception searching lib: %s' % e) + debug_pdb = None + + f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) + f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) + f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) + + # dll + f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_dll, native=False))) + f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_dll, native=False))) + f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_dll, native=False))) + f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_dll, native=False))) + f.write('\n') + + # lib + if not self.is_invalid_lib(debug_lib): + 
f.write('\tIMPORTED_IMPLIB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) + if not self.is_invalid_lib(release_lib): + f.write('\tIMPORTED_IMPLIB_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) + if not self.is_invalid_lib(relwithdebinfo_lib): + f.write('\tIMPORTED_IMPLIB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) + if not self.is_invalid_lib(minsizerel_lib): + f.write('\tIMPORTED_IMPLIB_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) + f.write('\n') + + # pdb + if not self.is_invalid_lib(debug_pdb): + f.write('\tIMPORTED_PDB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_pdb, native=False))) + + if not self.is_invalid_lib(relwithdebinfo_pdb): + f.write('\tIMPORTED_PDB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_pdb, native=False))) + + f.write(')\n') + else: + + if not self.check_parts_exists(work_base, package, target, dynamic, [('so', True)]): + errors += 1 + + debug_so = self.search_library_noexcept(work_base, dynamic, 'so') + release_so = self.search_library_noexcept(work_base, dynamic, 'so') + relwithdebinfo_so = self.search_library_noexcept(work_base, dynamic, 'so') + minsizerel_so = self.search_library_noexcept(work_base, dynamic, 'so') + + try: + debug_so_full = os.path.join(oldcwd, work_base, debug_so) + debug_soname = utils.get_soname(debug_so_full, env=env_modified) + logging.debug('detected soname in debug library: {}'.format(debug_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + debug_soname = None + + try: + release_so_full = os.path.join(oldcwd, work_base, release_so) + release_soname = utils.get_soname(release_so_full, env=env_modified) + logging.debug('detected soname in release library: {}'.format(release_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + release_soname = None + + try: + relwithdebinfo_so_full = os.path.join(oldcwd, work_base, relwithdebinfo_so) + relwithdebinfo_soname = utils.get_soname(relwithdebinfo_so_full, env=env_modified) + logging.debug('detected soname in relwithdebinfo library: {}'.format(relwithdebinfo_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + relwithdebinfo_soname = None + + try: + minsizerel_so_full = os.path.join(oldcwd, work_base, minsizerel_so) + minsizerel_soname = utils.get_soname(minsizerel_so_full, env=env_modified) + logging.debug('detected soname in minsizerel library: {}'.format(minsizerel_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + minsizerel_soname = None + + f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) + f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) + f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) + + # so + f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_so, native=False))) + f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_so, native=False))) + f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_so, native=False))) + f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_so, native=False))) + f.write('\n') + + # soname + if (debug_soname is not None) and 
os.path.exists( os.path.join(os.path.dirname(debug_so_full), debug_soname) ): + f.write('\tIMPORTED_SONAME_DEBUG "%s"\n' % utils.get_norm_path(debug_soname, native=False)) + + if (release_soname is not None) and os.path.exists( os.path.join(os.path.dirname(release_so_full), release_soname) ): + f.write('\tIMPORTED_SONAME_RELEASE "%s"\n' % utils.get_norm_path(release_soname, native=False)) + + if (relwithdebinfo_soname is not None) and os.path.exists( os.path.join(os.path.dirname(relwithdebinfo_so_full), relwithdebinfo_soname) ): + f.write('\tIMPORTED_SONAME_RELWITHDEBINFO "%s"\n' % utils.get_norm_path(relwithdebinfo_soname, native=False)) + + if (minsizerel_soname is not None) and os.path.exists( os.path.join(os.path.dirname(minsizerel_so_full), minsizerel_soname) ): + f.write('\tIMPORTED_SONAME_MINSIZEREL "%s"\n' % utils.get_norm_path(minsizerel_soname, native=False)) + + f.write(')\n') + + if ('static' in platform_info) and (target != 'dummy'): + + static = platform_info['static'] + + if not self.check_parts_exists(work_base, package, target, static, [('lib', True)]): + errors += 1 + + debug_lib = self.search_library_noexcept(work_base, static, 'lib') + release_lib = self.search_library_noexcept(work_base, static, 'lib') + relwithdebinfo_lib = self.search_library_noexcept(work_base, static, 'lib') + minsizerel_lib = self.search_library_noexcept(work_base, static, 'lib') + + if add_3rdparty_dependencies: + # register target + f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) + + f.write('ADD_LIBRARY(%s STATIC IMPORTED)\n' % target_lower) + f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) + f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) + + # lib + f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) + f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) + f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) + f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) + + f.write(')\n') + + if install_3rdparty_dependencies and (target != 'dummy'): + f.write('cmaki_install_3rdparty(%s)\n' % target_lower) + f.write('\n') + + if self.has_library(platform_info) and (target != 'dummy'): + f.write('endif()\n\n') + + # print includes + if len(includes_set) > 0: + for d in list(set(includes_set)): + f.write('list(APPEND %s_INCLUDE_DIRS ${_DIR}/%s)\n' % (package_upper, d)) + + f.write('\n') + + if len(definitions_set) > 0: + for d in list(set(definitions_set)): + f.write('add_definitions(%s)\n' % d) + f.write('\n') + + if len(system_depends_set) > 0: + f.write('# begin system depends\n') + for sd in list(set(system_depends_set)): + f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, sd)) + f.write('# end system depends\n') + + # if self.get_generate_find_package(): + # f.write('# Depends of %s (%s)\n' % (self.get_package_name(), self.get_version())) + # for dep in self.get_depends_raw(): + # package_name = dep.get_package_name() + # if package_name not in depends_set: + # if dep.have_any_in_target(plat, 'dynamic', compiler_replace_maps): + # f.write('cmaki_find_package(%s)\n' % (package_name)) + # else: + # f.write('# cmaki_find_package(%s) # static package\n' % (package_name)) + # depends_set.add(package_name) + # 
f.write('\n') + + logging.info('----------------------------------------------------') + if self.user_parameters.fast: + logging.debug('skipping for because is in fast mode: "generate_cmakefiles"') + break + + return errors + + + def show_environment_vars(self, env_modified): + package = self.get_package_name() + logging.debug('------- begin print environment variables for compile %s ---------' % package) + for key, value in sorted(env_modified.items()): + logging.debug("%s=%s" % (key, value)) + logging.debug('------- end print environment variables for compile %s -----------' % package) + + + def get_first_environment(self, compiler_replace_maps): + for plat in platforms: + for _, _, _, _, _, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): + return env_modified + return os.environ.copy() + + + def safe_system(self, cmd, compiler_replace_maps): + return utils.safe_system(cmd, env=self.get_first_environment(compiler_replace_maps)) + + + def remove_packages(self): + # remove packages before + for plat in platforms: + prefix_package = os.path.join(self.user_parameters.prefix, '%s.tar.gz' % self.get_workspace(plat)) + prefix_package_cmake = os.path.join(self.user_parameters.prefix, '%s-cmakelib-%s.tar.gz' % (self.get_base_folder(), sys.platform)) + prefix_folder_cmake = os.path.join(self.user_parameters.third_party_dir, self.get_base_folder()) + logging.info("preremoving package %s" % prefix_package) + logging.info("preremoving package cmakefiles %s" % prefix_package_cmake) + logging.info("preremoving folder cmakefiles %s" % prefix_folder_cmake) + utils.tryremove(prefix_package) + utils.tryremove(prefix_package_cmake) + utils.tryremove_dir(prefix_folder_cmake) + + diff --git a/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt new file mode 100644 index 0000000..a7a3475 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt @@ -0,0 +1,30 @@ +PROJECT(UNITEST_CMAKI_GENERATOR_${CMAKI_PLATFORM}_${CMAKE_BUILD_TYPE} CXX) +cmake_minimum_required(VERSION 3.0) + +include(cmaki) + +get_filename_component(BASEDIR "${CMAKE_CURRENT_LIST_FILE}" PATH) +set(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_BINARY_DIR}) +set(EXECUTABLE_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") +set(LIBRARY_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") + +foreach(PACKAGE_ITER ${FIND_PACKAGES}) + string(TOUPPER ${PACKAGE_ITER} PACKAGE_UPPER) + string(REGEX REPLACE "-" "_" PACKAGE_UPPER ${PACKAGE_UPPER}) + include("${DEPENDS_PATH}/3rdpartyversions/${PACKAGE_ITER}.cmake") + message("find_package in test: ${PACKAGE_UPPER}, version: ${${PACKAGE_UPPER}_REQUIRED_VERSION}") + cmaki_find_package(${PACKAGE_ITER} ${${PACKAGE_UPPER}_REQUIRED_VERSION}) +endforeach() +message("include dirs: ${CMAKI_INCLUDE_DIRS}") +message("libs to link in test: ${CMAKI_LIBRARIES}") + +foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) +endforeach() +add_executable(test_${CMAKI_PLATFORM} ${UNITTEST_PATH}) +target_link_libraries(test_${CMAKI_PLATFORM} ${CMAKI_LIBRARIES}) +install(TARGETS test_${CMAKI_PLATFORM} DESTINATION "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") + +enable_testing() +add_test(NAME test_cmake_${CMAKI_PLATFORM} COMMAND test_${CMAKI_PLATFORM} WORKING_DIRECTORY "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") + diff --git a/node_modules/npm-mas-mas/cmaki_generator/upload.py b/node_modules/npm-mas-mas/cmaki_generator/upload.py new file mode 100644 index 0000000..034813c --- /dev/null +++ 
b/node_modules/npm-mas-mas/cmaki_generator/upload.py
@@ -0,0 +1,35 @@
+import os
+import logging
+import utils
+from third_party import platforms
+
+
+def upload(node, parameters, compiler_replace_maps):
+
+    if parameters.server is None:
+        logging.warning('parameter --server is mandatory for upload, skipping upload')
+    else:
+        # pack tar.gz binaries
+        for plat in platforms:
+            prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % node.get_workspace(plat))
+            if not os.path.isfile(prefix_package):
+                logging.error('package does not exist: {}'.format(prefix_package))
+                return False
+            command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package)
+            node.ret += abs(utils.safe_system(command))
+
+        if node.ret != 0:
+            return False
+
+        # pack cmakefiles
+        if not parameters.no_packing_cmakefiles:
+            for plat in platforms:
+                base_folder = node.get_base_folder()
+                prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (base_folder, plat))
+                if not os.path.isfile(prefix_package_cmake):
+                    logging.error('package does not exist: {}'.format(prefix_package_cmake))
+                    return False
+                command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package_cmake)
+                node.ret += abs(utils.safe_system(command))
+
+    return True
diff --git a/node_modules/npm-mas-mas/cmaki_generator/upload_package.py b/node_modules/npm-mas-mas/cmaki_generator/upload_package.py
new file mode 100644
index 0000000..1d57c34
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_generator/upload_package.py
@@ -0,0 +1,48 @@
+import os
+import sys
+import logging
+# import urllib2
+import argparse
+import logging
+# import poster
+import requests
+
+logger = logging.getLogger(__name__)
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--url', required=True, dest='url', help='url')
+    parser.add_argument('--filename', required=True, dest='filename', help='filename')
+    parser.add_argument('--field', dest='field', help='field name', default='uploaded')
+    parameters = parser.parse_args()
+
+    if not os.path.exists(parameters.filename):
+        logging.error('file does not exist: %s' % parameters.filename)
+        sys.exit(1)
+
+    with open(parameters.filename, 'rb') as f:
+        try:
+            response = requests.post(parameters.url, files={parameters.field: f})
+            if response.status_code == 200:
+                sys.exit(0)
+            else:
+                logger.error('Error uploading file {} to {} (HTTP {})'.format(parameters.filename, parameters.url, response.status_code))
+                sys.exit(1)  # non-zero exit so the return-code check in upload.py detects the failure
+        except Exception as e:
+            logger.error('Exception uploading file {} to {}: {}'.format(parameters.filename, parameters.url, e))
+            sys.exit(1)  # non-zero exit so the return-code check in upload.py detects the failure
+
+    # # Register the streaming http handlers with urllib2
+    # poster.streaminghttp.register_openers()
+    #
+    # with open(parameters.filename, "rb") as f:
+    #     datagen, headers = poster.encode.multipart_encode({parameters.field: f})
+    #     # Create the Request object
+    #     request = urllib2.Request(parameters.url, datagen, headers)
+    #     # Actually do the request, and get the response
+    #     handler = urllib2.urlopen(request)
+    #     logging.info( handler.read() )
+    #     if handler.getcode() == 200:
+    #         sys.exit(0)
+    #     else:
+    #         sys.exit(1)
diff --git a/node_modules/npm-mas-mas/cmaki_generator/utils.py b/node_modules/npm-mas-mas/cmaki_generator/utils.py
new file mode 100644
index 0000000..767d218
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_generator/utils.py
@@ -0,0 +1,531 @@
+import os
+import re
+import sys
+import shutil
+import logging
+import glob
+import subprocess
+import tarfile
+import zipfile
+import time +import contextlib +import hashlib +import yaml +import json +import errno +import multiprocessing +import fnmatch +from requests import get # to make GET request +from distutils.spawn import find_executable +try: + import bz2 + python_has_bz2 = True +except ImportError: + logging.debug('python module bz2 built-in is not available') + python_has_bz2 = False + + +class NotFoundProgram(Exception): + def __init__(self, msg): + self._msg = msg + def __repr__(self): + return "%s" % self._msg + + +def is_windows(): + return sys.platform.startswith("win") + + +def smart_merge(dict1, dict2): + assert(dict1 is not None) + assert(dict2 is not None) + for key, value in dict2.items(): + if isinstance(value, dict): + try: + dict1[key].update(value) + except KeyError: + dict1[key] = value + elif isinstance(value, list): + try: + dict1[key] += value + except KeyError: + dict1[key] = value + else: + dict1[key] = value + return dict1 + + +def apply_replaces(element, dictionary): + if isinstance(element, dict): + new = {} + for k,v in element.items(): + new[k] = apply_replaces(v, dictionary) + return new + elif isinstance(element, list): + new = [] + for e in element: + new.append( apply_replaces(e, dictionary) ) + return new + elif isinstance(element, bool): + return element + elif element is not None: + new_element = str(element) + for k,v in dictionary.items(): + # find in original, not in replaced + if str(element).find(k) != -1: + new_element = new_element.replace(k, v) + return new_element + else: + return None + + +def apply_replaces_vars(element, dictionary): + newdict = {} + for k,v in dictionary.items(): + newdict['$%s' % k] = v + newdict['${%s}' % k] = v + return apply_replaces(element, newdict) + + +def tryremove(filename): + try: + logging.debug('Removing file %s' % (filename)) + os.remove(filename) + except OSError: + pass + + +def _tryremove_dir(directory): + i = 0 + tries = 3 + while os.path.isdir(directory): + try: + shutil.rmtree(directory) + if not os.path.exists(directory): + i = tries + 1 + except OSError: + logging.debug('Fail removing %s. 
Retry %d/%d' % (directory, i + 1, tries)) + if i < tries: + time.sleep(1) + else: + raise Exception("Fail removing %s" % os.path.abspath(directory)) + finally: + i += 1 + + +def tryremove_dir(source): + logging.debug('Removing directory %s' % (source)) + if sys.platform.startswith('win'): + if os.path.isdir(source) and safe_system('rd /s /q %s' % source) != 0: + raise Exception('Fail removing %s' % source) + else: + _tryremove_dir(source) + + +def tryremove_dir_empty(source): + try: + os.rmdir(source) + except OSError as ex: + if ex.errno != errno.ENOTEMPTY: + logging.debug('Removing empty directory %s' % (source)) + + +def download_from_url(url, file_name): + with open(file_name, "wb") as file: + response = get(url) + file.write(response.content) + + +def setup_logging(level, logname): + format_console_log = '%(asctime)s %(levelname)-7s %(message)s' + format_date = '%H-%M:%S' + dirlog = os.path.dirname(logname) + if dirlog != '': + trymkdir(dirlog) + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + if(len(logging.root.handlers) == 1): + logging.root.removeHandler( logging.root.handlers[0] ) + handler = logging.StreamHandler() + handler.setLevel(level) + handler.setFormatter(logging.Formatter(format_console_log, format_date)) + logger.addHandler(handler) + handler2 = logging.FileHandler(logname) + handler2.setLevel(logging.DEBUG) + handler2.setFormatter(logging.Formatter(format_console_log, format_date)) + logger.addHandler(handler2) + + +def prompt_yes_no(default = False): + # raw_input returns the empty string for "enter" + yes = set(['yes','y', 'ye', '']) + no = set(['no','n']) + + choice = raw_input().lower() + if choice in yes: + return True + elif choice in no: + return False + else: + sys.stdout.write("Please respond with 'yes' or 'no'") + return default + + +def show_element(element, deep = 0): + if isinstance(element, dict): + for k,v in element.items(): + logging.info("%s<%s>" % ('\t'*deep, k)) + show_element(v, deep + 1) + elif isinstance(element, list): + for e in element: + show_element(e, deep + 1) + else: + logging.info('%s%s' % ('\t'*deep, element)) + + + +def rec_glob(rootdir, pattern): + + # logging.info('---> {} [START]'.format(rootdir)) + result = [] + for root, dirs, files in os.walk(rootdir): + # logging.info('---> {}'.format(root)) + for file in files: + # logging.info('---> {}'.format(file)) + if fnmatch.fnmatch(file, pattern): + # logging.info('---> {} [MATCH]'.format(file)) + result.append(os.path.join(root, file)) + return result + + +def trymkdir(directory): + if not os.path.exists( directory ): + os.makedirs( directory ) + + +def move_folder_recursive(source, destiny): + if not os.path.exists(source): + raise Exception('Error in move_folder_recursive: source not exists: %s' % source) + logging.debug('move recursive from {} to {}'.format(source, destiny)) + for archive in os.listdir(source): + # ignore some stuff + if archive.startswith('.git') or archive.startswith('.svn'): + continue + archive2 = os.path.join(source, archive) + destiny2 = os.path.join(destiny, archive) + if(os.path.isdir(archive2)): + move_folder_recursive(archive2, destiny2) + else: + if os.path.isfile(destiny2): + logging.debug('Replacing file %s' % destiny2) + tryremove(destiny2) + # try create destiny directory + trymkdir( os.path.dirname(destiny2) ) + # move file + shutil.move(archive2, destiny2) + + +def copy_folder_recursive(source, destiny): + if not os.path.exists(source): + raise Exception('Error in copy_folder_recursive: source not exists: %s' % source) + for archive 
in os.listdir(source): + # ignore some stuff + if archive.startswith('.git') or archive.startswith('.svn'): + continue + archive2 = os.path.join(source, archive) + destiny2 = os.path.join(destiny, archive) + if(os.path.isdir(archive2)): + copy_folder_recursive(archive2, destiny2) + else: + if os.path.isfile(destiny2): + logging.debug('Replacing file %s' % destiny2) + tryremove(destiny2) + # try create destiny directory + trymkdir( os.path.dirname(destiny2) ) + # copy file (and stat) + shutil.copy2(archive2, destiny2) + + +def extract_file(path, to_directory, environment): + + # convert to absolute + logging.debug('Extract file %s' % path) + path = os.path.abspath(path) + + if path.endswith('.zip'): + opener, mode = zipfile.ZipFile, 'r' + # elif path.endswith('.tar.gz') or path.endswith('.tgz'): + # opener, mode = tarfile.open, 'r:gz' + elif path.endswith('.tar.gz') or path.endswith('.tgz'): + # python have problems with big .tar.gz in linux -_- + if is_windows(): + with working_directory(to_directory): + logging.debug('Using cmake -E tar for package: %s' % path) + ret = safe_system('cmake -E tar zxvf %s' % path, env=environment) + ok = (ret == 0) + # be careful, early return + return ok + else: + with working_directory(to_directory): + logging.debug('Using system tar for package: %s' % path) + ret = safe_system('tar zxvf %s' % path, env=environment) + ok = (ret == 0) + # be careful, early return + return ok + elif path.endswith('.tar.bz2') or path.endswith('.tbz'): + # python have problems with big .tar.bz2 in windows + if is_windows(): + with working_directory(to_directory): + logging.debug('Using cmake -E tar for package: %s' % path) + ret = safe_system('cmake -E tar xvf %s' % path, env=environment) + ok = (ret == 0) + # be careful, early return + return ok + else: + if python_has_bz2: + opener, mode = tarfile.open, 'r:bz2' + else: + logging.warning('Not using python-bz2 module for uncompress: %s in %s' % (path, to_directory)) + with working_directory(to_directory): + logging.debug('Using bunzip2 and tar for package: %s' % path) + ret = safe_system('bunzip2 -c %s | tar xvf -' % path, env=environment) + ok = (ret == 0) + + # be careful, early return + return ok + elif path.endswith('.tar.xz'): + # needd "xz" + with working_directory(to_directory): + ret = safe_system('tar xpvf %s' % path, env=environment) + ok = (ret == 0) + return ok + else: + raise ValueError("Could not extract `%s` as no appropriate extractor is found" % path) + + # create directory if not exists + trymkdir(to_directory) + with working_directory(to_directory): + file = opener(path, mode) + try: + file.extractall() + finally: + file.close() + return True + + +# Copy Paste from run_tests (handler.py) +def detect_ncpus(): + return multiprocessing.cpu_count() + + +def get_norm_path(pathfile, native=True): + if native and is_windows(): + return pathfile.replace('/', '\\') + else: + return pathfile.replace('\\', '/') + + +def get_filename_no_ext(filename): + return os.path.splitext(filename)[0] + + +def get_soname(libfile, env=os.environ.copy()): + + if is_windows(): + logging.error('get_soname is not supported in windows') + return + + cmd = ['objdump', "-p", libfile] + for line in get_stdout(cmd, env, 'objdump'): + if line.find('SONAME') != -1: + return line.split()[1] + raise Exception('No soname detected in %s' % libfile) + + +def get_needed(libfile, env=os.environ.copy()): + + if is_windows(): + logging.error('get_needed is not supported in windows') + return + + cmd = ['objdump', "-p", libfile] + for line in 
get_stdout(cmd, env, 'objdump'): + if line.find('NEEDED') != -1: + yield line.split()[1] + + +def get_real_home(): + if sys.platform.startswith("sun"): + # problems launching subshell in solaris + return os.environ['HOME'] + elif sys.platform.startswith("linux"): + cmd = "REAL_HOME=$(cd $HOME && pwd -P) && echo $REAL_HOME" + for line in get_stdout(cmd): + return line + return os.environ['HOME'] + else: + return os.path.expanduser('~') + + +@contextlib.contextmanager +def working_directory(path): + prev_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +def walklevel(some_dir, level=1): + ''' + os.walk() with max level + ''' + some_dir = some_dir.rstrip(os.path.sep) + if not os.path.isdir(some_dir): + logging.error('%s is not folder' % some_dir) + sys.exit(1) + + num_sep = some_dir.count(os.path.sep) + for root, dirs, files in os.walk(some_dir): + yield root, dirs, files + num_sep_this = root.count(os.path.sep) + if num_sep + level <= num_sep_this: + del dirs[:] + + +def get_revision_svn(repo, path_svn='svn', env=os.environ.copy()): + ''' + This command need svn in PATH + ''' + if os.path.exists(repo): + with working_directory(repo): + env_copy = env.copy() + svn_bin = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'bin')) + svn_lib = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'lib')) + env_copy['PATH'] = "%s:%s" % (svn_bin, env_copy['PATH']) + env_copy['LD_LIBRARY_PATH'] = "%s:%s" % (svn_lib, env_copy['LD_LIBRARY_PATH']) + cmd = "%s info" % path_svn + p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines=True, env=env_copy) + data, err = p.communicate() + + # clean stdout + data = [line.strip() for line in data.split('\n') if line.strip()] + + for line in data: + separator = 'Last Changed Rev: ' + if line.startswith(separator): + return int(line[len(separator):]) + else: + separator = 'Revisi.n del .ltimo cambio: ' + if re.match(separator, line) is not None: + return int(line[len(separator):]) + return -1 + + +def verbose(parameters, msg): + if parameters.verbose > 0: + logging.info(msg) + + +def superverbose(parameters, msg): + if parameters.verbose > 1: + logging.info(msg) + + +def hyperverbose(parameters, msg): + if parameters.verbose > 2: + logging.info(msg) + + +def md5sum(filename, blocksize=65536): + hash = hashlib.md5() + with open(filename, "rb") as f: + for block in iter(lambda: f.read(blocksize), b""): + hash.update(block) + return hash.hexdigest() + + +def serialize(pythonDict, fileName): + serialize_json(pythonDict, fileName) + + +def deserialize(fileName): + return deserialize_json(fileName) + + +def serialize_yaml(pythonDict, fileName): + serialiedData = yaml.dump(pythonDict, default_flow_style=True) + with open(fileName, 'wt') as f: + f.write(serialiedData) + + +def deserialize_yaml(fileName): + with open(fileName, 'rt') as f: + stringData = f.read() + return yaml.load(stringData) + + +def serialize_json(pythonDict, fileName): + serialiedData = json.dumps(pythonDict) + with open(fileName, 'wt') as f: + f.write(serialiedData) + + +def deserialize_json(fileName): + with open(fileName, 'rt') as f: + stringData = f.read() + return json.loads(stringData) + + +def get_stdout(cmd, env=os.environ.copy(), program_required=None): + if isinstance(cmd, list): + cmd = ' '.join(cmd) + # logging.debug('launch cmd: %s' % cmd) + + # search executable + ok = True + if program_required is not None: + ok = find_executable(program_required, env['PATH']) + if ok: + p = 
subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) + data, err = p.communicate() + data = [line.strip() for line in data.split('\n') if line.strip()] + for line in data: + # logging.debug('[out cmd] %s' % line) + yield line + else: + raise NotFoundProgram('Not found program %s, for execute: %s' % (program_required, cmd)) + + +def safe_system(cmd, env=None): + if env is None: + env = os.environ.copy() + logging.debug("exec command: %s" % cmd) + + if 'CMAKI_PRINT' in env: + try: + return subprocess.call('{}'.format(cmd), env=env, shell=True) + except OSError as e: + logging.warning(str(e)) + return -1 + else: + p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) + data, err = p.communicate() + data = [line for line in data.split('\n')] + if p.returncode != 0: + logging.error("begin@output: %s" % cmd) + for line in data: + if p.returncode != 0: + logging.warning(line) + else: + logging.debug(line) + if p.returncode != 0: + logging.error("end@output: %s" % cmd) + return p.returncode + + +if __name__ == '__main__': + print(rec_glob('.', '*.yml')) + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml new file mode 100644 index 0000000..cf179bc --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml @@ -0,0 +1,12 @@ +language: c +services: docker +os: linux +env: + - IMAGE=linux-x64 + # - IMAGE=windows-x86 + - IMAGE=windows-x64 + # - IMAGE=linux-x86 + - IMAGE=android-arm +# - IMAGE=browser-asmjs +script: + - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/docker.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt new file mode 100644 index 0000000..5cd8b41 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt @@ -0,0 +1,6 @@ +project(cmaki_identifier_project CXX) +cmake_minimum_required(VERSION 3.0) +set(CMAKE_CXX_STANDARD 14) +include_directories(boostorg_predef/include) +enable_testing() +add_subdirectory(tests) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/README.md b/node_modules/npm-mas-mas/cmaki_identifier/README.md new file mode 100644 index 0000000..e49baa2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/README.md @@ -0,0 +1,19 @@ +# identify your platform + +gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_identifier.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_identifier) + +MSVC 2015: [![Build status](https://ci.appveyor.com/api/projects/status/tljl8xip6m8joi86?svg=true)](https://ci.appveyor.com/project/makiolo/cmaki-identifier) + +## travis: +- linux_64_glibc_2.19-gcc_4-debug +- linux_64_glibc_2.19-gcc_4-release +- linux_64_glibc_2.19-clang_3-debug +- linux_64_glibc_2.19-clang_3-release +- macos_64-clang_7-debug +- macos_64-clang_7-release + +## appveyor: +- windows_32-msvc_2015-debug +- windows_32-msvc_2015-release +- windows_64-msvc_2015-debug +- windows_64-msvc_2015-release diff --git a/node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef b/node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef new file mode 160000 index 0000000..a2a5010 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef @@ -0,0 +1 @@ +Subproject commit a2a5010e2824b7740890a3bf463b8c4b8927aaa7 diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh new file mode 100644 index 0000000..ebffa54 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# if [ $# -e 0 ]; then +# echo $0: [ERROR], usage: ./cmaki_emulator.sh +# exit 1 +# fi + +export DIRPROGRAM="$( cd "$( dirname "$1" )" >/dev/null && pwd )" +export BASENAMEPROGRAM=$(basename "$1") +export CMAKI_PWD="${CMAKI_PWD:-$(pwd)}" +export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" +export LD_LIBRARY_PATH=$(pwd):$LD_LIBRARY_PATH + +if [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x86" ]]; then + cd ${DIRPROGRAM} + wine ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x64" ]]; then + cd ${DIRPROGRAM} + wine ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/android-arm" ]]; then + cd ${DIRPROGRAM} + unset LD_LIBRARY_PATH + qemu-arm -L /usr/arm-linux-gnueabi ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv6" ]]; then + cd ${DIRPROGRAM} + qemu-arm ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv7" ]]; then + cd ${DIRPROGRAM} + qemu-arm ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/browser-asmjs" ]]; then + cd ${DIRPROGRAM} + nodejs ./$BASENAMEPROGRAM "${@:2}" +else + $CMAKI_EMULATOR "$1" "${@:2}" +fi + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake new file mode 100644 index 0000000..7a50cc9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake @@ -0,0 +1,12 @@ +set(PLATFORM "") +set(dirscript ${CMAKE_CURRENT_LIST_DIR}) +IF(WIN32) + set(executable cmaki_identifier.exe) +else() + set(executable cmaki_identifier.sh) +endif() +execute_process(COMMAND ${dirscript}/${executable} + OUTPUT_VARIABLE PLATFORM + OUTPUT_STRIP_TRAILING_WHITESPACE) +MESSAGE("${PLATFORM}") + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh new file mode 100755 index 0000000..371107b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh @@ -0,0 +1,14 @@ +#!/bin/bash +export DIRSCRIPT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_PWD="${CMAKI_PWD:-$DIRSCRIPT}/.." +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" + +if [ -f "cmaki_identifier.exe" ]; then + $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier.exe +else + $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier +fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt b/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt new file mode 100644 index 0000000..08224be --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt @@ -0,0 +1,113 @@ +# This is the CMakeCache file. +# For build in directory: /home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug +# It was generated by CMake: /usr/local/bin/cmake +# You can edit this file to change values found and used by cmake. +# If you do not want to change any of the values, simply exit the editor. 
+# If you do want to change a value, simply edit, save, and exit the editor. +# The syntax for the file is as follows: +# KEY:TYPE=VALUE +# KEY is the name of a variable in the cache. +# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!. +# VALUE is the current value for the KEY. + +######################## +# EXTERNAL cache entries +######################## + +//No help, variable specified on the command line. +CMAKE_BUILD_TYPE:UNINITIALIZED=Debug + +//No help, variable specified on the command line. +CMAKE_CXX_COMPILER:UNINITIALIZED=g++ + +//No help, variable specified on the command line. +CMAKE_C_COMPILER:UNINITIALIZED=gcc + +//Value Computed by CMake. +CMAKE_FIND_PACKAGE_REDIRECTS_DIR:STATIC=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/pkgRedirects + +//No help, variable specified on the command line. +CMAKE_INSTALL_PREFIX:UNINITIALIZED=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/bin + +//No help, variable specified on the command line. +CMAKE_MODULE_PATH:UNINITIALIZED=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki + +//Value Computed by CMake +CMAKE_PROJECT_DESCRIPTION:STATIC= + +//Value Computed by CMake +CMAKE_PROJECT_HOMEPAGE_URL:STATIC= + +//Value Computed by CMake +CMAKE_PROJECT_NAME:STATIC=cmaki_identifier_project + +//The CMake toolchain file +CMAKE_TOOLCHAIN_FILE:FILEPATH=no cross compile + +//No help, variable specified on the command line. +COVERAGE:UNINITIALIZED=FALSE + +//No help, variable specified on the command line. +FIRST_ERROR:UNINITIALIZED=1 + +//No help, variable specified on the command line. +NPP_CACHE:UNINITIALIZED=TRUE + +//No help, variable specified on the command line. +TESTS_VALGRIND:UNINITIALIZED=FALSE + +//No help, variable specified on the command line. +WITH_CONAN:UNINITIALIZED=0 + +//Value Computed by CMake +cmaki_identifier_project_BINARY_DIR:STATIC=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug + +//Value Computed by CMake +cmaki_identifier_project_IS_TOP_LEVEL:STATIC=ON + +//Value Computed by CMake +cmaki_identifier_project_SOURCE_DIR:STATIC=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier + + +######################## +# INTERNAL cache entries +######################## + +//This is the directory where this CMakeCache.txt was created +CMAKE_CACHEFILE_DIR:INTERNAL=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug +//Major version of cmake used to create the current loaded cache +CMAKE_CACHE_MAJOR_VERSION:INTERNAL=3 +//Minor version of cmake used to create the current loaded cache +CMAKE_CACHE_MINOR_VERSION:INTERNAL=31 +//Patch version of cmake used to create the current loaded cache +CMAKE_CACHE_PATCH_VERSION:INTERNAL=6 +//Path to CMake executable. +CMAKE_COMMAND:INTERNAL=/usr/local/bin/cmake +//Path to cpack program executable. +CMAKE_CPACK_COMMAND:INTERNAL=/usr/local/bin/cpack +//Path to ctest program executable. +CMAKE_CTEST_COMMAND:INTERNAL=/usr/local/bin/ctest +//Path to cache edit program executable. +CMAKE_EDIT_COMMAND:INTERNAL=/usr/local/bin/ccmake +//Name of external makefile project generator. +CMAKE_EXTRA_GENERATOR:INTERNAL= +//Name of generator. +CMAKE_GENERATOR:INTERNAL=Unix Makefiles +//Generator instance identifier. 
+CMAKE_GENERATOR_INSTANCE:INTERNAL= +//Name of generator platform. +CMAKE_GENERATOR_PLATFORM:INTERNAL= +//Name of generator toolset. +CMAKE_GENERATOR_TOOLSET:INTERNAL= +//Source directory with the top level CMakeLists.txt file for this +// project +CMAKE_HOME_DIRECTORY:INTERNAL=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier +//number of local generators +CMAKE_NUMBER_OF_MAKEFILES:INTERNAL=1 +//Platform information initialized +CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1 +//Path to CMake installation. +CMAKE_ROOT:INTERNAL=/usr/local/share/cmake-3.31 +//uname command +CMAKE_UNAME:INTERNAL=/usr/bin/uname + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache b/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache new file mode 100644 index 0000000..3dccd73 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache @@ -0,0 +1 @@ +# This file is generated by cmake for dependency checking of the CMakeCache.txt file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki new file mode 120000 index 0000000..1e97214 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki @@ -0,0 +1 @@ +../npm-mas-mas/cmaki_scripts/cmaki.js \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which new file mode 120000 index 0000000..6f8415e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which @@ -0,0 +1 @@ +../which/bin/node-which \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json new file mode 100644 index 0000000..a3f6d56 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json @@ -0,0 +1,471 @@ +{ + "name": "cmaki_identifier", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": 
"sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": 
{ + "node": ">=10.17.0" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm-mas-mas": { + "version": "0.0.1", + "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", + "dev": true, + "license": "MIT", + "dependencies": { + "shelljs": ">=0.8.5" + }, + "bin": { + "cmaki": "cmaki_scripts/cmaki.js" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": 
"sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/shelljs": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", + "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "execa": "^5.1.1", + "fast-glob": "^3.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md new file mode 100644 index 0000000..e0b218b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md @@ -0,0 +1,171 @@ +# @nodelib/fs.scandir + +> List files and directories inside the specified directory. + +## :bulb: Highlights + +The package is aimed at obtaining information about entries in the directory. + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.scandir +``` + +## Usage + +```ts +import * as fsScandir from '@nodelib/fs.scandir'; + +fsScandir.scandir('path', (error, stats) => { /* … */ }); +``` + +## API + +### .scandir(path, [optionsOrSettings], callback) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. + +```ts +fsScandir.scandir('path', (error, entries) => { /* … */ }); +fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); +fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); +``` + +### .scandirSync(path, [optionsOrSettings]) + +Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. + +```ts +const entries = fsScandir.scandirSync('path'); +const entries = fsScandir.scandirSync('path', {}); +const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsScandir.Settings({ followSymbolicLinks: false }); + +const entries = fsScandir.scandirSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. +* `stats` (optional) — An instance of `fs.Stats` class. + +For example, the `scandir` call for `tools` directory with one directory inside: + +```ts +{ + dirent: Dirent { name: 'typedoc', /* … */ }, + name: 'typedoc', + path: 'tools/typedoc' +} +``` + +## Options + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
+ +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; + readdir?: typeof fs.readdir; + readdirSync?: typeof fs.readdirSync; +} + +const settings = new fsScandir.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## `old` and `modern` mode + +This package has two modes that are used depending on the environment and parameters of use. + +### old + +* Node.js below `10.10` or when the `stats` option is enabled + +When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). + +### modern + +* Node.js 10.10+ and the `stats` option is disabled + +In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. + +This mode makes fewer calls to the file system. It's faster. + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. 
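The options and modes documented in the README above can be combined in a single call. The following is a minimal usage sketch based only on the API that README describes (`Settings`, `stats`, `followSymbolicLinks`, `scandirSync`); the directory name `node_modules` is an arbitrary example path, not something the package requires.

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Pre-create one Settings instance and reuse it across calls, as the README
// recommends for frequent use. Enabling `stats` switches the package to its
// "old" mode and attaches an fs.Stats instance to every entry.
const settings = new fsScandir.Settings({
    stats: true,
    followSymbolicLinks: true, // resolve symbolic link targets with fs.stat
});

const entries = fsScandir.scandirSync('node_modules', settings);
for (const entry of entries) {
    // `name`, `path` and `dirent` are always present; `stats` is present here
    // only because the `stats` option is enabled above.
    console.log(entry.path, entry.dirent.isDirectory(), entry.stats?.size);
}
```

Passing a pre-created `Settings` instance avoids constructing a new one on every call, which is why the README suggests it when the functions are called frequently.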
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts new file mode 100644 index 0000000..827f1db --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts @@ -0,0 +1,20 @@ +import type * as fsStat from '@nodelib/fs.stat'; +import type { Dirent, ErrnoException } from '../types'; +export interface ReaddirAsynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; + (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; +} +export interface ReaddirSynchronousMethod { + (filepath: string, options: { + withFileTypes: true; + }): Dirent[]; + (filepath: string): string[]; +} +export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { + readdir: ReaddirAsynchronousMethod; + readdirSync: ReaddirSynchronousMethod; +}; +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js new file mode 100644 index 0000000..f0fe022 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts new file mode 100644 index 0000000..33f1749 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts @@ -0,0 +1,4 @@ +/** + * IS `true` for Node.js 10.10 and greater. + */ +export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js new file mode 100644 index 0000000..7e3d441 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; +const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); +if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { + throw new Error(`Unexpected behavior. 
The 'process.versions.node' variable has invalid value: ${process.versions.node}`); +} +const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); +const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); +const SUPPORTED_MAJOR_VERSION = 10; +const SUPPORTED_MINOR_VERSION = 10; +const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; +const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; +/** + * IS `true` for Node.js 10.10 and greater. + */ +exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts new file mode 100644 index 0000000..b9da83e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Dirent, Entry } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function scandir(path: string, callback: AsyncCallback): void; +declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace scandir { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; +export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js new file mode 100644 index 0000000..99c70d3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.scandirSync = exports.scandir = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function scandir(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.scandir = scandir; +function scandirSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.scandirSync = scandirSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts new file mode 100644 index 0000000..5829676 --- /dev/null +++ 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts @@ -0,0 +1,7 @@ +/// +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; +export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; +export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js new file mode 100644 index 0000000..e8e2f0a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const rpl = require("run-parallel"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings, callback) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + readdirWithFileTypes(directory, settings, callback); + return; + } + readdir(directory, settings, callback); +} +exports.read = read; +function readdirWithFileTypes(directory, settings, callback) { + settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const entries = dirents.map((dirent) => ({ + dirent, + name: dirent.name, + path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + })); + if (!settings.followSymbolicLinks) { + callSuccessCallback(callback, entries); + return; + } + const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); + rpl(tasks, (rplError, rplEntries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, rplEntries); + }); + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function makeRplTaskEntry(entry, settings) { + return (done) => { + if (!entry.dirent.isSymbolicLink()) { + done(null, entry); + return; + } + settings.fs.stat(entry.path, (statError, stats) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + done(statError); + return; + } + done(null, entry); + return; + } + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + done(null, entry); + }); + }; +} +function readdir(directory, settings, callback) { + settings.fs.readdir(directory, (readdirError, names) => { + if (readdirError !== null) { + callFailureCallback(callback, readdirError); + return; + } + const tasks = names.map((name) => { + const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + return (done) => { + fsStat.stat(path, settings.fsStatSettings, (error, stats) => { + if (error !== null) { + done(error); + return; + } + const entry = { + name, + path, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + done(null, entry); + }); + }; + }); + 
rpl(tasks, (rplError, entries) => { + if (rplError !== null) { + callFailureCallback(callback, rplError); + return; + } + callSuccessCallback(callback, entries); + }); + }); +} +exports.readdir = readdir; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts new file mode 100644 index 0000000..2b4d08b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts @@ -0,0 +1 @@ +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js new file mode 100644 index 0000000..8724cb5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = void 0; +function joinPathSegments(a, b, separator) { + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). + */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts new file mode 100644 index 0000000..e05c8f0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts @@ -0,0 +1,5 @@ +import type Settings from '../settings'; +import type { Entry } from '../types'; +export declare function read(directory: string, settings: Settings): Entry[]; +export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; +export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js new file mode 100644 index 0000000..146db34 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; +const fsStat = require("@nodelib/fs.stat"); +const constants_1 = require("../constants"); +const utils = require("../utils"); +const common = require("./common"); +function read(directory, settings) { + if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { + return readdirWithFileTypes(directory, settings); + } + return readdir(directory, settings); +} +exports.read = read; +function readdirWithFileTypes(directory, settings) { + const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); + return dirents.map((dirent) => { + const entry = { + dirent, + name: dirent.name, + path: 
common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) + }; + if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { + try { + const stats = settings.fs.statSync(entry.path); + entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); + } + catch (error) { + if (settings.throwErrorOnBrokenSymbolicLink) { + throw error; + } + } + } + return entry; + }); +} +exports.readdirWithFileTypes = readdirWithFileTypes; +function readdir(directory, settings) { + const names = settings.fs.readdirSync(directory); + return names.map((name) => { + const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const stats = fsStat.statSync(entryPath, settings.fsStatSettings); + const entry = { + name, + path: entryPath, + dirent: utils.fs.createDirentFromStats(name, stats) + }; + if (settings.stats) { + entry.stats = stats; + } + return entry; + }); +} +exports.readdir = readdir; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts new file mode 100644 index 0000000..a0db115 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts @@ -0,0 +1,20 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLinks: boolean; + readonly fs: fs.FileSystemAdapter; + readonly pathSegmentSeparator: string; + readonly stats: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly fsStatSettings: fsStat.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js new file mode 100644 index 0000000..15a3e8c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsStat = require("@nodelib/fs.stat"); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.stats = this._getValue(this._options.stats, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + this.fsStatSettings = new fsStat.Settings({ + followSymbolicLink: this.followSymbolicLinks, + fs: this.fs, + throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink + }); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? 
option : value; + } +} +exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts new file mode 100644 index 0000000..f326c5e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts @@ -0,0 +1,20 @@ +/// +import type * as fs from 'fs'; +export interface Entry { + dirent: Dirent; + name: string; + path: string; + stats?: Stats; +} +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; +export interface Dirent { + isBlockDevice: () => boolean; + isCharacterDevice: () => boolean; + isDirectory: () => boolean; + isFIFO: () => boolean; + isFile: () => boolean; + isSocket: () => boolean; + isSymbolicLink: () => boolean; + name: string; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts new file mode 100644 index 0000000..bb863f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts @@ -0,0 +1,2 @@ +import type { Dirent, Stats } from '../types'; +export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js new file mode 100644 index 0000000..ace7c74 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts new file mode 100644 index 0000000..1b41954 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts @@ -0,0 +1,2 @@ +import * as fs from './fs'; +export { fs }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js new file mode 100644 index 0000000..f5de129 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fs = void 0; +const fs = require("./fs"); +exports.fs = fs; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json new file mode 100644 index 0000000..d3a8924 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.scandir", + "version": "2.1.5", + "description": "List files and directories inside the specified directory", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "scandir", + "readdir", + "dirent" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4", + "@types/run-parallel": "^1.1.0" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
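The `@nodelib/fs.scandir` files added above expose a small callback-based and synchronous API (`scandir`, `scandirSync`, `Settings`). A minimal usage sketch, assuming a local `./src` directory and plain console logging (neither is part of the package):

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Callback-style: read a single directory level ('./src' is an assumed example path).
fsScandir.scandir('./src', (error, entries) => {
    if (error !== null) {
        console.error(error);
        return;
    }
    for (const entry of entries) {
        // Each entry carries `name`, `path` and a `dirent`.
        console.log(entry.path, entry.dirent.isDirectory());
    }
});

// Synchronous variant with a pre-created Settings instance;
// `stats: true` attaches an fs.Stats object to each entry.
const settings = new fsScandir.Settings({ stats: true, followSymbolicLinks: true });
const syncEntries = fsScandir.scandirSync('./src', settings);
console.log(syncEntries.length);
```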
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md
new file mode 100644
index 0000000..686f047
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md
@@ -0,0 +1,126 @@
+# @nodelib/fs.stat
+
+> Get the status of a file with some features.
+
+## :bulb: Highlights
+
+Wrapper around the standard `fs.lstat` and `fs.stat` methods with some features.
+
+* :beginner: Normally follows symbolic links.
+* :gear: Can safely work with broken symbolic links.
+
+## Install
+
+```console
+npm install @nodelib/fs.stat
+```
+
+## Usage
+
+```ts
+import * as fsStat from '@nodelib/fs.stat';
+
+fsStat.stat('path', (error, stats) => { /* … */ });
+```
+
+## API
+
+### .stat(path, [optionsOrSettings], callback)
+
+Returns an instance of the `fs.Stats` class for the provided path, using a standard callback style.
+
+```ts
+fsStat.stat('path', (error, stats) => { /* … */ });
+fsStat.stat('path', {}, (error, stats) => { /* … */ });
+fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ });
+```
+
+### .statSync(path, [optionsOrSettings])
+
+Returns an instance of the `fs.Stats` class for the provided path.
+
+```ts
+const stats = fsStat.statSync('path');
+const stats = fsStat.statSync('path', {});
+const stats = fsStat.statSync('path', new fsStat.Settings());
+```
+
+#### path
+
+* Required: `true`
+* Type: `string | Buffer | URL`
+
+A path to a file. If a URL is provided, it must use the `file:` protocol.
+
+#### optionsOrSettings
+
+* Required: `false`
+* Type: `Options | Settings`
+* Default: An instance of `Settings` class
+
+An [`Options`](#options) object or an instance of [`Settings`](#settings) class.
+
+> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
+
+### Settings([options])
+
+A class holding the full settings of the package.
+
+```ts
+const settings = new fsStat.Settings({ followSymbolicLink: false });
+
+const stats = fsStat.statSync('path', settings);
+```
+
+## Options
+
+### `followSymbolicLink`
+
+* Type: `boolean`
+* Default: `true`
+
+Whether to follow symbolic links. Calls `fs.stat` on a symbolic link if `true`.
+
+### `markSymbolicLink`
+
+* Type: `boolean`
+* Default: `false`
+
+Marks a symbolic link by forcing the `isSymbolicLink` function to return `true` (even after `fs.stat`).
+
+> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link.
+
+### `throwErrorOnBrokenSymbolicLink`
+
+* Type: `boolean`
+* Default: `true`
+
+Throws an error when a symbolic link is broken if `true`, or safely returns the `lstat` result if `false`.
+
+### `fs`
+
+* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
+* Default: The built-in FS methods
+
+By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
+
+```ts
+interface FileSystemAdapter {
+  lstat?: typeof fs.lstat;
+  stat?: typeof fs.stat;
+  lstatSync?: typeof fs.lstatSync;
+  statSync?: typeof fs.statSync;
+}
+
+const settings = new fsStat.Settings({
+  fs: { lstat: fakeLstat }
+});
+```
+
+## Changelog
+
+See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for the changelog of each release.
+
+## License
+
+This software is released under the terms of the MIT license.
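The `markSymbolicLink` and `throwErrorOnBrokenSymbolicLink` options described in the README above can be combined in one `Settings` instance; a minimal sketch, where `some/symlink` is only an assumed example path:

```ts
import * as fsStat from '@nodelib/fs.stat';

// Reuse one Settings instance across calls, as the README recommends.
const settings = new fsStat.Settings({
    markSymbolicLink: true,                // keep isSymbolicLink() === true even after following the link
    throwErrorOnBrokenSymbolicLink: false  // fall back to the lstat result instead of throwing
});

const stats = fsStat.statSync('some/symlink', settings);
console.log(stats.isSymbolicLink(), stats.size);
```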
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts new file mode 100644 index 0000000..3af759c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts @@ -0,0 +1,13 @@ +/// +import * as fs from 'fs'; +import type { ErrnoException } from '../types'; +export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; +export declare type StatSynchronousMethod = (path: string) => fs.Stats; +export interface FileSystemAdapter { + lstat: StatAsynchronousMethod; + stat: StatAsynchronousMethod; + lstatSync: StatSynchronousMethod; + statSync: StatSynchronousMethod; +} +export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js new file mode 100644 index 0000000..8dc08c8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +exports.FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + stat: fs.stat, + lstatSync: fs.lstatSync, + statSync: fs.statSync +}; +function createFileSystemAdapter(fsMethods) { + if (fsMethods === undefined) { + return exports.FILE_SYSTEM_ADAPTER; + } + return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); +} +exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts new file mode 100644 index 0000000..f95db99 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts @@ -0,0 +1,12 @@ +import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; +import * as async from './providers/async'; +import Settings, { Options } from './settings'; +import type { Stats } from './types'; +declare type AsyncCallback = async.AsyncCallback; +declare function stat(path: string, callback: AsyncCallback): void; +declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace stat { + function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; +export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js new file mode 100644 index 0000000..b23f751 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.statSync = 
exports.stat = exports.Settings = void 0; +const async = require("./providers/async"); +const sync = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function stat(path, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + async.read(path, getSettings(), optionsOrSettingsOrCallback); + return; + } + async.read(path, getSettings(optionsOrSettingsOrCallback), callback); +} +exports.stat = stat; +function statSync(path, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + return sync.read(path, settings); +} +exports.statSync = statSync; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts new file mode 100644 index 0000000..85423ce --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts @@ -0,0 +1,4 @@ +import type Settings from '../settings'; +import type { ErrnoException, Stats } from '../types'; +export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; +export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js new file mode 100644 index 0000000..983ff0e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings, callback) { + settings.fs.lstat(path, (lstatError, lstat) => { + if (lstatError !== null) { + callFailureCallback(callback, lstatError); + return; + } + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + callSuccessCallback(callback, lstat); + return; + } + settings.fs.stat(path, (statError, stat) => { + if (statError !== null) { + if (settings.throwErrorOnBrokenSymbolicLink) { + callFailureCallback(callback, statError); + return; + } + callSuccessCallback(callback, lstat); + return; + } + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + callSuccessCallback(callback, stat); + }); + }); +} +exports.read = read; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, result) { + callback(null, result); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts new file mode 100644 index 0000000..428c3d7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts @@ -0,0 +1,3 @@ +import type Settings from '../settings'; +import type { Stats } from '../types'; +export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js new file mode 100644 index 0000000..1521c36 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.read = void 0; +function read(path, settings) { + const lstat = settings.fs.lstatSync(path); + if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { + return lstat; + } + try { + const stat = settings.fs.statSync(path); + if (settings.markSymbolicLink) { + stat.isSymbolicLink = () => true; + } + return stat; + } + catch (error) { + if (!settings.throwErrorOnBrokenSymbolicLink) { + return lstat; + } + throw error; + } +} +exports.read = read; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts new file mode 100644 index 0000000..f4b3d44 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts @@ -0,0 +1,16 @@ +import * as fs from './adapters/fs'; +export interface Options { + followSymbolicLink?: boolean; + fs?: Partial; + markSymbolicLink?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly followSymbolicLink: boolean; + readonly fs: fs.FileSystemAdapter; + readonly markSymbolicLink: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js new file mode 100644 index 0000000..111ec09 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = require("./adapters/fs"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); + this.fs = fs.createFileSystemAdapter(this._options.fs); + this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? 
option : value; + } +} +exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts new file mode 100644 index 0000000..74c08ed --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts @@ -0,0 +1,4 @@ +/// +import type * as fs from 'fs'; +export declare type Stats = fs.Stats; +export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json new file mode 100644 index 0000000..f2540c2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json @@ -0,0 +1,37 @@ +{ + "name": "@nodelib/fs.stat", + "version": "2.0.5", + "description": "Get the status of a file with some features", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "stat" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md new file mode 100644 index 0000000..6ccc08d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md @@ -0,0 +1,215 @@ +# @nodelib/fs.walk + +> A library for efficiently walking a directory recursively. + +## :bulb: Highlights + +* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). +* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). +* :gear: Built-in directories/files and error filtering system. +* :link: Can safely work with broken symbolic links. + +## Install + +```console +npm install @nodelib/fs.walk +``` + +## Usage + +```ts +import * as fsWalk from '@nodelib/fs.walk'; + +fsWalk.walk('path', (error, entries) => { /* … */ }); +``` + +## API + +### .walk(path, [optionsOrSettings], callback) + +Reads the directory recursively and asynchronously. Requires a callback function. + +> :book: If you want to use the Promise API, use `util.promisify`. + +```ts +fsWalk.walk('path', (error, entries) => { /* … */ }); +fsWalk.walk('path', {}, (error, entries) => { /* … */ }); +fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); +``` + +### .walkStream(path, [optionsOrSettings]) + +Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. + +```ts +const stream = fsWalk.walkStream('path'); +const stream = fsWalk.walkStream('path', {}); +const stream = fsWalk.walkStream('path', new fsWalk.Settings()); +``` + +### .walkSync(path, [optionsOrSettings]) + +Reads the directory recursively and synchronously. Returns an array of entries. + +```ts +const entries = fsWalk.walkSync('path'); +const entries = fsWalk.walkSync('path', {}); +const entries = fsWalk.walkSync('path', new fsWalk.Settings()); +``` + +#### path + +* Required: `true` +* Type: `string | Buffer | URL` + +A path to a file. If a URL is provided, it must use the `file:` protocol. + +#### optionsOrSettings + +* Required: `false` +* Type: `Options | Settings` +* Default: An instance of `Settings` class + +An [`Options`](#options) object or an instance of [`Settings`](#settings) class. + +> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. + +### Settings([options]) + +A class of full settings of the package. + +```ts +const settings = new fsWalk.Settings({ followSymbolicLinks: true }); + +const entries = fsWalk.walkSync('path', settings); +``` + +## Entry + +* `name` — The name of the entry (`unknown.txt`). +* `path` — The path of the entry relative to call directory (`root/unknown.txt`). +* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. +* [`stats`] — An instance of `fs.Stats` class. 
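As the note under `.walk()` above suggests, the callback API can be wrapped with `util.promisify` to get a Promise-based variant; a minimal sketch, where `some/dir` is only an assumed example path:

```ts
import { promisify } from 'util';
import * as fsWalk from '@nodelib/fs.walk';

// walk declares __promisify__, so util.promisify resolves with the entries array.
const walk = promisify(fsWalk.walk);

async function listEntries(): Promise<void> {
    const entries = await walk('some/dir');
    for (const entry of entries) {
        console.log(entry.path);
    }
}

listEntries().catch(console.error);
```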
+ +## Options + +### basePath + +* Type: `string` +* Default: `undefined` + +By default, all paths are built relative to the root path. You can use this option to set custom root path. + +In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. + +```ts +fsWalk.walkSync('root'); // → ['root/file.txt'] +fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] +``` + +### concurrency + +* Type: `number` +* Default: `Infinity` + +The maximum number of concurrent calls to `fs.readdir`. + +> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). + +### deepFilter + +* Type: [`DeepFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the directory will be read deep or not. + +```ts +// Skip all directories that starts with `node_modules` +const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); +``` + +### entryFilter + +* Type: [`EntryFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that indicates whether the entry will be included to results or not. + +```ts +// Exclude all `.js` files from results +const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); +``` + +### errorFilter + +* Type: [`ErrorFilterFunction`](./src/settings.ts) +* Default: `undefined` + +A function that allows you to skip errors that occur when reading directories. + +For example, you can skip `ENOENT` errors if required: + +```ts +// Skip all ENOENT errors +const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; +``` + +### stats + +* Type: `boolean` +* Default: `false` + +Adds an instance of `fs.Stats` class to the [`Entry`](#entry). + +> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. + +### followSymbolicLinks + +* Type: `boolean` +* Default: `false` + +Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. + +### `throwErrorOnBrokenSymbolicLink` + +* Type: `boolean` +* Default: `true` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +### `pathSegmentSeparator` + +* Type: `string` +* Default: `path.sep` + +By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. + +### `fs` + +* Type: `FileSystemAdapter` +* Default: A default FS methods + +By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. + +```ts +interface FileSystemAdapter { + lstat: typeof fs.lstat; + stat: typeof fs.stat; + lstatSync: typeof fs.lstatSync; + statSync: typeof fs.statSync; + readdir: typeof fs.readdir; + readdirSync: typeof fs.readdirSync; +} + +const settings = new fsWalk.Settings({ + fs: { lstat: fakeLstat } +}); +``` + +## Changelog + +See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. 
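The filtering options documented above (`deepFilter`, `entryFilter`, `errorFilter`, `stats`) are typically combined in a single `Settings` instance; the sketch below also consumes the object-mode stream returned by `walkStream` with `for await`. The path, filter predicates, and error codes are assumed examples:

```ts
import * as fsWalk from '@nodelib/fs.walk';

// Example settings: skip node_modules, keep only files, tolerate missing/forbidden paths.
const settings = new fsWalk.Settings({
    stats: true,
    deepFilter: (entry) => !entry.path.includes('node_modules'),
    entryFilter: (entry) => entry.dirent.isFile(),
    errorFilter: (error) => error.code === 'ENOENT' || error.code === 'EPERM'
});

async function main(): Promise<void> {
    // walkStream returns an object-mode Readable that yields Entry objects.
    for await (const entry of fsWalk.walkStream('some/dir', settings)) {
        console.log((entry as fsWalk.Entry).path);
    }
}

main().catch(console.error);
```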
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts new file mode 100644 index 0000000..8864c7b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts @@ -0,0 +1,14 @@ +/// +import type { Readable } from 'stream'; +import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; +import { AsyncCallback } from './providers/async'; +import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; +import type { Entry } from './types'; +declare function walk(directory: string, callback: AsyncCallback): void; +declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; +declare namespace walk { + function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; +} +declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; +declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; +export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js new file mode 100644 index 0000000..1520787 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +exports.Settings = settings_1.default; +function walk(directory, optionsOrSettingsOrCallback, callback) { + if (typeof optionsOrSettingsOrCallback === 'function') { + new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); + return; + } + new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); +} +exports.walk = walk; +function walkSync(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new sync_1.default(directory, settings); + return provider.read(); +} +exports.walkSync = walkSync; +function walkStream(directory, optionsOrSettings) { + const settings = getSettings(optionsOrSettings); + const provider = new stream_1.default(directory, settings); + return provider.read(); +} +exports.walkStream = walkStream; +function getSettings(settingsOrOptions = {}) { + if (settingsOrOptions instanceof settings_1.default) { + return settingsOrOptions; + } + return new settings_1.default(settingsOrOptions); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts new file mode 100644 index 0000000..0f6717d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts @@ -0,0 +1,12 @@ +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +import 
type { Entry, Errno } from '../types'; +export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; +export default class AsyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + private readonly _storage; + constructor(_root: string, _settings: Settings); + read(callback: AsyncCallback): void; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js new file mode 100644 index 0000000..51d3be5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +class AsyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._storage = []; + } + read(callback) { + this._reader.onError((error) => { + callFailureCallback(callback, error); + }); + this._reader.onEntry((entry) => { + this._storage.push(entry); + }); + this._reader.onEnd(() => { + callSuccessCallback(callback, this._storage); + }); + this._reader.read(); + } +} +exports.default = AsyncProvider; +function callFailureCallback(callback, error) { + callback(error); +} +function callSuccessCallback(callback, entries) { + callback(null, entries); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts new file mode 100644 index 0000000..874f60c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts @@ -0,0 +1,4 @@ +import AsyncProvider from './async'; +import StreamProvider from './stream'; +import SyncProvider from './sync'; +export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js new file mode 100644 index 0000000..4c2529c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; +const async_1 = require("./async"); +exports.AsyncProvider = async_1.default; +const stream_1 = require("./stream"); +exports.StreamProvider = stream_1.default; +const sync_1 = require("./sync"); +exports.SyncProvider = sync_1.default; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts new file mode 100644 index 0000000..294185f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts @@ -0,0 +1,12 @@ +/// +import { Readable } from 'stream'; +import AsyncReader from '../readers/async'; +import type Settings from '../settings'; +export default class StreamProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: AsyncReader; + 
protected readonly _stream: Readable; + constructor(_root: string, _settings: Settings); + read(): Readable; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js new file mode 100644 index 0000000..51298b0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const async_1 = require("../readers/async"); +class StreamProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new async_1.default(this._root, this._settings); + this._stream = new stream_1.Readable({ + objectMode: true, + read: () => { }, + destroy: () => { + if (!this._reader.isDestroyed) { + this._reader.destroy(); + } + } + }); + } + read() { + this._reader.onError((error) => { + this._stream.emit('error', error); + }); + this._reader.onEntry((entry) => { + this._stream.push(entry); + }); + this._reader.onEnd(() => { + this._stream.push(null); + }); + this._reader.read(); + return this._stream; + } +} +exports.default = StreamProvider; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts new file mode 100644 index 0000000..551c42e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts @@ -0,0 +1,10 @@ +import SyncReader from '../readers/sync'; +import type Settings from '../settings'; +import type { Entry } from '../types'; +export default class SyncProvider { + private readonly _root; + private readonly _settings; + protected readonly _reader: SyncReader; + constructor(_root: string, _settings: Settings); + read(): Entry[]; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js new file mode 100644 index 0000000..faab6ca --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +class SyncProvider { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._reader = new sync_1.default(this._root, this._settings); + } + read() { + return this._reader.read(); + } +} +exports.default = SyncProvider; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts new file mode 100644 index 0000000..9acf4e6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts @@ -0,0 +1,30 @@ +/// +import { EventEmitter } from 'events'; +import * as fsScandir from '@nodelib/fs.scandir'; +import type Settings from '../settings'; +import type { Entry, Errno } from '../types'; +import Reader from './reader'; +declare type EntryEventCallback = (entry: Entry) => void; +declare type ErrorEventCallback = (error: Errno) => void; +declare type EndEventCallback = () => 
void; +export default class AsyncReader extends Reader { + protected readonly _settings: Settings; + protected readonly _scandir: typeof fsScandir.scandir; + protected readonly _emitter: EventEmitter; + private readonly _queue; + private _isFatalError; + private _isDestroyed; + constructor(_root: string, _settings: Settings); + read(): EventEmitter; + get isDestroyed(): boolean; + destroy(): void; + onEntry(callback: EntryEventCallback): void; + onError(callback: ErrorEventCallback): void; + onEnd(callback: EndEventCallback): void; + private _pushToQueue; + private _worker; + private _handleError; + private _handleEntry; + private _emitEntry; +} +export {}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js new file mode 100644 index 0000000..ebe8dd5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js @@ -0,0 +1,97 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const events_1 = require("events"); +const fsScandir = require("@nodelib/fs.scandir"); +const fastq = require("fastq"); +const common = require("./common"); +const reader_1 = require("./reader"); +class AsyncReader extends reader_1.default { + constructor(_root, _settings) { + super(_root, _settings); + this._settings = _settings; + this._scandir = fsScandir.scandir; + this._emitter = new events_1.EventEmitter(); + this._queue = fastq(this._worker.bind(this), this._settings.concurrency); + this._isFatalError = false; + this._isDestroyed = false; + this._queue.drain = () => { + if (!this._isFatalError) { + this._emitter.emit('end'); + } + }; + } + read() { + this._isFatalError = false; + this._isDestroyed = false; + setImmediate(() => { + this._pushToQueue(this._root, this._settings.basePath); + }); + return this._emitter; + } + get isDestroyed() { + return this._isDestroyed; + } + destroy() { + if (this._isDestroyed) { + throw new Error('The reader is already destroyed'); + } + this._isDestroyed = true; + this._queue.killAndDrain(); + } + onEntry(callback) { + this._emitter.on('entry', callback); + } + onError(callback) { + this._emitter.once('error', callback); + } + onEnd(callback) { + this._emitter.once('end', callback); + } + _pushToQueue(directory, base) { + const queueItem = { directory, base }; + this._queue.push(queueItem, (error) => { + if (error !== null) { + this._handleError(error); + } + }); + } + _worker(item, done) { + this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { + if (error !== null) { + done(error, undefined); + return; + } + for (const entry of entries) { + this._handleEntry(entry, item.base); + } + done(null, undefined); + }); + } + _handleError(error) { + if (this._isDestroyed || !common.isFatalError(this._settings, error)) { + return; + } + this._isFatalError = true; + this._isDestroyed = true; + this._emitter.emit('error', error); + } + _handleEntry(entry, base) { + if (this._isDestroyed || this._isFatalError) { + return; + } + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._emitEntry(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); + } + } + _emitEntry(entry) { + this._emitter.emit('entry', entry); + } +} +exports.default = AsyncReader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts new file mode 100644 index 0000000..5985f97 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts @@ -0,0 +1,7 @@ +import type { FilterFunction } from '../settings'; +import type Settings from '../settings'; +import type { Errno } from '../types'; +export declare function isFatalError(settings: Settings, error: Errno): boolean; +export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; +export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; +export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js new file mode 100644 index 0000000..a93572f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; +function isFatalError(settings, error) { + if (settings.errorFilter === null) { + return true; + } + return !settings.errorFilter(error); +} +exports.isFatalError = isFatalError; +function isAppliedFilter(filter, value) { + return filter === null || filter(value); +} +exports.isAppliedFilter = isAppliedFilter; +function replacePathSegmentSeparator(filepath, separator) { + return filepath.split(/[/\\]/).join(separator); +} +exports.replacePathSegmentSeparator = replacePathSegmentSeparator; +function joinPathSegments(a, b, separator) { + if (a === '') { + return b; + } + /** + * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
+ */ + if (a.endsWith(separator)) { + return a + b; + } + return a + separator + b; +} +exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts new file mode 100644 index 0000000..e1f383b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts @@ -0,0 +1,6 @@ +import type Settings from '../settings'; +export default class Reader { + protected readonly _root: string; + protected readonly _settings: Settings; + constructor(_root: string, _settings: Settings); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js new file mode 100644 index 0000000..782f07c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const common = require("./common"); +class Reader { + constructor(_root, _settings) { + this._root = _root; + this._settings = _settings; + this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); + } +} +exports.default = Reader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts new file mode 100644 index 0000000..af41033 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts @@ -0,0 +1,15 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry } from '../types'; +import Reader from './reader'; +export default class SyncReader extends Reader { + protected readonly _scandir: typeof fsScandir.scandirSync; + private readonly _storage; + private readonly _queue; + read(): Entry[]; + private _pushToQueue; + private _handleQueue; + private _handleDirectory; + private _handleError; + private _handleEntry; + private _pushToStorage; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js new file mode 100644 index 0000000..9a8d5a6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsScandir = require("@nodelib/fs.scandir"); +const common = require("./common"); +const reader_1 = require("./reader"); +class SyncReader extends reader_1.default { + constructor() { + super(...arguments); + this._scandir = fsScandir.scandirSync; + this._storage = []; + this._queue = new Set(); + } + read() { + this._pushToQueue(this._root, this._settings.basePath); + this._handleQueue(); + return this._storage; + } + _pushToQueue(directory, base) { + this._queue.add({ directory, base }); + } + _handleQueue() { + for (const item of this._queue.values()) { + this._handleDirectory(item.directory, item.base); + } + } + _handleDirectory(directory, base) { + try { + const entries = this._scandir(directory, this._settings.fsScandirSettings); + for (const entry of entries) { + this._handleEntry(entry, base); + 
} + } + catch (error) { + this._handleError(error); + } + } + _handleError(error) { + if (!common.isFatalError(this._settings, error)) { + return; + } + throw error; + } + _handleEntry(entry, base) { + const fullpath = entry.path; + if (base !== undefined) { + entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); + } + if (common.isAppliedFilter(this._settings.entryFilter, entry)) { + this._pushToStorage(entry); + } + if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { + this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); + } + } + _pushToStorage(entry) { + this._storage.push(entry); + } +} +exports.default = SyncReader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts new file mode 100644 index 0000000..d1c4b45 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts @@ -0,0 +1,30 @@ +import * as fsScandir from '@nodelib/fs.scandir'; +import type { Entry, Errno } from './types'; +export declare type FilterFunction = (value: T) => boolean; +export declare type DeepFilterFunction = FilterFunction; +export declare type EntryFilterFunction = FilterFunction; +export declare type ErrorFilterFunction = FilterFunction; +export interface Options { + basePath?: string; + concurrency?: number; + deepFilter?: DeepFilterFunction; + entryFilter?: EntryFilterFunction; + errorFilter?: ErrorFilterFunction; + followSymbolicLinks?: boolean; + fs?: Partial; + pathSegmentSeparator?: string; + stats?: boolean; + throwErrorOnBrokenSymbolicLink?: boolean; +} +export default class Settings { + private readonly _options; + readonly basePath?: string; + readonly concurrency: number; + readonly deepFilter: DeepFilterFunction | null; + readonly entryFilter: EntryFilterFunction | null; + readonly errorFilter: ErrorFilterFunction | null; + readonly pathSegmentSeparator: string; + readonly fsScandirSettings: fsScandir.Settings; + constructor(_options?: Options); + private _getValue; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js new file mode 100644 index 0000000..d7a85c8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const fsScandir = require("@nodelib/fs.scandir"); +class Settings { + constructor(_options = {}) { + this._options = _options; + this.basePath = this._getValue(this._options.basePath, undefined); + this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); + this.deepFilter = this._getValue(this._options.deepFilter, null); + this.entryFilter = this._getValue(this._options.entryFilter, null); + this.errorFilter = this._getValue(this._options.errorFilter, null); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); + this.fsScandirSettings = new fsScandir.Settings({ + followSymbolicLinks: this._options.followSymbolicLinks, + fs: this._options.fs, + pathSegmentSeparator: this._options.pathSegmentSeparator, + stats: this._options.stats, + throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink + 
}); + } + _getValue(option, value) { + return option !== null && option !== void 0 ? option : value; + } +} +exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts new file mode 100644 index 0000000..6ee9bd3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts @@ -0,0 +1,8 @@ +/// +import type * as scandir from '@nodelib/fs.scandir'; +export declare type Entry = scandir.Entry; +export declare type Errno = NodeJS.ErrnoException; +export interface QueueItem { + directory: string; + base?: string; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json new file mode 100644 index 0000000..86bfce4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json @@ -0,0 +1,44 @@ +{ + "name": "@nodelib/fs.walk", + "version": "1.2.8", + "description": "A library for efficiently walking a directory recursively", + "license": "MIT", + "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", + "keywords": [ + "NodeLib", + "fs", + "FileSystem", + "file system", + "walk", + "scanner", + "crawler" + ], + "engines": { + "node": ">= 8" + }, + "files": [ + "out/**", + "!out/**/*.map", + "!out/**/*.spec.*", + "!out/**/tests/**" + ], + "main": "out/index.js", + "typings": "out/index.d.ts", + "scripts": { + "clean": "rimraf {tsconfig.tsbuildinfo,out}", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc -b .", + "compile:watch": "tsc -p . --watch --sourceMap", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile:watch" + }, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "devDependencies": { + "@nodelib/fs.macchiato": "1.0.4" + }, + "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md new file mode 100644 index 0000000..f59dd60 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md @@ -0,0 +1,586 @@ +# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) + +> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save braces +``` + +## v3.0.0 Released!! + +See the [changelog](CHANGELOG.md) for details. + +## Why use braces? + +Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. + +- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) +- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. +- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. +- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). +- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. 
Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). +- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` +- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` +- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` +- [Supports escaping](#escaping) - To prevent evaluation of special characters. + +## Usage + +The main export is a function that takes one or more brace `patterns` and `options`. + +```js +const braces = require('braces'); +// braces(patterns[, options]); + +console.log(braces(['{01..05}', '{a..e}'])); +//=> ['(0[1-5])', '([a-e])'] + +console.log(braces(['{01..05}', '{a..e}'], { expand: true })); +//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] +``` + +### Brace Expansion vs. Compilation + +By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. + +**Compiled** + +```js +console.log(braces('a/{x,y,z}/b')); +//=> ['a/(x|y|z)/b'] +console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); +//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] +``` + +**Expanded** + +Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): + +```js +console.log(braces('a/{x,y,z}/b', { expand: true })); +//=> ['a/x/b', 'a/y/b', 'a/z/b'] + +console.log(braces.expand('{01..10}')); +//=> ['01','02','03','04','05','06','07','08','09','10'] +``` + +### Lists + +Expand lists (like Bash "sets"): + +```js +console.log(braces('a/{foo,bar,baz}/*.js')); +//=> ['a/(foo|bar|baz)/*.js'] + +console.log(braces.expand('a/{foo,bar,baz}/*.js')); +//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] +``` + +### Sequences + +Expand ranges of characters (like Bash "sequences"): + +```js +console.log(braces.expand('{1..3}')); // ['1', '2', '3'] +console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] +console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] +console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] + +// supports zero-padded ranges +console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] +console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] +``` + +See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. + +### Steppped ranges + +Steps, or increments, may be used with ranges: + +```js +console.log(braces.expand('{2..10..2}')); +//=> ['2', '4', '6', '8', '10'] + +console.log(braces('{2..10..2}')); +//=> ['(2|4|6|8|10)'] +``` + +When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. + +### Nesting + +Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. 
+ +**"Expanded" braces** + +```js +console.log(braces.expand('a{b,c,/{x,y}}/e')); +//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] + +console.log(braces.expand('a/{x,{1..5},y}/c')); +//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] +``` + +**"Optimized" braces** + +```js +console.log(braces('a{b,c,/{x,y}}/e')); +//=> ['a(b|c|/(x|y))/e'] + +console.log(braces('a/{x,{1..5},y}/c')); +//=> ['a/(x|([1-5])|y)/c'] +``` + +### Escaping + +**Escaping braces** + +A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: + +```js +console.log(braces.expand('a\\{d,c,b}e')); +//=> ['a{d,c,b}e'] + +console.log(braces.expand('a{d,c,b\\}e')); +//=> ['a{d,c,b}e'] +``` + +**Escaping commas** + +Commas inside braces may also be escaped: + +```js +console.log(braces.expand('a{b\\,c}d')); +//=> ['a{b,c}d'] + +console.log(braces.expand('a{d\\,c,b}e')); +//=> ['ad,ce', 'abe'] +``` + +**Single items** + +Following bash conventions, a brace pattern is also not expanded when it contains a single character: + +```js +console.log(braces.expand('a{b}c')); +//=> ['a{b}c'] +``` + +## Options + +### options.maxLength + +**Type**: `Number` + +**Default**: `10,000` + +**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. + +```js +console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error +``` + +### options.expand + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). + +```js +console.log(braces('a/{b,c}/d', { expand: true })); +//=> [ 'a/b/d', 'a/c/d' ] +``` + +### options.nodupes + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Remove duplicates from the returned array. + +### options.rangeLimit + +**Type**: `Number` + +**Default**: `1000` + +**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. + +You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. + +**Examples** + +```js +// pattern exceeds the "rangeLimit", so it's optimized automatically +console.log(braces.expand('{1..1000}')); +//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] + +// pattern does not exceed "rangeLimit", so it's NOT optimized +console.log(braces.expand('{1..100}')); +//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] +``` + +### options.transform + +**Type**: `Function` + +**Default**: `undefined` + +**Description**: Customize range expansion. + +**Example: Transforming non-numeric values** + +```js +const alpha = braces.expand('x/{a..e}/y', { + transform(value, index) { + // When non-numeric values are passed, "value" is a character code. 
+ return 'foo/' + String.fromCharCode(value) + '-' + index; + }, +}); +console.log(alpha); +//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] +``` + +**Example: Transforming numeric values** + +```js +const numeric = braces.expand('{1..5}', { + transform(value) { + // when numeric values are passed, "value" is a number + return 'foo/' + value * 2; + }, +}); +console.log(numeric); +//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] +``` + +### options.quantifiers + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. + +Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) + +The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. + +**Examples** + +```js +const braces = require('braces'); +console.log(braces('a/b{1,3}/{x,y,z}')); +//=> [ 'a/b(1|3)/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); +//=> [ 'a/b{1,3}/(x|y|z)' ] +console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); +//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] +``` + +### options.keepEscaping + +**Type**: `Boolean` + +**Default**: `undefined` + +**Description**: Do not strip backslashes that were used for escaping from the result. + +## What is "brace expansion"? + +Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). + +In addition to "expansion", braces are also used for matching. In other words: + +- [brace expansion](#brace-expansion) is for generating new lists +- [brace matching](#brace-matching) is for filtering existing lists + +
+More about brace expansion (click to expand) + +There are two main types of brace expansion: + +1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` +2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". + +Here are some example brace patterns to illustrate how they work: + +**Sets** + +``` +{a,b,c} => a b c +{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 +``` + +**Sequences** + +``` +{1..9} => 1 2 3 4 5 6 7 8 9 +{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 +{1..20..3} => 1 4 7 10 13 16 19 +{a..j} => a b c d e f g h i j +{j..a} => j i h g f e d c b a +{a..z..3} => a d g j m p s v y +``` + +**Combination** + +Sets and sequences can be mixed together or used along with any other strings. + +``` +{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 +foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar +``` + +The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. + +## Brace matching + +In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. + +For example, the pattern `foo/{1..3}/bar` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +``` + +But not: + +``` +baz/1/qux +baz/2/qux +baz/3/qux +``` + +Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: + +``` +foo/1/bar +foo/2/bar +foo/3/bar +baz/1/qux +baz/2/qux +baz/3/qux +``` + +## Brace matching pitfalls + +Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. + +### tldr + +**"brace bombs"** + +- brace expansion can eat up a huge amount of processing resources +- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially +- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) + +For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. + +### The solution + +Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. + +### Geometric complexity + +At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
+ +For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: + +``` +{1,2}{3,4} => (2X2) => 13 14 23 24 +{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 +``` + +But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity: + +``` +{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 + 249 257 258 259 267 268 269 347 348 349 357 + 358 359 367 368 369 +``` + +Now, imagine how this complexity grows given that each element is a n-tuple: + +``` +{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) +{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) +``` + +Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. + +**More information** + +Interested in learning more about brace expansion? + +- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) +- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) +- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) + +
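+
+As a rough sanity check of the growth described above, here is a minimal sketch that uses only the `braces.expand()` API documented earlier in this readme to count how many strings a few patterns produce; the expected counts follow directly from the set sizes shown in the examples above.
+
+```js
+const braces = require('braces');
+
+// Two sets of two elements each: 2 x 2 = 4 combinations.
+console.log(braces.expand('{1,2}{3,4}').length); //=> 4
+
+// Three sets of three elements each: 3 x 3 x 3 = 27 combinations.
+console.log(braces.expand('{1,2,3}{4,5,6}{7,8,9}').length); //=> 27
+
+// Two numeric ranges of 100 values each: 100 x 100 = 10,000 combinations.
+console.log(braces.expand('{1..100}{1..100}').length); //=> 10000
+```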
+ +## Performance + +Braces is not only screaming fast, it's also more accurate the other brace expansion libraries. + +### Better algorithms + +Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. + +Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. + +**The proof is in the numbers** + +Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. + +| **Pattern** | **braces** | **[minimatch][]** | +| --------------------------- | ------------------- | ---------------------------- | +| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) | +| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | +| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | +| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | +| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | +| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | +| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | +| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | +| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | +| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | +| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | +| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | +| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | +| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | +| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | +| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | +| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | + +### Faster algorithms + +When you need expansion, braces is still much faster. + +_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ + +| **Pattern** | **braces** | **[minimatch][]** | +| --------------- | --------------------------- | ---------------------------- | +| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | +| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | +| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | +| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | +| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | +| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | +| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | +| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | + +If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). + +## Benchmarks + +### Running benchmarks + +Install dev dependencies: + +```bash +npm i -d && npm benchmark +``` + +### Latest results + +Braces is more accurate, without sacrificing performance. 
+ +```bash +● expand - range (expanded) + braces x 53,167 ops/sec ±0.12% (102 runs sampled) + minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) +● expand - range (optimized for regex) + braces x 373,442 ops/sec ±0.04% (100 runs sampled) + minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) +● expand - nested ranges (expanded) + braces x 33,921 ops/sec ±0.09% (99 runs sampled) + minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) +● expand - nested ranges (optimized for regex) + braces x 287,479 ops/sec ±0.52% (98 runs sampled) + minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) +● expand - set (expanded) + braces x 238,243 ops/sec ±0.19% (97 runs sampled) + minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) +● expand - set (optimized for regex) + braces x 321,844 ops/sec ±0.10% (97 runs sampled) + minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) +● expand - nested sets (expanded) + braces x 165,371 ops/sec ±0.42% (96 runs sampled) + minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) +● expand - nested sets (optimized for regex) + braces x 242,948 ops/sec ±0.12% (99 runs sampled) + minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) +``` + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Contributors + +| **Commits** | **Contributor** | +| ----------- | ------------------------------------------------------------- | +| 197 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [doowb](https://github.com/doowb) | +| 1 | [es128](https://github.com/es128) | +| 1 | [eush77](https://github.com/eush77) | +| 1 | [hemanth](https://github.com/hemanth) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +- [GitHub Profile](https://github.com/jonschlinkert) +- [Twitter Profile](https://twitter.com/jonschlinkert) +- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +--- + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js new file mode 100644 index 0000000..d222c13 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js @@ -0,0 +1,170 @@ +'use strict'; + +const stringify = require('./lib/stringify'); +const compile = require('./lib/compile'); +const expand = require('./lib/expand'); +const parse = require('./lib/parse'); + +/** + * Expand the given pattern or create a regex-compatible string. + * + * ```js + * const braces = require('braces'); + * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] + * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {String} + * @api public + */ + +const braces = (input, options = {}) => { + let output = []; + + if (Array.isArray(input)) { + for (const pattern of input) { + const result = braces.create(pattern, options); + if (Array.isArray(result)) { + output.push(...result); + } else { + output.push(result); + } + } + } else { + output = [].concat(braces.create(input, options)); + } + + if (options && options.expand === true && options.nodupes === true) { + output = [...new Set(output)]; + } + return output; +}; + +/** + * Parse the given `str` with the given `options`. + * + * ```js + * // braces.parse(pattern, [, options]); + * const ast = braces.parse('a/{b,c}/d'); + * console.log(ast); + * ``` + * @param {String} pattern Brace pattern to parse + * @param {Object} options + * @return {Object} Returns an AST + * @api public + */ + +braces.parse = (input, options = {}) => parse(input, options); + +/** + * Creates a braces string from an AST, or an AST node. + * + * ```js + * const braces = require('braces'); + * let ast = braces.parse('foo/{a,b}/bar'); + * console.log(stringify(ast.nodes[2])); //=> '{a,b}' + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.stringify = (input, options = {}) => { + if (typeof input === 'string') { + return stringify(braces.parse(input, options), options); + } + return stringify(input, options); +}; + +/** + * Compiles a brace pattern into a regex-compatible, optimized string. + * This method is called by the main [braces](#braces) function by default. 
+ * + * ```js + * const braces = require('braces'); + * console.log(braces.compile('a/{b,c}/d')); + * //=> ['a/(b|c)/d'] + * ``` + * @param {String} `input` Brace pattern or AST. + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.compile = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + return compile(input, options); +}; + +/** + * Expands a brace pattern into an array. This method is called by the + * main [braces](#braces) function when `options.expand` is true. Before + * using this method it's recommended that you read the [performance notes](#performance)) + * and advantages of using [.compile](#compile) instead. + * + * ```js + * const braces = require('braces'); + * console.log(braces.expand('a/{b,c}/d')); + * //=> ['a/b/d', 'a/c/d']; + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.expand = (input, options = {}) => { + if (typeof input === 'string') { + input = braces.parse(input, options); + } + + let result = expand(input, options); + + // filter out empty strings if specified + if (options.noempty === true) { + result = result.filter(Boolean); + } + + // filter out duplicates if specified + if (options.nodupes === true) { + result = [...new Set(result)]; + } + + return result; +}; + +/** + * Processes a brace pattern and returns either an expanded array + * (if `options.expand` is true), a highly optimized regex-compatible string. + * This method is called by the main [braces](#braces) function. + * + * ```js + * const braces = require('braces'); + * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) + * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' + * ``` + * @param {String} `pattern` Brace pattern + * @param {Object} `options` + * @return {Array} Returns an array of expanded values. + * @api public + */ + +braces.create = (input, options = {}) => { + if (input === '' || input.length < 3) { + return [input]; + } + + return options.expand !== true + ? braces.compile(input, options) + : braces.expand(input, options); +}; + +/** + * Expose "braces" + */ + +module.exports = braces; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js new file mode 100644 index 0000000..dce69be --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js @@ -0,0 +1,60 @@ +'use strict'; + +const fill = require('fill-range'); +const utils = require('./utils'); + +const compile = (ast, options = {}) => { + const walk = (node, parent = {}) => { + const invalidBlock = utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + const invalid = invalidBlock === true || invalidNode === true; + const prefix = options.escapeInvalid === true ? '\\' : ''; + let output = ''; + + if (node.isOpen === true) { + return prefix + node.value; + } + + if (node.isClose === true) { + console.log('node.isClose', prefix, node.value); + return prefix + node.value; + } + + if (node.type === 'open') { + return invalid ? prefix + node.value : '('; + } + + if (node.type === 'close') { + return invalid ? prefix + node.value : ')'; + } + + if (node.type === 'comma') { + return node.prev.type === 'comma' ? '' : invalid ? 
node.value : '|'; + } + + if (node.value) { + return node.value; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); + + if (range.length !== 0) { + return args.length > 1 && range.length > 1 ? `(${range})` : range; + } + } + + if (node.nodes) { + for (const child of node.nodes) { + output += walk(child, node); + } + } + + return output; + }; + + return walk(ast); +}; + +module.exports = compile; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js new file mode 100644 index 0000000..2bb3b88 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js @@ -0,0 +1,57 @@ +'use strict'; + +module.exports = { + MAX_LENGTH: 10000, + + // Digits + CHAR_0: '0', /* 0 */ + CHAR_9: '9', /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 'A', /* A */ + CHAR_LOWERCASE_A: 'a', /* a */ + CHAR_UPPERCASE_Z: 'Z', /* Z */ + CHAR_LOWERCASE_Z: 'z', /* z */ + + CHAR_LEFT_PARENTHESES: '(', /* ( */ + CHAR_RIGHT_PARENTHESES: ')', /* ) */ + + CHAR_ASTERISK: '*', /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: '&', /* & */ + CHAR_AT: '@', /* @ */ + CHAR_BACKSLASH: '\\', /* \ */ + CHAR_BACKTICK: '`', /* ` */ + CHAR_CARRIAGE_RETURN: '\r', /* \r */ + CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ + CHAR_COLON: ':', /* : */ + CHAR_COMMA: ',', /* , */ + CHAR_DOLLAR: '$', /* . */ + CHAR_DOT: '.', /* . */ + CHAR_DOUBLE_QUOTE: '"', /* " */ + CHAR_EQUAL: '=', /* = */ + CHAR_EXCLAMATION_MARK: '!', /* ! */ + CHAR_FORM_FEED: '\f', /* \f */ + CHAR_FORWARD_SLASH: '/', /* / */ + CHAR_HASH: '#', /* # */ + CHAR_HYPHEN_MINUS: '-', /* - */ + CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ + CHAR_LEFT_CURLY_BRACE: '{', /* { */ + CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ + CHAR_LINE_FEED: '\n', /* \n */ + CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ + CHAR_PERCENT: '%', /* % */ + CHAR_PLUS: '+', /* + */ + CHAR_QUESTION_MARK: '?', /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ + CHAR_RIGHT_CURLY_BRACE: '}', /* } */ + CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ + CHAR_SEMICOLON: ';', /* ; */ + CHAR_SINGLE_QUOTE: '\'', /* ' */ + CHAR_SPACE: ' ', /* */ + CHAR_TAB: '\t', /* \t */ + CHAR_UNDERSCORE: '_', /* _ */ + CHAR_VERTICAL_LINE: '|', /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js new file mode 100644 index 0000000..35b2c41 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js @@ -0,0 +1,113 @@ +'use strict'; + +const fill = require('fill-range'); +const stringify = require('./stringify'); +const utils = require('./utils'); + +const append = (queue = '', stash = '', enclose = false) => { + const result = []; + + queue = [].concat(queue); + stash = [].concat(stash); + + if (!stash.length) return queue; + if (!queue.length) { + return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; + } + + for (const item of queue) { + if (Array.isArray(item)) { + for (const value of item) { + result.push(append(value, stash, enclose)); + } + } else { + for (let ele of stash) { + if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; + result.push(Array.isArray(ele) ? 
append(item, ele, enclose) : item + ele); + } + } + } + return utils.flatten(result); +}; + +const expand = (ast, options = {}) => { + const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit; + + const walk = (node, parent = {}) => { + node.queue = []; + + let p = parent; + let q = parent.queue; + + while (p.type !== 'brace' && p.type !== 'root' && p.parent) { + p = p.parent; + q = p.queue; + } + + if (node.invalid || node.dollar) { + q.push(append(q.pop(), stringify(node, options))); + return; + } + + if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { + q.push(append(q.pop(), ['{}'])); + return; + } + + if (node.nodes && node.ranges > 0) { + const args = utils.reduce(node.nodes); + + if (utils.exceedsLimit(...args, options.step, rangeLimit)) { + throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); + } + + let range = fill(...args, options); + if (range.length === 0) { + range = stringify(node, options); + } + + q.push(append(q.pop(), range)); + node.nodes = []; + return; + } + + const enclose = utils.encloseBrace(node); + let queue = node.queue; + let block = node; + + while (block.type !== 'brace' && block.type !== 'root' && block.parent) { + block = block.parent; + queue = block.queue; + } + + for (let i = 0; i < node.nodes.length; i++) { + const child = node.nodes[i]; + + if (child.type === 'comma' && node.type === 'brace') { + if (i === 1) queue.push(''); + queue.push(''); + continue; + } + + if (child.type === 'close') { + q.push(append(q.pop(), queue, enclose)); + continue; + } + + if (child.value && child.type !== 'open') { + queue.push(append(queue.pop(), child.value)); + continue; + } + + if (child.nodes) { + walk(child, node); + } + } + + return queue; + }; + + return utils.flatten(walk(ast)); +}; + +module.exports = expand; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js new file mode 100644 index 0000000..3a6988e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js @@ -0,0 +1,331 @@ +'use strict'; + +const stringify = require('./stringify'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + CHAR_BACKSLASH, /* \ */ + CHAR_BACKTICK, /* ` */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_RIGHT_SQUARE_BRACKET, /* ] */ + CHAR_DOUBLE_QUOTE, /* " */ + CHAR_SINGLE_QUOTE, /* ' */ + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE +} = require('./constants'); + +/** + * parse + */ + +const parse = (input, options = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + const opts = options || {}; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + if (input.length > max) { + throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); + } + + const ast = { type: 'root', input, nodes: [] }; + const stack = [ast]; + let block = ast; + let prev = ast; + let brackets = 0; + const length = input.length; + let index = 0; + let depth = 0; + let value; + + /** + * Helpers + */ + + const advance = () => input[index++]; + const push = node => { + if (node.type === 'text' && prev.type === 'dot') { + prev.type = 'text'; + } + + if (prev && prev.type === 'text' && node.type === 'text') { + prev.value += node.value; + return; + } + + block.nodes.push(node); + node.parent = block; + node.prev = prev; + prev = node; + return node; + }; + + push({ type: 'bos' }); + + while (index < length) { + block = stack[stack.length - 1]; + value = advance(); + + /** + * Invalid chars + */ + + if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { + continue; + } + + /** + * Escaped chars + */ + + if (value === CHAR_BACKSLASH) { + push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); + continue; + } + + /** + * Right square bracket (literal): ']' + */ + + if (value === CHAR_RIGHT_SQUARE_BRACKET) { + push({ type: 'text', value: '\\' + value }); + continue; + } + + /** + * Left square bracket: '[' + */ + + if (value === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + + let next; + + while (index < length && (next = advance())) { + value += next; + + if (next === CHAR_LEFT_SQUARE_BRACKET) { + brackets++; + continue; + } + + if (next === CHAR_BACKSLASH) { + value += advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + brackets--; + + if (brackets === 0) { + break; + } + } + } + + push({ type: 'text', value }); + continue; + } + + /** + * Parentheses + */ + + if (value === CHAR_LEFT_PARENTHESES) { + block = push({ type: 'paren', nodes: [] }); + stack.push(block); + push({ type: 'text', value }); + continue; + } + + if (value === CHAR_RIGHT_PARENTHESES) { + if (block.type !== 'paren') { + push({ type: 'text', value }); + continue; + } + block = stack.pop(); + push({ type: 'text', value }); + block = stack[stack.length - 1]; + continue; + } + + /** + * Quotes: '|"|` + */ + + if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { + const open = value; + let next; + + if (options.keepQuotes !== true) { + value = ''; + } + + while (index < length && (next = advance())) { + if (next === CHAR_BACKSLASH) { + value += next + advance(); + continue; + } + + if (next === open) { + if (options.keepQuotes === true) value += next; + break; + } + + value += next; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Left curly brace: '{' + */ + + if (value === CHAR_LEFT_CURLY_BRACE) { + depth++; + + const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; + const brace = { + type: 'brace', + open: true, + close: false, + dollar, + depth, + commas: 0, + ranges: 0, + nodes: [] + }; + + block = push(brace); + stack.push(block); + push({ type: 'open', value }); + continue; + } + + /** + * Right curly brace: '}' + */ + + if (value === CHAR_RIGHT_CURLY_BRACE) { + if (block.type !== 'brace') { + push({ type: 'text', value }); + continue; + } + + const type = 'close'; + block = stack.pop(); + block.close = true; + + push({ type, value }); + depth--; + + block = stack[stack.length - 1]; + continue; + } + + /** + * Comma: ',' + */ + + if (value === CHAR_COMMA && depth 
> 0) { + if (block.ranges > 0) { + block.ranges = 0; + const open = block.nodes.shift(); + block.nodes = [open, { type: 'text', value: stringify(block) }]; + } + + push({ type: 'comma', value }); + block.commas++; + continue; + } + + /** + * Dot: '.' + */ + + if (value === CHAR_DOT && depth > 0 && block.commas === 0) { + const siblings = block.nodes; + + if (depth === 0 || siblings.length === 0) { + push({ type: 'text', value }); + continue; + } + + if (prev.type === 'dot') { + block.range = []; + prev.value += value; + prev.type = 'range'; + + if (block.nodes.length !== 3 && block.nodes.length !== 5) { + block.invalid = true; + block.ranges = 0; + prev.type = 'text'; + continue; + } + + block.ranges++; + block.args = []; + continue; + } + + if (prev.type === 'range') { + siblings.pop(); + + const before = siblings[siblings.length - 1]; + before.value += prev.value + value; + prev = before; + block.ranges--; + continue; + } + + push({ type: 'dot', value }); + continue; + } + + /** + * Text + */ + + push({ type: 'text', value }); + } + + // Mark imbalanced braces and brackets as invalid + do { + block = stack.pop(); + + if (block.type !== 'root') { + block.nodes.forEach(node => { + if (!node.nodes) { + if (node.type === 'open') node.isOpen = true; + if (node.type === 'close') node.isClose = true; + if (!node.nodes) node.type = 'text'; + node.invalid = true; + } + }); + + // get the location of the block on parent.nodes (block's siblings) + const parent = stack[stack.length - 1]; + const index = parent.nodes.indexOf(block); + // replace the (invalid) block with it's nodes + parent.nodes.splice(index, 1, ...block.nodes); + } + } while (stack.length > 0); + + push({ type: 'eos' }); + return ast; +}; + +module.exports = parse; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js new file mode 100644 index 0000000..8bcf872 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js @@ -0,0 +1,32 @@ +'use strict'; + +const utils = require('./utils'); + +module.exports = (ast, options = {}) => { + const stringify = (node, parent = {}) => { + const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); + const invalidNode = node.invalid === true && options.escapeInvalid === true; + let output = ''; + + if (node.value) { + if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { + return '\\' + node.value; + } + return node.value; + } + + if (node.value) { + return node.value; + } + + if (node.nodes) { + for (const child of node.nodes) { + output += stringify(child); + } + } + return output; + }; + + return stringify(ast); +}; + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js new file mode 100644 index 0000000..d19311f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js @@ -0,0 +1,122 @@ +'use strict'; + +exports.isInteger = num => { + if (typeof num === 'number') { + return Number.isInteger(num); + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isInteger(Number(num)); + } + return false; +}; + +/** + * Find a node of the given type + */ + +exports.find = (node, type) => node.nodes.find(node => node.type === type); + +/** + * Find a node of the given type + */ + +exports.exceedsLimit = (min, max, step = 1, limit) => { + if (limit === 
false) return false; + if (!exports.isInteger(min) || !exports.isInteger(max)) return false; + return ((Number(max) - Number(min)) / Number(step)) >= limit; +}; + +/** + * Escape the given node with '\\' before node.value + */ + +exports.escapeNode = (block, n = 0, type) => { + const node = block.nodes[n]; + if (!node) return; + + if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { + if (node.escaped !== true) { + node.value = '\\' + node.value; + node.escaped = true; + } + } +}; + +/** + * Returns true if the given brace node should be enclosed in literal braces + */ + +exports.encloseBrace = node => { + if (node.type !== 'brace') return false; + if ((node.commas >> 0 + node.ranges >> 0) === 0) { + node.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a brace node is invalid. + */ + +exports.isInvalidBrace = block => { + if (block.type !== 'brace') return false; + if (block.invalid === true || block.dollar) return true; + if ((block.commas >> 0 + block.ranges >> 0) === 0) { + block.invalid = true; + return true; + } + if (block.open !== true || block.close !== true) { + block.invalid = true; + return true; + } + return false; +}; + +/** + * Returns true if a node is an open or close node + */ + +exports.isOpenOrClose = node => { + if (node.type === 'open' || node.type === 'close') { + return true; + } + return node.open === true || node.close === true; +}; + +/** + * Reduce an array of text nodes. + */ + +exports.reduce = nodes => nodes.reduce((acc, node) => { + if (node.type === 'text') acc.push(node.value); + if (node.type === 'range') node.type = 'text'; + return acc; +}, []); + +/** + * Flatten an array + */ + +exports.flatten = (...args) => { + const result = []; + + const flat = arr => { + for (let i = 0; i < arr.length; i++) { + const ele = arr[i]; + + if (Array.isArray(ele)) { + flat(ele); + continue; + } + + if (ele !== undefined) { + result.push(ele); + } + } + return result; + }; + + flat(args); + return result; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json new file mode 100644 index 0000000..c3c056e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json @@ -0,0 +1,77 @@ +{ + "name": "braces", + "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", + "version": "3.0.3", + "homepage": "https://github.com/micromatch/braces", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Elan Shanker (https://github.com/es128)", + "Eugene Sharygin (https://github.com/eush77)", + "hemanth.hm (http://h3manth.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/braces", + "bugs": { + "url": "https://github.com/micromatch/braces/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "mocha", + "benchmark": "node benchmark" + }, + "dependencies": { + "fill-range": "^7.1.1" + }, + "devDependencies": { + "ansi-colors": "^3.2.4", + "bash-path": "^2.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1" + }, + "keywords": [ + "alpha", + "alphabetical", + "bash", + "brace", + "braces", + "expand", + "expansion", + "filepath", + "fill", + "fs", + "glob", + "globbing", + "letter", + "match", + "matches", + "matching", + "number", + "numerical", + "path", + "range", + "ranges", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "lint": { + "reflinks": true + }, + "plugins": [ + "gulp-format-md" + ] + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE new file mode 100644 index 0000000..8407b9a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Made With MOXY Lda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md new file mode 100644 index 0000000..1ed9252 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md @@ -0,0 +1,89 @@ +# cross-spawn + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][ci-image]][ci-url] [![Build status][appveyor-image]][appveyor-url] + +[npm-url]:https://npmjs.org/package/cross-spawn +[downloads-image]:https://img.shields.io/npm/dm/cross-spawn.svg +[npm-image]:https://img.shields.io/npm/v/cross-spawn.svg +[ci-url]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml +[ci-image]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml/badge.svg +[appveyor-url]:https://ci.appveyor.com/project/satazor/node-cross-spawn +[appveyor-image]:https://img.shields.io/appveyor/ci/satazor/node-cross-spawn/master.svg + +A cross platform solution to node's spawn and spawnSync. + +## Installation + +Node.js version 8 and up: +`$ npm install cross-spawn` + +Node.js version 7 and under: +`$ npm install cross-spawn@6` + +## Why + +Node has issues when using spawn on Windows: + +- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318) +- It does not support [shebangs](https://en.wikipedia.org/wiki/Shebang_(Unix)) +- Has problems running commands with [spaces](https://github.com/nodejs/node/issues/7367) +- Has problems running commands with posix relative paths (e.g.: `./my-folder/my-executable`) +- Has an [issue](https://github.com/moxystudio/node-cross-spawn/issues/82) with command shims (files in `node_modules/.bin/`), where arguments with quotes and parenthesis would result in [invalid syntax error](https://github.com/moxystudio/node-cross-spawn/blob/e77b8f22a416db46b6196767bcd35601d7e11d54/test/index.test.js#L149) +- No `options.shell` support on node `<v4.8` +- Shebang support is limited to `#!/usr/bin/env <program>`, where `<program>` must not contain any arguments. +If you would like to have the shebang support improved, feel free to contribute via a pull-request. + +Remember to always test your code on Windows! + + +## Tests + +`$ npm test` +`$ npm test -- --watch` during development + + +## License + +Released under the [MIT License](https://www.opensource.org/licenses/mit-license.php).
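For reference, a minimal usage sketch of the drop-in API exposed by the index.js added next (`spawn` and `spawn.sync`); the `npm` invocation is only an illustrative choice, not something required by this patch:

```js
const spawn = require('cross-spawn');

// Async: drop-in replacement for child_process.spawn()
const child = spawn('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
child.on('close', code => console.log(`npm exited with code ${code}`));

// Sync: mirrors child_process.spawnSync()
const result = spawn.sync('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
console.log(result.status);
```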
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js new file mode 100644 index 0000000..5509742 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js @@ -0,0 +1,39 @@ +'use strict'; + +const cp = require('child_process'); +const parse = require('./lib/parse'); +const enoent = require('./lib/enoent'); + +function spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); + + return spawned; +} + +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + + return result; +} + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js new file mode 100644 index 0000000..da33471 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js @@ -0,0 +1,59 @@ +'use strict'; + +const isWin = process.platform === 'win32'; + +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); +} + +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } + + const originalEmit = cp.emit; + + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed); + + if (err) { + return originalEmit.call(cp, 'error', err); + } + } + + return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params + }; +} + +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); + } + + return null; +} + +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); + } + + return null; +} + +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js new file mode 100644 index 0000000..0129d74 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js @@ -0,0 +1,91 @@ +'use strict'; + +const path = 
require('path'); +const resolveCommand = require('./util/resolveCommand'); +const escape = require('./util/escape'); +const readShebang = require('./util/readShebang'); + +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + +function detectShebang(parsed) { + parsed.file = resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; +} + +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } + + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); + + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); + + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); + + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } + + return parsed; +} + +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } + + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original + + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; + + // Delegate further parsing to shell or non-shell + return options.shell ? 
parsed : parseNonShell(parsed); +} + +module.exports = parse; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js new file mode 100644 index 0000000..7bf2905 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js @@ -0,0 +1,47 @@ +'use strict'; + +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + return arg; +} + +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; + + // Algorithm below is based on https://qntm.org/cmd + // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input + // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information + + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"'); + + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1'); + + // All other backslashes occur literally + + // Quote the whole thing: + arg = `"${arg}"`; + + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + arg = arg.replace(metaCharsRegExp, '^$1'); + } + + return arg; +} + +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js new file mode 100644 index 0000000..5e83733 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js @@ -0,0 +1,23 @@ +'use strict'; + +const fs = require('fs'); +const shebangCommand = require('shebang-command'); + +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 150; + const buffer = Buffer.alloc(size); + + let fd; + + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } + + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} + +module.exports = readShebang; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js new file mode 100644 index 0000000..7972455 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js @@ -0,0 +1,52 @@ +'use strict'; + +const path = require('path'); +const which = require('which'); +const getPathKey = require('path-key'); + +function resolveCommandAttempt(parsed, withoutPathExt) { + const env = parsed.options.env || process.env; + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; + // Worker threads do not have process.chdir() + const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled; + + // If a custom `cwd` was specified, we need to 
change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (shouldSwitchCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } + } + + let resolved; + + try { + resolved = which.sync(parsed.command, { + path: env[getPathKey({ env })], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + if (shouldSwitchCwd) { + process.chdir(cwd); + } + } + + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); + } + + return resolved; +} + +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} + +module.exports = resolveCommand; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json new file mode 100644 index 0000000..24b2eb4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json @@ -0,0 +1,73 @@ +{ + "name": "cross-spawn", + "version": "7.0.6", + "description": "Cross platform child_process#spawn and child_process#spawnSync", + "keywords": [ + "spawn", + "spawnSync", + "windows", + "cross-platform", + "path-ext", + "shebang", + "cmd", + "execute" + ], + "author": "André Cruz ", + "homepage": "https://github.com/moxystudio/node-cross-spawn", + "repository": { + "type": "git", + "url": "git@github.com:moxystudio/node-cross-spawn.git" + }, + "license": "MIT", + "main": "index.js", + "files": [ + "lib" + ], + "scripts": { + "lint": "eslint .", + "test": "jest --env node --coverage", + "prerelease": "npm t && npm run lint", + "release": "standard-version", + "postrelease": "git push --follow-tags origin HEAD && npm publish" + }, + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.js": [ + "eslint --fix", + "git add" + ] + }, + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "devDependencies": { + "@commitlint/cli": "^8.1.0", + "@commitlint/config-conventional": "^8.1.0", + "babel-core": "^6.26.3", + "babel-jest": "^24.9.0", + "babel-preset-moxy": "^3.1.0", + "eslint": "^5.16.0", + "eslint-config-moxy": "^7.1.0", + "husky": "^3.0.5", + "jest": "^24.9.0", + "lint-staged": "^9.2.5", + "mkdirp": "^0.5.1", + "rimraf": "^3.0.0", + "standard-version": "^9.5.0" + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts new file mode 100644 index 0000000..417d535 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts @@ -0,0 +1,564 @@ +/// +import {ChildProcess} from 'child_process'; +import {Stream, Readable as ReadableStream} from 'stream'; + +declare namespace execa { + type StdioOption = + | 'pipe' + | 'ipc' + | 'ignore' + | 'inherit' + | Stream + | number + | undefined; + + interface CommonOptions { + /** + Kill the spawned process when the parent process exits unless either: + - the spawned process is 
[`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) + - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit + + @default true + */ + readonly cleanup?: boolean; + + /** + Prefer locally installed binaries when looking for a binary to execute. + + If you `$ npm install foo`, you can then `execa('foo')`. + + @default false + */ + readonly preferLocal?: boolean; + + /** + Preferred path to find locally installed binaries in (use with `preferLocal`). + + @default process.cwd() + */ + readonly localDir?: string; + + /** + Path to the Node.js executable to use in child processes. + + This can be either an absolute path or a path relative to the `cwd` option. + + Requires `preferLocal` to be `true`. + + For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. + + @default process.execPath + */ + readonly execPath?: string; + + /** + Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected. + + If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data. + + @default true + */ + readonly buffer?: boolean; + + /** + Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + + @default 'pipe' + */ + readonly stdin?: StdioOption; + + /** + Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + + @default 'pipe' + */ + readonly stdout?: StdioOption; + + /** + Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + + @default 'pipe' + */ + readonly stderr?: StdioOption; + + /** + Setting this to `false` resolves the promise with the error instead of rejecting it. + + @default true + */ + readonly reject?: boolean; + + /** + Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved. + + @default false + */ + readonly all?: boolean; + + /** + Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. + + @default true + */ + readonly stripFinalNewline?: boolean; + + /** + Set to `false` if you don't want to extend the environment variables when providing the `env` property. + + @default true + */ + readonly extendEnv?: boolean; + + /** + Current working directory of the child process. + + @default process.cwd() + */ + readonly cwd?: string; + + /** + Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this. + + @default process.env + */ + readonly env?: NodeJS.ProcessEnv; + + /** + Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified. + */ + readonly argv0?: string; + + /** + Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. + + @default 'pipe' + */ + readonly stdio?: 'pipe' | 'ignore' | 'inherit' | readonly StdioOption[]; + + /** + Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execa.node()`: + - `json`: Uses `JSON.stringify()` and `JSON.parse()`. 
+ - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) + + Requires Node.js `13.2.0` or later. + + [More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) + + @default 'json' + */ + readonly serialization?: 'json' | 'advanced'; + + /** + Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). + + @default false + */ + readonly detached?: boolean; + + /** + Sets the user identity of the process. + */ + readonly uid?: number; + + /** + Sets the group identity of the process. + */ + readonly gid?: number; + + /** + If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. + + We recommend against using this option since it is: + - not cross-platform, encouraging shell-specific syntax. + - slower, because of the additional shell interpretation. + - unsafe, potentially allowing command injection. + + @default false + */ + readonly shell?: boolean | string; + + /** + Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. + + @default 'utf8' + */ + readonly encoding?: EncodingType; + + /** + If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds. + + @default 0 + */ + readonly timeout?: number; + + /** + Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB. + + @default 100_000_000 + */ + readonly maxBuffer?: number; + + /** + Signal value to be used when the spawned process will be killed. + + @default 'SIGTERM' + */ + readonly killSignal?: string | number; + + /** + If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. + + @default false + */ + readonly windowsVerbatimArguments?: boolean; + + /** + On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. + + @default true + */ + readonly windowsHide?: boolean; + } + + interface Options extends CommonOptions { + /** + Write some input to the `stdin` of your binary. + */ + readonly input?: string | Buffer | ReadableStream; + } + + interface SyncOptions extends CommonOptions { + /** + Write some input to the `stdin` of your binary. + */ + readonly input?: string | Buffer; + } + + interface NodeOptions extends Options { + /** + The Node.js executable to use. + + @default process.execPath + */ + readonly nodePath?: string; + + /** + List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. + + @default process.execArgv + */ + readonly nodeOptions?: string[]; + } + + interface ExecaReturnBase { + /** + The file and arguments that were run, for logging purposes. + + This is not escaped and should not be executed directly as a process, including using `execa()` or `execa.command()`. + */ + command: string; + + /** + Same as `command` but escaped. + + This is meant to be copy and pasted into a shell, for debugging purposes. 
+ Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execa.command()`. + */ + escapedCommand: string; + + /** + The numeric exit code of the process that was run. + */ + exitCode: number; + + /** + The output of the process on stdout. + */ + stdout: StdoutStderrType; + + /** + The output of the process on stderr. + */ + stderr: StdoutStderrType; + + /** + Whether the process failed to run. + */ + failed: boolean; + + /** + Whether the process timed out. + */ + timedOut: boolean; + + /** + Whether the process was killed. + */ + killed: boolean; + + /** + The name of the signal that was used to terminate the process. For example, `SIGFPE`. + + If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. + */ + signal?: string; + + /** + A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. + + If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. + */ + signalDescription?: string; + } + + interface ExecaSyncReturnValue + extends ExecaReturnBase { + } + + /** + Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. + + The child process fails when: + - its exit code is not `0` + - it was killed with a signal + - timing out + - being canceled + - there's not enough memory or there are already too many child processes + */ + interface ExecaReturnValue + extends ExecaSyncReturnValue { + /** + The output of the process with `stdout` and `stderr` interleaved. + + This is `undefined` if either: + - the `all` option is `false` (default value) + - `execa.sync()` was used + */ + all?: StdoutErrorType; + + /** + Whether the process was canceled. + */ + isCanceled: boolean; + } + + interface ExecaSyncError + extends Error, + ExecaReturnBase { + /** + Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored. + + The child process stderr then stdout are appended to the end, separated with newlines and not interleaved. + */ + message: string; + + /** + This is the same as the `message` property except it does not include the child process stdout/stderr. + */ + shortMessage: string; + + /** + Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. + + This is `undefined` unless the child process exited due to an `error` event or a timeout. + */ + originalMessage?: string; + } + + interface ExecaError + extends ExecaSyncError { + /** + The output of the process with `stdout` and `stderr` interleaved. + + This is `undefined` if either: + - the `all` option is `false` (default value) + - `execa.sync()` was used + */ + all?: StdoutErrorType; + + /** + Whether the process was canceled. + */ + isCanceled: boolean; + } + + interface KillOptions { + /** + Milliseconds to wait for the child process to terminate before sending `SIGKILL`. + + Can be disabled with `false`. 
+ + @default 5000 + */ + forceKillAfterTimeout?: number | false; + } + + interface ExecaChildPromise { + /** + Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). + + This is `undefined` if either: + - the `all` option is `false` (the default value) + - both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) + */ + all?: ReadableStream; + + catch( + onRejected?: (reason: ExecaError) => ResultType | PromiseLike + ): Promise | ResultType>; + + /** + Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. + */ + kill(signal?: string, options?: KillOptions): void; + + /** + Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. + */ + cancel(): void; + } + + type ExecaChildProcess = ChildProcess & + ExecaChildPromise & + Promise>; +} + +declare const execa: { + /** + Execute a file. + + Think of this as a mix of `child_process.execFile` and `child_process.spawn`. + + @param file - The program/script to execute. + @param arguments - Arguments to pass to `file` on execution. + @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. + + @example + ``` + import execa = require('execa'); + + (async () => { + const {stdout} = await execa('echo', ['unicorns']); + console.log(stdout); + //=> 'unicorns' + + // Cancelling a spawned process + + const subprocess = execa('node'); + + setTimeout(() => { + subprocess.cancel() + }, 1000); + + try { + await subprocess; + } catch (error) { + console.log(subprocess.killed); // true + console.log(error.isCanceled); // true + } + })(); + + // Pipe the child process stdout to the current stdout + execa('echo', ['unicorns']).stdout.pipe(process.stdout); + ``` + */ + ( + file: string, + arguments?: readonly string[], + options?: execa.Options + ): execa.ExecaChildProcess; + ( + file: string, + arguments?: readonly string[], + options?: execa.Options + ): execa.ExecaChildProcess; + (file: string, options?: execa.Options): execa.ExecaChildProcess; + (file: string, options?: execa.Options): execa.ExecaChildProcess< + Buffer + >; + + /** + Execute a file synchronously. + + This method throws an `Error` if the command fails. + + @param file - The program/script to execute. + @param arguments - Arguments to pass to `file` on execution. + @returns A result `Object` with `stdout` and `stderr` properties. 
+ */ + sync( + file: string, + arguments?: readonly string[], + options?: execa.SyncOptions + ): execa.ExecaSyncReturnValue; + sync( + file: string, + arguments?: readonly string[], + options?: execa.SyncOptions + ): execa.ExecaSyncReturnValue; + sync(file: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; + sync( + file: string, + options?: execa.SyncOptions + ): execa.ExecaSyncReturnValue; + + /** + Same as `execa()` except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. + + If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. + + The `shell` option must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. + + @param command - The program/script to execute and its arguments. + @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. + + @example + ``` + import execa = require('execa'); + + (async () => { + const {stdout} = await execa.command('echo unicorns'); + console.log(stdout); + //=> 'unicorns' + })(); + ``` + */ + command(command: string, options?: execa.Options): execa.ExecaChildProcess; + command(command: string, options?: execa.Options): execa.ExecaChildProcess; + + /** + Same as `execa.command()` but synchronous. + + @param command - The program/script to execute and its arguments. + @returns A result `Object` with `stdout` and `stderr` properties. + */ + commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; + commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; + + /** + Execute a Node.js script as a child process. + + Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): + - the current Node version and options are used. This can be overridden using the `nodePath` and `nodeArguments` options. + - the `shell` option cannot be used + - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) + + @param scriptPath - Node.js script to execute. + @param arguments - Arguments to pass to `scriptPath` on execution. + @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. 
+ */ + node( + scriptPath: string, + arguments?: readonly string[], + options?: execa.NodeOptions + ): execa.ExecaChildProcess; + node( + scriptPath: string, + arguments?: readonly string[], + options?: execa.Options + ): execa.ExecaChildProcess; + node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; + node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; +}; + +export = execa; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js new file mode 100644 index 0000000..6fc9f12 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js @@ -0,0 +1,268 @@ +'use strict'; +const path = require('path'); +const childProcess = require('child_process'); +const crossSpawn = require('cross-spawn'); +const stripFinalNewline = require('strip-final-newline'); +const npmRunPath = require('npm-run-path'); +const onetime = require('onetime'); +const makeError = require('./lib/error'); +const normalizeStdio = require('./lib/stdio'); +const {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} = require('./lib/kill'); +const {handleInput, getSpawnedResult, makeAllStream, validateInputSync} = require('./lib/stream'); +const {mergePromise, getSpawnedPromise} = require('./lib/promise'); +const {joinCommand, parseCommand, getEscapedCommand} = require('./lib/command'); + +const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100; + +const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => { + const env = extendEnv ? {...process.env, ...envOption} : envOption; + + if (preferLocal) { + return npmRunPath.env({env, cwd: localDir, execPath}); + } + + return env; +}; + +const handleArguments = (file, args, options = {}) => { + const parsed = crossSpawn._parse(file, args, options); + file = parsed.command; + args = parsed.args; + options = parsed.options; + + options = { + maxBuffer: DEFAULT_MAX_BUFFER, + buffer: true, + stripFinalNewline: true, + extendEnv: true, + preferLocal: false, + localDir: options.cwd || process.cwd(), + execPath: process.execPath, + encoding: 'utf8', + reject: true, + cleanup: true, + all: false, + windowsHide: true, + ...options + }; + + options.env = getEnv(options); + + options.stdio = normalizeStdio(options); + + if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') { + // #116 + args.unshift('/q'); + } + + return {file, args, options, parsed}; +}; + +const handleOutput = (options, value, error) => { + if (typeof value !== 'string' && !Buffer.isBuffer(value)) { + // When `execa.sync()` errors, we normalize it to '' to mimic `execa()` + return error === undefined ? 
undefined : ''; + } + + if (options.stripFinalNewline) { + return stripFinalNewline(value); + } + + return value; +}; + +const execa = (file, args, options) => { + const parsed = handleArguments(file, args, options); + const command = joinCommand(file, args); + const escapedCommand = getEscapedCommand(file, args); + + validateTimeout(parsed.options); + + let spawned; + try { + spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options); + } catch (error) { + // Ensure the returned error is always both a promise and a child process + const dummySpawned = new childProcess.ChildProcess(); + const errorPromise = Promise.reject(makeError({ + error, + stdout: '', + stderr: '', + all: '', + command, + escapedCommand, + parsed, + timedOut: false, + isCanceled: false, + killed: false + })); + return mergePromise(dummySpawned, errorPromise); + } + + const spawnedPromise = getSpawnedPromise(spawned); + const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise); + const processDone = setExitHandler(spawned, parsed.options, timedPromise); + + const context = {isCanceled: false}; + + spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned)); + spawned.cancel = spawnedCancel.bind(null, spawned, context); + + const handlePromise = async () => { + const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone); + const stdout = handleOutput(parsed.options, stdoutResult); + const stderr = handleOutput(parsed.options, stderrResult); + const all = handleOutput(parsed.options, allResult); + + if (error || exitCode !== 0 || signal !== null) { + const returnedError = makeError({ + error, + exitCode, + signal, + stdout, + stderr, + all, + command, + escapedCommand, + parsed, + timedOut, + isCanceled: context.isCanceled, + killed: spawned.killed + }); + + if (!parsed.options.reject) { + return returnedError; + } + + throw returnedError; + } + + return { + command, + escapedCommand, + exitCode: 0, + stdout, + stderr, + all, + failed: false, + timedOut: false, + isCanceled: false, + killed: false + }; + }; + + const handlePromiseOnce = onetime(handlePromise); + + handleInput(spawned, parsed.options.input); + + spawned.all = makeAllStream(spawned, parsed.options); + + return mergePromise(spawned, handlePromiseOnce); +}; + +module.exports = execa; + +module.exports.sync = (file, args, options) => { + const parsed = handleArguments(file, args, options); + const command = joinCommand(file, args); + const escapedCommand = getEscapedCommand(file, args); + + validateInputSync(parsed.options); + + let result; + try { + result = childProcess.spawnSync(parsed.file, parsed.args, parsed.options); + } catch (error) { + throw makeError({ + error, + stdout: '', + stderr: '', + all: '', + command, + escapedCommand, + parsed, + timedOut: false, + isCanceled: false, + killed: false + }); + } + + const stdout = handleOutput(parsed.options, result.stdout, result.error); + const stderr = handleOutput(parsed.options, result.stderr, result.error); + + if (result.error || result.status !== 0 || result.signal !== null) { + const error = makeError({ + stdout, + stderr, + error: result.error, + signal: result.signal, + exitCode: result.status, + command, + escapedCommand, + parsed, + timedOut: result.error && result.error.code === 'ETIMEDOUT', + isCanceled: false, + killed: result.signal !== null + }); + + if (!parsed.options.reject) { + return error; + } + + throw error; + } + + return { + command, + escapedCommand, + 
exitCode: 0, + stdout, + stderr, + failed: false, + timedOut: false, + isCanceled: false, + killed: false + }; +}; + +module.exports.command = (command, options) => { + const [file, ...args] = parseCommand(command); + return execa(file, args, options); +}; + +module.exports.commandSync = (command, options) => { + const [file, ...args] = parseCommand(command); + return execa.sync(file, args, options); +}; + +module.exports.node = (scriptPath, args, options = {}) => { + if (args && !Array.isArray(args) && typeof args === 'object') { + options = args; + args = []; + } + + const stdio = normalizeStdio.node(options); + const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect')); + + const { + nodePath = process.execPath, + nodeOptions = defaultExecArgv + } = options; + + return execa( + nodePath, + [ + ...nodeOptions, + scriptPath, + ...(Array.isArray(args) ? args : []) + ], + { + ...options, + stdin: undefined, + stdout: undefined, + stderr: undefined, + stdio, + shell: false + } + ); +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js new file mode 100644 index 0000000..859b006 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js @@ -0,0 +1,52 @@ +'use strict'; +const normalizeArgs = (file, args = []) => { + if (!Array.isArray(args)) { + return [file]; + } + + return [file, ...args]; +}; + +const NO_ESCAPE_REGEXP = /^[\w.-]+$/; +const DOUBLE_QUOTES_REGEXP = /"/g; + +const escapeArg = arg => { + if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) { + return arg; + } + + return `"${arg.replace(DOUBLE_QUOTES_REGEXP, '\\"')}"`; +}; + +const joinCommand = (file, args) => { + return normalizeArgs(file, args).join(' '); +}; + +const getEscapedCommand = (file, args) => { + return normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' '); +}; + +const SPACES_REGEXP = / +/g; + +// Handle `execa.command()` +const parseCommand = command => { + const tokens = []; + for (const token of command.trim().split(SPACES_REGEXP)) { + // Allow spaces to be escaped by a backslash if not meant as a delimiter + const previousToken = tokens[tokens.length - 1]; + if (previousToken && previousToken.endsWith('\\')) { + // Merge previous token with current one + tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`; + } else { + tokens.push(token); + } + } + + return tokens; +}; + +module.exports = { + joinCommand, + getEscapedCommand, + parseCommand +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js new file mode 100644 index 0000000..4214467 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js @@ -0,0 +1,88 @@ +'use strict'; +const {signalsByName} = require('human-signals'); + +const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => { + if (timedOut) { + return `timed out after ${timeout} milliseconds`; + } + + if (isCanceled) { + return 'was canceled'; + } + + if (errorCode !== undefined) { + return `failed with ${errorCode}`; + } + + if (signal !== undefined) { + return `was killed with ${signal} (${signalDescription})`; + } + + if (exitCode !== undefined) { + return `failed with exit code ${exitCode}`; + } + + return 'failed'; +}; + +const makeError = ({ + stdout, + stderr, + all, + error, + signal, + 
exitCode, + command, + escapedCommand, + timedOut, + isCanceled, + killed, + parsed: {options: {timeout}} +}) => { + // `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`. + // We normalize them to `undefined` + exitCode = exitCode === null ? undefined : exitCode; + signal = signal === null ? undefined : signal; + const signalDescription = signal === undefined ? undefined : signalsByName[signal].description; + + const errorCode = error && error.code; + + const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}); + const execaMessage = `Command ${prefix}: ${command}`; + const isError = Object.prototype.toString.call(error) === '[object Error]'; + const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage; + const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n'); + + if (isError) { + error.originalMessage = error.message; + error.message = message; + } else { + error = new Error(message); + } + + error.shortMessage = shortMessage; + error.command = command; + error.escapedCommand = escapedCommand; + error.exitCode = exitCode; + error.signal = signal; + error.signalDescription = signalDescription; + error.stdout = stdout; + error.stderr = stderr; + + if (all !== undefined) { + error.all = all; + } + + if ('bufferedData' in error) { + delete error.bufferedData; + } + + error.failed = true; + error.timedOut = Boolean(timedOut); + error.isCanceled = isCanceled; + error.killed = killed && !timedOut; + + return error; +}; + +module.exports = makeError; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js new file mode 100644 index 0000000..287a142 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js @@ -0,0 +1,115 @@ +'use strict'; +const os = require('os'); +const onExit = require('signal-exit'); + +const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5; + +// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior +const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => { + const killResult = kill(signal); + setKillTimeout(kill, signal, options, killResult); + return killResult; +}; + +const setKillTimeout = (kill, signal, options, killResult) => { + if (!shouldForceKill(signal, options, killResult)) { + return; + } + + const timeout = getForceKillAfterTimeout(options); + const t = setTimeout(() => { + kill('SIGKILL'); + }, timeout); + + // Guarded because there's no `.unref()` when `execa` is used in the renderer + // process in Electron. This cannot be tested since we don't run tests in + // Electron. 
+ // istanbul ignore else + if (t.unref) { + t.unref(); + } +}; + +const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => { + return isSigterm(signal) && forceKillAfterTimeout !== false && killResult; +}; + +const isSigterm = signal => { + return signal === os.constants.signals.SIGTERM || + (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM'); +}; + +const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => { + if (forceKillAfterTimeout === true) { + return DEFAULT_FORCE_KILL_TIMEOUT; + } + + if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) { + throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`); + } + + return forceKillAfterTimeout; +}; + +// `childProcess.cancel()` +const spawnedCancel = (spawned, context) => { + const killResult = spawned.kill(); + + if (killResult) { + context.isCanceled = true; + } +}; + +const timeoutKill = (spawned, signal, reject) => { + spawned.kill(signal); + reject(Object.assign(new Error('Timed out'), {timedOut: true, signal})); +}; + +// `timeout` option handling +const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => { + if (timeout === 0 || timeout === undefined) { + return spawnedPromise; + } + + let timeoutId; + const timeoutPromise = new Promise((resolve, reject) => { + timeoutId = setTimeout(() => { + timeoutKill(spawned, killSignal, reject); + }, timeout); + }); + + const safeSpawnedPromise = spawnedPromise.finally(() => { + clearTimeout(timeoutId); + }); + + return Promise.race([timeoutPromise, safeSpawnedPromise]); +}; + +const validateTimeout = ({timeout}) => { + if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) { + throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`); + } +}; + +// `cleanup` option handling +const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => { + if (!cleanup || detached) { + return timedPromise; + } + + const removeExitHandler = onExit(() => { + spawned.kill(); + }); + + return timedPromise.finally(() => { + removeExitHandler(); + }); +}; + +module.exports = { + spawnedKill, + spawnedCancel, + setupTimeout, + validateTimeout, + setExitHandler +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js new file mode 100644 index 0000000..bd9d523 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js @@ -0,0 +1,46 @@ +'use strict'; + +const nativePromisePrototype = (async () => {})().constructor.prototype; +const descriptors = ['then', 'catch', 'finally'].map(property => [ + property, + Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property) +]); + +// The return value is a mixin of `childProcess` and `Promise` +const mergePromise = (spawned, promise) => { + for (const [property, descriptor] of descriptors) { + // Starting the main `promise` is deferred to avoid consuming streams + const value = typeof promise === 'function' ? 
+ (...args) => Reflect.apply(descriptor.value, promise(), args) : + descriptor.value.bind(promise); + + Reflect.defineProperty(spawned, property, {...descriptor, value}); + } + + return spawned; +}; + +// Use promises instead of `child_process` events +const getSpawnedPromise = spawned => { + return new Promise((resolve, reject) => { + spawned.on('exit', (exitCode, signal) => { + resolve({exitCode, signal}); + }); + + spawned.on('error', error => { + reject(error); + }); + + if (spawned.stdin) { + spawned.stdin.on('error', error => { + reject(error); + }); + } + }); +}; + +module.exports = { + mergePromise, + getSpawnedPromise +}; + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js new file mode 100644 index 0000000..45129ed --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js @@ -0,0 +1,52 @@ +'use strict'; +const aliases = ['stdin', 'stdout', 'stderr']; + +const hasAlias = options => aliases.some(alias => options[alias] !== undefined); + +const normalizeStdio = options => { + if (!options) { + return; + } + + const {stdio} = options; + + if (stdio === undefined) { + return aliases.map(alias => options[alias]); + } + + if (hasAlias(options)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`); + } + + if (typeof stdio === 'string') { + return stdio; + } + + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } + + const length = Math.max(stdio.length, aliases.length); + return Array.from({length}, (value, index) => stdio[index]); +}; + +module.exports = normalizeStdio; + +// `ipc` is pushed unless it is already present +module.exports.node = options => { + const stdio = normalizeStdio(options); + + if (stdio === 'ipc') { + return 'ipc'; + } + + if (stdio === undefined || typeof stdio === 'string') { + return [stdio, stdio, stdio, 'ipc']; + } + + if (stdio.includes('ipc')) { + return stdio; + } + + return [...stdio, 'ipc']; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js new file mode 100644 index 0000000..d445dd4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js @@ -0,0 +1,97 @@ +'use strict'; +const isStream = require('is-stream'); +const getStream = require('get-stream'); +const mergeStream = require('merge-stream'); + +// `input` option +const handleInput = (spawned, input) => { + // Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852 + // @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0 + if (input === undefined || spawned.stdin === undefined) { + return; + } + + if (isStream(input)) { + input.pipe(spawned.stdin); + } else { + spawned.stdin.end(input); + } +}; + +// `all` interleaves `stdout` and `stderr` +const makeAllStream = (spawned, {all}) => { + if (!all || (!spawned.stdout && !spawned.stderr)) { + return; + } + + const mixed = mergeStream(); + + if (spawned.stdout) { + mixed.add(spawned.stdout); + } + + if (spawned.stderr) { + mixed.add(spawned.stderr); + } + + return mixed; +}; + +// On failure, `result.stdout|stderr|all` should contain the currently buffered stream +const getBufferedData = async (stream, streamPromise) => { 
+ if (!stream) { + return; + } + + stream.destroy(); + + try { + return await streamPromise; + } catch (error) { + return error.bufferedData; + } +}; + +const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => { + if (!stream || !buffer) { + return; + } + + if (encoding) { + return getStream(stream, {encoding, maxBuffer}); + } + + return getStream.buffer(stream, {maxBuffer}); +}; + +// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all) +const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => { + const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer}); + const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer}); + const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2}); + + try { + return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]); + } catch (error) { + return Promise.all([ + {error, signal: error.signal, timedOut: error.timedOut}, + getBufferedData(stdout, stdoutPromise), + getBufferedData(stderr, stderrPromise), + getBufferedData(all, allPromise) + ]); + } +}; + +const validateInputSync = ({input}) => { + if (isStream(input)) { + throw new TypeError('The `input` option cannot be a stream in sync mode'); + } +}; + +module.exports = { + handleInput, + makeAllStream, + getSpawnedResult, + validateInputSync +}; + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
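The lib/stream.js above is what implements execa's `all` option: makeAllStream() merges stdout and stderr into a single interleaved stream, and getSpawnedResult() buffers it alongside the individual streams. A minimal sketch, assuming the vendored execa resolves via `require('execa')`:

```js
const execa = require('execa');

(async () => {
	// With `all: true`, makeAllStream() merges stdout and stderr, so `all`
	// holds both outputs interleaved in the order they were produced.
	const {stdout, stderr, all} = await execa(
		'node',
		['-e', 'console.log("to stdout"); console.error("to stderr");'],
		{all: true}
	);
	console.log(stdout); //=> 'to stdout'
	console.log(stderr); //=> 'to stderr'
	console.log(all);    //=> both lines, interleaved
})();
```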
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json new file mode 100644 index 0000000..22556f2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json @@ -0,0 +1,74 @@ +{ + "name": "execa", + "version": "5.1.1", + "description": "Process execution for humans", + "license": "MIT", + "repository": "sindresorhus/execa", + "funding": "https://github.com/sindresorhus/execa?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "lib" + ], + "keywords": [ + "exec", + "child", + "process", + "execute", + "fork", + "execfile", + "spawn", + "file", + "shell", + "bin", + "binary", + "binaries", + "npm", + "path", + "local" + ], + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "devDependencies": { + "@types/node": "^14.14.10", + "ava": "^2.4.0", + "get-node": "^11.0.1", + "is-running": "^2.1.0", + "nyc": "^15.1.0", + "p-event": "^4.2.0", + "tempfile": "^3.0.0", + "tsd": "^0.13.1", + "xo": "^0.35.0" + }, + "nyc": { + "reporter": [ + "text", + "lcov" + ], + "exclude": [ + "**/fixtures/**", + "**/test.js", + "**/test/**" + ] + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md new file mode 100644 index 0000000..843edbc --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md @@ -0,0 +1,663 @@ + +
+ +[![Coverage Status](https://codecov.io/gh/sindresorhus/execa/branch/main/graph/badge.svg)](https://codecov.io/gh/sindresorhus/execa) + +> Process execution for humans + +## Why + +This package improves [`child_process`](https://nodejs.org/api/child_process.html) methods with: + +- Promise interface. +- [Strips the final newline](#stripfinalnewline) from the output so you don't have to do `stdout.trim()`. +- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform. +- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why) +- Higher max buffer. 100 MB instead of 200 KB. +- [Executes locally installed binaries by name.](#preferlocal) +- [Cleans up spawned processes when the parent process dies.](#cleanup) +- [Get interleaved output](#all) from `stdout` and `stderr` similar to what is printed on the terminal. [*(Async only)*](#execasyncfile-arguments-options) +- [Can specify file and arguments as a single string without a shell](#execacommandcommand-options) +- More descriptive errors. + +## Install + +``` +$ npm install execa +``` + +## Usage + +```js +const execa = require('execa'); + +(async () => { + const {stdout} = await execa('echo', ['unicorns']); + console.log(stdout); + //=> 'unicorns' +})(); +``` + +### Pipe the child process stdout to the parent + +```js +const execa = require('execa'); + +execa('echo', ['unicorns']).stdout.pipe(process.stdout); +``` + +### Handling Errors + +```js +const execa = require('execa'); + +(async () => { + // Catching an error + try { + await execa('unknown', ['command']); + } catch (error) { + console.log(error); + /* + { + message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', + errno: -2, + code: 'ENOENT', + syscall: 'spawn unknown', + path: 'unknown', + spawnargs: ['command'], + originalMessage: 'spawn unknown ENOENT', + shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', + command: 'unknown command', + escapedCommand: 'unknown command', + stdout: '', + stderr: '', + all: '', + failed: true, + timedOut: false, + isCanceled: false, + killed: false + } + */ + } + +})(); +``` + +### Cancelling a spawned process + +```js +const execa = require('execa'); + +(async () => { + const subprocess = execa('node'); + + setTimeout(() => { + subprocess.cancel(); + }, 1000); + + try { + await subprocess; + } catch (error) { + console.log(subprocess.killed); // true + console.log(error.isCanceled); // true + } +})() +``` + +### Catching an error with the sync method + +```js +try { + execa.sync('unknown', ['command']); +} catch (error) { + console.log(error); + /* + { + message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', + errno: -2, + code: 'ENOENT', + syscall: 'spawnSync unknown', + path: 'unknown', + spawnargs: ['command'], + originalMessage: 'spawnSync unknown ENOENT', + shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', + command: 'unknown command', + escapedCommand: 'unknown command', + stdout: '', + stderr: '', + all: '', + failed: true, + timedOut: false, + isCanceled: false, + killed: false + } + */ +} +``` + +### Kill a process + +Using SIGTERM, and after 2 seconds, kill it with SIGKILL. + +```js +const subprocess = execa('node'); + +setTimeout(() => { + subprocess.kill('SIGTERM', { + forceKillAfterTimeout: 2000 + }); +}, 1000); +``` + +## API + +### execa(file, arguments, options?) + +Execute a file. 
Think of this as a mix of [`child_process.execFile()`](https://nodejs.org/api/child_process.html#child_process_child_process_execfile_file_args_options_callback) and [`child_process.spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). + +No escaping/quoting is needed. + +Unless the [`shell`](#shell) option is used, no shell interpreter (Bash, `cmd.exe`, etc.) is used, so shell features such as variables substitution (`echo $PATH`) are not allowed. + +Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) which: + - is also a `Promise` resolving or rejecting with a [`childProcessResult`](#childProcessResult). + - exposes the following additional methods and properties. + +#### kill(signal?, options?) + +Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal) except: if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. + +##### options.forceKillAfterTimeout + +Type: `number | false`\ +Default: `5000` + +Milliseconds to wait for the child process to terminate before sending `SIGKILL`. + +Can be disabled with `false`. + +#### cancel() + +Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. + +#### all + +Type: `ReadableStream | undefined` + +Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). + +This is `undefined` if either: + - the [`all` option](#all-2) is `false` (the default value) + - both [`stdout`](#stdout-1) and [`stderr`](#stderr-1) options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) + +### execa.sync(file, arguments?, options?) + +Execute a file synchronously. + +Returns or throws a [`childProcessResult`](#childProcessResult). + +### execa.command(command, options?) + +Same as [`execa()`](#execafile-arguments-options) except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. + +If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. + +The [`shell` option](#shell) must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. + +### execa.commandSync(command, options?) + +Same as [`execa.command()`](#execacommand-command-options) but synchronous. + +Returns or throws a [`childProcessResult`](#childProcessResult). + +### execa.node(scriptPath, arguments?, options?) + +Execute a Node.js script as a child process. 
+ +Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): + - the current Node version and options are used. This can be overridden using the [`nodePath`](#nodepath-for-node-only) and [`nodeOptions`](#nodeoptions-for-node-only) options. + - the [`shell`](#shell) option cannot be used + - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) + +### childProcessResult + +Type: `object` + +Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. + +The child process [fails](#failed) when: +- its [exit code](#exitcode) is not `0` +- it was [killed](#killed) with a [signal](#signal) +- [timing out](#timedout) +- [being canceled](#iscanceled) +- there's not enough memory or there are already too many child processes + +#### command + +Type: `string` + +The file and arguments that were run, for logging purposes. + +This is not escaped and should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). + +#### escapedCommand + +Type: `string` + +Same as [`command`](#command) but escaped. + +This is meant to be copy and pasted into a shell, for debugging purposes. +Since the escaping is fairly basic, this should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). + +#### exitCode + +Type: `number` + +The numeric exit code of the process that was run. + +#### stdout + +Type: `string | Buffer` + +The output of the process on stdout. + +#### stderr + +Type: `string | Buffer` + +The output of the process on stderr. + +#### all + +Type: `string | Buffer | undefined` + +The output of the process with `stdout` and `stderr` interleaved. + +This is `undefined` if either: + - the [`all` option](#all-2) is `false` (the default value) + - `execa.sync()` was used + +#### failed + +Type: `boolean` + +Whether the process failed to run. + +#### timedOut + +Type: `boolean` + +Whether the process timed out. + +#### isCanceled + +Type: `boolean` + +Whether the process was canceled. + +#### killed + +Type: `boolean` + +Whether the process was killed. + +#### signal + +Type: `string | undefined` + +The name of the signal that was used to terminate the process. For example, `SIGFPE`. + +If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. + +#### signalDescription + +Type: `string | undefined` + +A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. + +If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. + +#### message + +Type: `string` + +Error message when the child process failed to run. In addition to the [underlying error message](#originalMessage), it also contains some information related to why the child process errored. + +The child process [stderr](#stderr) then [stdout](#stdout) are appended to the end, separated with newlines and not interleaved. 
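As a quick illustration of the properties above, the resolved result and the thrown error expose the same fields. A minimal sketch (the `node` invocations are only illustrative):

```js
const execa = require('execa');

(async () => {
	// A successful run resolves with a childProcessResult object.
	const result = await execa('node', ['--version']);
	console.log(result.exitCode, result.failed); //=> 0 false

	// A failing run rejects with the same shape, extended into an Error.
	try {
		await execa('node', ['--eval', 'process.exit(2)']);
	} catch (error) {
		console.log(error.exitCode); //=> 2
		console.log(error.failed); //=> true
		console.log(error.message); // stderr then stdout appended at the end
	}
})();
```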
+ +#### shortMessage + +Type: `string` + +This is the same as the [`message` property](#message) except it does not include the child process stdout/stderr. + +#### originalMessage + +Type: `string | undefined` + +Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. + +This is `undefined` unless the child process exited due to an `error` event or a timeout. + +### options + +Type: `object` + +#### cleanup + +Type: `boolean`\ +Default: `true` + +Kill the spawned process when the parent process exits unless either: + - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) + - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit + +#### preferLocal + +Type: `boolean`\ +Default: `false` + +Prefer locally installed binaries when looking for a binary to execute.\ +If you `$ npm install foo`, you can then `execa('foo')`. + +#### localDir + +Type: `string`\ +Default: `process.cwd()` + +Preferred path to find locally installed binaries in (use with `preferLocal`). + +#### execPath + +Type: `string`\ +Default: `process.execPath` (Current Node.js executable) + +Path to the Node.js executable to use in child processes. + +This can be either an absolute path or a path relative to the [`cwd` option](#cwd). + +Requires [`preferLocal`](#preferlocal) to be `true`. + +For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. + +#### buffer + +Type: `boolean`\ +Default: `true` + +Buffer the output from the spawned process. When set to `false`, you must read the output of [`stdout`](#stdout-1) and [`stderr`](#stderr-1) (or [`all`](#all) if the [`all`](#all-2) option is `true`). Otherwise the returned promise will not be resolved/rejected. + +If the spawned process fails, [`error.stdout`](#stdout), [`error.stderr`](#stderr), and [`error.all`](#all) will contain the buffered data. + +#### input + +Type: `string | Buffer | stream.Readable` + +Write some input to the `stdin` of your binary.\ +Streams are not allowed when using the synchronous methods. + +#### stdin + +Type: `string | number | Stream | undefined`\ +Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### stdout + +Type: `string | number | Stream | undefined`\ +Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### stderr + +Type: `string | number | Stream | undefined`\ +Default: `pipe` + +Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). + +#### all + +Type: `boolean`\ +Default: `false` + +Add an `.all` property on the [promise](#all) and the [resolved value](#all-1). The property contains the output of the process with `stdout` and `stderr` interleaved. + +#### reject + +Type: `boolean`\ +Default: `true` + +Setting this to `false` resolves the promise with the error instead of rejecting it. + +#### stripFinalNewline + +Type: `boolean`\ +Default: `true` + +Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. 
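For example, the effect is visible when echoing a value (a minimal sketch; `echo` assumes a Unix-like environment):

```js
const execa = require('execa');

(async () => {
	const trimmed = await execa('echo', ['unicorns']);
	console.log(JSON.stringify(trimmed.stdout)); //=> "unicorns"

	const raw = await execa('echo', ['unicorns'], {stripFinalNewline: false});
	console.log(JSON.stringify(raw.stdout)); //=> "unicorns\n"
})();
```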
+ +#### extendEnv + +Type: `boolean`\ +Default: `true` + +Set to `false` if you don't want to extend the environment variables when providing the `env` property. + +--- + +Execa also accepts the below options which are the same as the options for [`child_process#spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options)/[`child_process#exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) + +#### cwd + +Type: `string`\ +Default: `process.cwd()` + +Current working directory of the child process. + +#### env + +Type: `object`\ +Default: `process.env` + +Environment key-value pairs. Extends automatically from `process.env`. Set [`extendEnv`](#extendenv) to `false` if you don't want this. + +#### argv0 + +Type: `string` + +Explicitly set the value of `argv[0]` sent to the child process. This will be set to `file` if not specified. + +#### stdio + +Type: `string | string[]`\ +Default: `pipe` + +Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. + +#### serialization + +Type: `string`\ +Default: `'json'` + +Specify the kind of serialization used for sending messages between processes when using the [`stdio: 'ipc'`](#stdio) option or [`execa.node()`](#execanodescriptpath-arguments-options): + - `json`: Uses `JSON.stringify()` and `JSON.parse()`. + - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) + +Requires Node.js `13.2.0` or later. + +[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) + +#### detached + +Type: `boolean` + +Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). + +#### uid + +Type: `number` + +Sets the user identity of the process. + +#### gid + +Type: `number` + +Sets the group identity of the process. + +#### shell + +Type: `boolean | string`\ +Default: `false` + +If `true`, runs `file` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. + +We recommend against using this option since it is: +- not cross-platform, encouraging shell-specific syntax. +- slower, because of the additional shell interpretation. +- unsafe, potentially allowing command injection. + +#### encoding + +Type: `string | null`\ +Default: `utf8` + +Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. + +#### timeout + +Type: `number`\ +Default: `0` + +If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds. + +#### maxBuffer + +Type: `number`\ +Default: `100_000_000` (100 MB) + +Largest amount of data in bytes allowed on `stdout` or `stderr`. + +#### killSignal + +Type: `string | number`\ +Default: `SIGTERM` + +Signal value to be used when the spawned process will be killed. + +#### windowsVerbatimArguments + +Type: `boolean`\ +Default: `false` + +If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. 
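The `child_process`-style options above can be combined freely. A minimal sketch; the values and the `EXAMPLE_FLAG` variable are placeholders, not part of the API:

```js
const execa = require('execa');

(async () => {
	const {stdout} = await execa('node', ['--version'], {
		cwd: process.cwd(),        // working directory of the child
		env: {EXAMPLE_FLAG: '1'},  // merged into process.env unless extendEnv is false
		timeout: 60000,            // after 60s, send killSignal to the child
		killSignal: 'SIGTERM'
	});
	console.log(stdout);
})();
```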
+ +#### windowsHide + +Type: `boolean`\ +Default: `true` + +On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. + +#### nodePath *(For `.node()` only)* + +Type: `string`\ +Default: [`process.execPath`](https://nodejs.org/api/process.html#process_process_execpath) + +Node.js executable used to create the child process. + +#### nodeOptions *(For `.node()` only)* + +Type: `string[]`\ +Default: [`process.execArgv`](https://nodejs.org/api/process.html#process_process_execargv) + +List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. + +## Tips + +### Retry on error + +Gracefully handle failures by using automatic retries and exponential backoff with the [`p-retry`](https://github.com/sindresorhus/p-retry) package: + +```js +const pRetry = require('p-retry'); + +const run = async () => { + const results = await execa('curl', ['-sSL', 'https://sindresorhus.com/unicorn']); + return results; +}; + +(async () => { + console.log(await pRetry(run, {retries: 5})); +})(); +``` + +### Save and pipe output from a child process + +Let's say you want to show the output of a child process in real-time while also saving it to a variable. + +```js +const execa = require('execa'); + +const subprocess = execa('echo', ['foo']); +subprocess.stdout.pipe(process.stdout); + +(async () => { + const {stdout} = await subprocess; + console.log('child output:', stdout); +})(); +``` + +### Redirect output to a file + +```js +const execa = require('execa'); + +const subprocess = execa('echo', ['foo']) +subprocess.stdout.pipe(fs.createWriteStream('stdout.txt')) +``` + +### Redirect input from a file + +```js +const execa = require('execa'); + +const subprocess = execa('cat') +fs.createReadStream('stdin.txt').pipe(subprocess.stdin) +``` + +### Execute the current package's binary + +```js +const {getBinPathSync} = require('get-bin-path'); + +const binPath = getBinPathSync(); +const subprocess = execa(binPath); +``` + +`execa` can be combined with [`get-bin-path`](https://github.com/ehmicky/get-bin-path) to test the current package's binary. As opposed to hard-coding the path to the binary, this validates that the `package.json` `bin` field is correctly set up. + +## Related + +- [gulp-execa](https://github.com/ehmicky/gulp-execa) - Gulp plugin for `execa` +- [nvexeca](https://github.com/ehmicky/nvexeca) - Run `execa` using any Node.js version +- [sudo-prompt](https://github.com/jorangreef/sudo-prompt) - Run commands with elevated privileges. + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [@ehmicky](https://github.com/ehmicky) + +--- + +
+Get professional support for this package with a Tidelift subscription. Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. +
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE new file mode 100644 index 0000000..65a9994 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Denis Malinochkin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md new file mode 100644 index 0000000..1d7843a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md @@ -0,0 +1,830 @@ +# fast-glob + +> It's a very fast and efficient [glob][glob_definition] library for [Node.js][node_js]. + +This package provides methods for traversing the file system and returning pathnames that matched a defined set of a specified pattern according to the rules used by the Unix Bash shell with some simplifications, meanwhile results are returned in **arbitrary order**. Quick, simple, effective. + +## Table of Contents + +
+Details + +* [Highlights](#highlights) +* [Old and modern mode](#old-and-modern-mode) +* [Pattern syntax](#pattern-syntax) + * [Basic syntax](#basic-syntax) + * [Advanced syntax](#advanced-syntax) +* [Installation](#installation) +* [API](#api) + * [Asynchronous](#asynchronous) + * [Synchronous](#synchronous) + * [Stream](#stream) + * [patterns](#patterns) + * [[options]](#options) + * [Helpers](#helpers) + * [generateTasks](#generatetaskspatterns-options) + * [isDynamicPattern](#isdynamicpatternpattern-options) + * [escapePath](#escapepathpath) + * [convertPathToPattern](#convertpathtopatternpath) +* [Options](#options-3) + * [Common](#common) + * [concurrency](#concurrency) + * [cwd](#cwd) + * [deep](#deep) + * [followSymbolicLinks](#followsymboliclinks) + * [fs](#fs) + * [ignore](#ignore) + * [suppressErrors](#suppresserrors) + * [throwErrorOnBrokenSymbolicLink](#throwerroronbrokensymboliclink) + * [Output control](#output-control) + * [absolute](#absolute) + * [markDirectories](#markdirectories) + * [objectMode](#objectmode) + * [onlyDirectories](#onlydirectories) + * [onlyFiles](#onlyfiles) + * [stats](#stats) + * [unique](#unique) + * [Matching control](#matching-control) + * [braceExpansion](#braceexpansion) + * [caseSensitiveMatch](#casesensitivematch) + * [dot](#dot) + * [extglob](#extglob) + * [globstar](#globstar) + * [baseNameMatch](#basenamematch) +* [FAQ](#faq) + * [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) + * [How to write patterns on Windows?](#how-to-write-patterns-on-windows) + * [Why are parentheses match wrong?](#why-are-parentheses-match-wrong) + * [How to exclude directory from reading?](#how-to-exclude-directory-from-reading) + * [How to use UNC path?](#how-to-use-unc-path) + * [Compatible with `node-glob`?](#compatible-with-node-glob) +* [Benchmarks](#benchmarks) + * [Server](#server) + * [Nettop](#nettop) +* [Changelog](#changelog) +* [License](#license) + +
+ +## Highlights + +* Fast. Probably the fastest. +* Supports multiple and negative patterns. +* Synchronous, Promise and Stream API. +* Object mode. Can return more than just strings. +* Error-tolerant. + +## Old and modern mode + +This package works in two modes, depending on the environment in which it is used. + +* **Old mode**. Node.js below 10.10 or when the [`stats`](#stats) option is *enabled*. +* **Modern mode**. Node.js 10.10+ and the [`stats`](#stats) option is *disabled*. + +The modern mode is faster. Learn more about the [internal mechanism][nodelib_fs_scandir_old_and_modern_modern]. + +## Pattern syntax + +> :warning: Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. + +There is more than one form of syntax: basic and advanced. Below is a brief overview of the supported features. Also pay attention to our [FAQ](#faq). + +> :book: This package uses [`micromatch`][micromatch] as a library for pattern matching. + +### Basic syntax + +* An asterisk (`*`) — matches everything except slashes (path separators), hidden files (names starting with `.`). +* A double star or globstar (`**`) — matches zero or more directories. +* Question mark (`?`) – matches any single character except slashes (path separators). +* Sequence (`[seq]`) — matches any character in sequence. + +> :book: A few additional words about the [basic matching behavior][picomatch_matching_behavior]. + +Some examples: + +* `src/**/*.js` — matches all files in the `src` directory (any level of nesting) that have the `.js` extension. +* `src/*.??` — matches all files in the `src` directory (only first level of nesting) that have a two-character extension. +* `file-[01].js` — matches files: `file-0.js`, `file-1.js`. + +### Advanced syntax + +* [Escapes characters][micromatch_backslashes] (`\\`) — matching special characters (`$^*+?()[]`) as literals. +* [POSIX character classes][picomatch_posix_brackets] (`[[:digit:]]`). +* [Extended globs][micromatch_extglobs] (`?(pattern-list)`). +* [Bash style brace expansions][micromatch_braces] (`{}`). +* [Regexp character classes][micromatch_regex_character_classes] (`[1-5]`). +* [Regex groups][regular_expressions_brackets] (`(a|b)`). + +> :book: A few additional words about the [advanced matching behavior][micromatch_extended_globbing]. + +Some examples: + +* `src/**/*.{css,scss}` — matches all files in the `src` directory (any level of nesting) that have the `.css` or `.scss` extension. +* `file-[[:digit:]].js` — matches files: `file-0.js`, `file-1.js`, …, `file-9.js`. +* `file-{1..3}.js` — matches files: `file-1.js`, `file-2.js`, `file-3.js`. +* `file-(1|2)` — matches files: `file-1.js`, `file-2.js`. + +## Installation + +```console +npm install fast-glob +``` + +## API + +### Asynchronous + +```js +fg(patterns, [options]) +fg.async(patterns, [options]) +fg.glob(patterns, [options]) +``` + +Returns a `Promise` with an array of matching entries. + +```js +const fg = require('fast-glob'); + +const entries = await fg(['.editorconfig', '**/index.js'], { dot: true }); + +// ['.editorconfig', 'services/index.js'] +``` + +### Synchronous + +```js +fg.sync(patterns, [options]) +fg.globSync(patterns, [options]) +``` + +Returns an array of matching entries. 
+ +```js +const fg = require('fast-glob'); + +const entries = fg.sync(['.editorconfig', '**/index.js'], { dot: true }); + +// ['.editorconfig', 'services/index.js'] +``` + +### Stream + +```js +fg.stream(patterns, [options]) +fg.globStream(patterns, [options]) +``` + +Returns a [`ReadableStream`][node_js_stream_readable_streams] when the `data` event will be emitted with matching entry. + +```js +const fg = require('fast-glob'); + +const stream = fg.stream(['.editorconfig', '**/index.js'], { dot: true }); + +for await (const entry of stream) { + // .editorconfig + // services/index.js +} +``` + +#### patterns + +* Required: `true` +* Type: `string | string[]` + +Any correct pattern(s). + +> :1234: [Pattern syntax](#pattern-syntax) +> +> :warning: This package does not respect the order of patterns. First, all the negative patterns are applied, and only then the positive patterns. If you want to get a certain order of records, use sorting or split calls. + +#### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +### Helpers + +#### `generateTasks(patterns, [options])` + +Returns the internal representation of patterns ([`Task`](./src/managers/tasks.ts) is a combining patterns by base directory). + +```js +fg.generateTasks('*'); + +[{ + base: '.', // Parent directory for all patterns inside this task + dynamic: true, // Dynamic or static patterns are in this task + patterns: ['*'], + positive: ['*'], + negative: [] +}] +``` + +##### patterns + +* Required: `true` +* Type: `string | string[]` + +Any correct pattern(s). + +##### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +#### `isDynamicPattern(pattern, [options])` + +Returns `true` if the passed pattern is a dynamic pattern. + +> :1234: [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) + +```js +fg.isDynamicPattern('*'); // true +fg.isDynamicPattern('abc'); // false +``` + +##### pattern + +* Required: `true` +* Type: `string` + +Any correct pattern. + +##### [options] + +* Required: `false` +* Type: [`Options`](#options-3) + +See [Options](#options-3) section. + +#### `escapePath(path)` + +Returns the path with escaped special characters depending on the platform. + +* Posix: + * `*?|(){}[]`; + * `!` at the beginning of line; + * `@+!` before the opening parenthesis; + * `\\` before non-special characters; +* Windows: + * `(){}[]` + * `!` at the beginning of line; + * `@+!` before the opening parenthesis; + * Characters like `*?|` cannot be used in the path ([windows_naming_conventions][windows_naming_conventions]), so they will not be escaped; + +```js +fg.escapePath('!abc'); +// \\!abc +fg.escapePath('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac' +// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac + +fg.posix.escapePath('C:\\Program Files (x86)\\**\\*'); +// C:\\\\Program Files \\(x86\\)\\*\\*\\* +fg.win32.escapePath('C:\\Program Files (x86)\\**\\*'); +// Windows: C:\\Program Files \\(x86\\)\\**\\* +``` + +#### `convertPathToPattern(path)` + +Converts a path to a pattern depending on the platform, including special character escaping. + +* Posix. Works similarly to the `fg.posix.escapePath` method. +* Windows. Works similarly to the `fg.win32.escapePath` method, additionally converting backslashes to forward slashes in cases where they are not escape characters (`!()+@{}[]`). 
+ +```js +fg.convertPathToPattern('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac'; +// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac + +fg.convertPathToPattern('C:/Program Files (x86)/**/*'); +// Posix: C:/Program Files \\(x86\\)/\\*\\*/\\* +// Windows: C:/Program Files \\(x86\\)/**/* + +fg.convertPathToPattern('C:\\Program Files (x86)\\**\\*'); +// Posix: C:\\\\Program Files \\(x86\\)\\*\\*\\* +// Windows: C:/Program Files \\(x86\\)/**/* + +fg.posix.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; +// Posix: \\\\\\?\\\\c:\\\\Program Files \\(x86\\)/**/* (broken pattern) +fg.win32.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; +// Windows: //?/c:/Program Files \\(x86\\)/**/* +``` + +## Options + +### Common options + +#### concurrency + +* Type: `number` +* Default: `os.cpus().length` + +Specifies the maximum number of concurrent requests from a reader to read directories. + +> :book: The higher the number, the higher the performance and load on the file system. If you want to read in quiet mode, set the value to a comfortable number or `1`. + +
+ +More details + +In Node, there are [two types of threads][nodejs_thread_pool]: Event Loop (code) and a Thread Pool (fs, dns, …). The thread pool size controlled by the `UV_THREADPOOL_SIZE` environment variable. Its default size is 4 ([documentation][libuv_thread_pool]). The pool is one for all tasks within a single Node process. + +Any code can make 4 real concurrent accesses to the file system. The rest of the FS requests will wait in the queue. + +> :book: Each new instance of FG in the same Node process will use the same Thread pool. + +But this package also has the `concurrency` option. This option allows you to control the number of concurrent accesses to the FS at the package level. By default, this package has a value equal to the number of cores available for the current Node process. This allows you to set a value smaller than the pool size (`concurrency: 1`) or, conversely, to prepare tasks for the pool queue more quickly (`concurrency: Number.POSITIVE_INFINITY`). + +So, in fact, this package can **only make 4 concurrent requests to the FS**. You can increase this value by using an environment variable (`UV_THREADPOOL_SIZE`), but in practice this does not give a multiple advantage. + +
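For instance, file-system access can be throttled to one directory read at a time (a minimal sketch; the pattern is only illustrative):

```js
const fg = require('fast-glob');

// Read directories one at a time instead of os.cpus().length at a time.
const entries = fg.sync('**/*.js', { concurrency: 1 });

console.log(entries.length);
```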
+ +#### cwd + +* Type: `string` +* Default: `process.cwd()` + +The current working directory in which to search. + +#### deep + +* Type: `number` +* Default: `Infinity` + +Specifies the maximum depth of a read directory relative to the start directory. + +For example, you have the following tree: + +```js +dir/ +└── one/ // 1 + └── two/ // 2 + └── file.js // 3 +``` + +```js +// With base directory +fg.sync('dir/**', { onlyFiles: false, deep: 1 }); // ['dir/one'] +fg.sync('dir/**', { onlyFiles: false, deep: 2 }); // ['dir/one', 'dir/one/two'] + +// With cwd option +fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 1 }); // ['one'] +fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 2 }); // ['one', 'one/two'] +``` + +> :book: If you specify a pattern with some base directory, this directory will not participate in the calculation of the depth of the found directories. Think of it as a [`cwd`](#cwd) option. + +#### followSymbolicLinks + +* Type: `boolean` +* Default: `true` + +Indicates whether to traverse descendants of symbolic link directories when expanding `**` patterns. + +> :book: Note that this option does not affect the base directory of the pattern. For example, if `./a` is a symlink to directory `./b` and you specified `['./a**', './b/**']` patterns, then directory `./a` will still be read. + +> :book: If the [`stats`](#stats) option is specified, the information about the symbolic link (`fs.lstat`) will be replaced with information about the entry (`fs.stat`) behind it. + +#### fs + +* Type: `FileSystemAdapter` +* Default: `fs.*` + +Custom implementation of methods for working with the file system. Supports objects with enumerable properties only. + +```ts +export interface FileSystemAdapter { + lstat?: typeof fs.lstat; + stat?: typeof fs.stat; + lstatSync?: typeof fs.lstatSync; + statSync?: typeof fs.statSync; + readdir?: typeof fs.readdir; + readdirSync?: typeof fs.readdirSync; +} +``` + +#### ignore + +* Type: `string[]` +* Default: `[]` + +An array of glob patterns to exclude matches. This is an alternative way to use negative patterns. + +```js +dir/ +├── package-lock.json +└── package.json +``` + +```js +fg.sync(['*.json', '!package-lock.json']); // ['package.json'] +fg.sync('*.json', { ignore: ['package-lock.json'] }); // ['package.json'] +``` + +#### suppressErrors + +* Type: `boolean` +* Default: `false` + +By default this package suppress only `ENOENT` errors. Set to `true` to suppress any error. + +> :book: Can be useful when the directory has entries with a special level of access. + +#### throwErrorOnBrokenSymbolicLink + +* Type: `boolean` +* Default: `false` + +Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. + +> :book: This option has no effect on errors when reading the symbolic link directory. + +### Output control + +#### absolute + +* Type: `boolean` +* Default: `false` + +Return the absolute path for entries. + +```js +fg.sync('*.js', { absolute: false }); // ['index.js'] +fg.sync('*.js', { absolute: true }); // ['/home/user/index.js'] +``` + +> :book: This option is required if you want to use negative patterns with absolute path, for example, `!${__dirname}/*.js`. + +#### markDirectories + +* Type: `boolean` +* Default: `false` + +Mark the directory path with the final slash. 
+ +```js +fg.sync('*', { onlyFiles: false, markDirectories: false }); // ['index.js', 'controllers'] +fg.sync('*', { onlyFiles: false, markDirectories: true }); // ['index.js', 'controllers/'] +``` + +#### objectMode + +* Type: `boolean` +* Default: `false` + +Returns objects (instead of strings) describing entries. + +```js +fg.sync('*', { objectMode: false }); // ['src/index.js'] +fg.sync('*', { objectMode: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: }] +``` + +The object has the following fields: + +* name (`string`) — the last part of the path (basename) +* path (`string`) — full path relative to the pattern base directory +* dirent ([`fs.Dirent`][node_js_fs_class_fs_dirent]) — instance of `fs.Dirent` + +> :book: An object is an internal representation of entry, so getting it does not affect performance. + +#### onlyDirectories + +* Type: `boolean` +* Default: `false` + +Return only directories. + +```js +fg.sync('*', { onlyDirectories: false }); // ['index.js', 'src'] +fg.sync('*', { onlyDirectories: true }); // ['src'] +``` + +> :book: If `true`, the [`onlyFiles`](#onlyfiles) option is automatically `false`. + +#### onlyFiles + +* Type: `boolean` +* Default: `true` + +Return only files. + +```js +fg.sync('*', { onlyFiles: false }); // ['index.js', 'src'] +fg.sync('*', { onlyFiles: true }); // ['index.js'] +``` + +#### stats + +* Type: `boolean` +* Default: `false` + +Enables an [object mode](#objectmode) with an additional field: + +* stats ([`fs.Stats`][node_js_fs_class_fs_stats]) — instance of `fs.Stats` + +```js +fg.sync('*', { stats: false }); // ['src/index.js'] +fg.sync('*', { stats: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: , stats: }] +``` + +> :book: Returns `fs.stat` instead of `fs.lstat` for symbolic links when the [`followSymbolicLinks`](#followsymboliclinks) option is specified. +> +> :warning: Unlike [object mode](#objectmode) this mode requires additional calls to the file system. On average, this mode is slower at least twice. See [old and modern mode](#old-and-modern-mode) for more details. + +#### unique + +* Type: `boolean` +* Default: `true` + +Ensures that the returned entries are unique. + +```js +fg.sync(['*.json', 'package.json'], { unique: false }); // ['package.json', 'package.json'] +fg.sync(['*.json', 'package.json'], { unique: true }); // ['package.json'] +``` + +If `true` and similar entries are found, the result is the first found. + +### Matching control + +#### braceExpansion + +* Type: `boolean` +* Default: `true` + +Enables Bash-like brace expansion. + +> :1234: [Syntax description][bash_hackers_syntax_expansion_brace] or more [detailed description][micromatch_braces]. + +```js +dir/ +├── abd +├── acd +└── a{b,c}d +``` + +```js +fg.sync('a{b,c}d', { braceExpansion: false }); // ['a{b,c}d'] +fg.sync('a{b,c}d', { braceExpansion: true }); // ['abd', 'acd'] +``` + +#### caseSensitiveMatch + +* Type: `boolean` +* Default: `true` + +Enables a [case-sensitive][wikipedia_case_sensitivity] mode for matching files. + +```js +dir/ +├── file.txt +└── File.txt +``` + +```js +fg.sync('file.txt', { caseSensitiveMatch: false }); // ['file.txt', 'File.txt'] +fg.sync('file.txt', { caseSensitiveMatch: true }); // ['file.txt'] +``` + +#### dot + +* Type: `boolean` +* Default: `false` + +Allow patterns to match entries that begin with a period (`.`). + +> :book: Note that an explicit dot in a portion of the pattern will always match dot files. 
+ +```js +dir/ +├── .editorconfig +└── package.json +``` + +```js +fg.sync('*', { dot: false }); // ['package.json'] +fg.sync('*', { dot: true }); // ['.editorconfig', 'package.json'] +``` + +#### extglob + +* Type: `boolean` +* Default: `true` + +Enables Bash-like `extglob` functionality. + +> :1234: [Syntax description][micromatch_extglobs]. + +```js +dir/ +├── README.md +└── package.json +``` + +```js +fg.sync('*.+(json|md)', { extglob: false }); // [] +fg.sync('*.+(json|md)', { extglob: true }); // ['README.md', 'package.json'] +``` + +#### globstar + +* Type: `boolean` +* Default: `true` + +Enables recursively repeats a pattern containing `**`. If `false`, `**` behaves exactly like `*`. + +```js +dir/ +└── a + └── b +``` + +```js +fg.sync('**', { onlyFiles: false, globstar: false }); // ['a'] +fg.sync('**', { onlyFiles: false, globstar: true }); // ['a', 'a/b'] +``` + +#### baseNameMatch + +* Type: `boolean` +* Default: `false` + +If set to `true`, then patterns without slashes will be matched against the basename of the path if it contains slashes. + +```js +dir/ +└── one/ + └── file.md +``` + +```js +fg.sync('*.md', { baseNameMatch: false }); // [] +fg.sync('*.md', { baseNameMatch: true }); // ['one/file.md'] +``` + +## FAQ + +## What is a static or dynamic pattern? + +All patterns can be divided into two types: + +* **static**. A pattern is considered static if it can be used to get an entry on the file system without using matching mechanisms. For example, the `file.js` pattern is a static pattern because we can just verify that it exists on the file system. +* **dynamic**. A pattern is considered dynamic if it cannot be used directly to find occurrences without using a matching mechanisms. For example, the `*` pattern is a dynamic pattern because we cannot use this pattern directly. + +A pattern is considered dynamic if it contains the following characters (`…` — any characters or their absence) or options: + +* The [`caseSensitiveMatch`](#casesensitivematch) option is disabled +* `\\` (the escape character) +* `*`, `?`, `!` (at the beginning of line) +* `[…]` +* `(…|…)` +* `@(…)`, `!(…)`, `*(…)`, `?(…)`, `+(…)` (respects the [`extglob`](#extglob) option) +* `{…,…}`, `{…..…}` (respects the [`braceExpansion`](#braceexpansion) option) + +## How to write patterns on Windows? + +Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. With the [`cwd`](#cwd) option use a convenient format. + +**Bad** + +```ts +[ + 'directory\\*', + path.join(process.cwd(), '**') +] +``` + +**Good** + +```ts +[ + 'directory/*', + fg.convertPathToPattern(process.cwd()) + '/**' +] +``` + +> :book: Use the [`.convertPathToPattern`](#convertpathtopatternpath) package to convert Windows-style path to a Unix-style path. + +Read more about [matching with backslashes][micromatch_backslashes]. + +## Why are parentheses match wrong? + +```js +dir/ +└── (special-*file).txt +``` + +```js +fg.sync(['(special-*file).txt']) // [] +``` + +Refers to Bash. You need to escape special characters: + +```js +fg.sync(['\\(special-*file\\).txt']) // ['(special-*file).txt'] +``` + +Read more about [matching special characters as literals][picomatch_matching_special_characters_as_literals]. Or use the [`.escapePath`](#escapepathpath). + +## How to exclude directory from reading? + +You can use a negative pattern like this: `!**/node_modules` or `!**/node_modules/**`. Also you can use [`ignore`](#ignore) option. Just look at the example below. 
+ +```js +first/ +├── file.md +└── second/ + └── file.txt +``` + +If you don't want to read the `second` directory, you must write the following pattern: `!**/second` or `!**/second/**`. + +```js +fg.sync(['**/*.md', '!**/second']); // ['first/file.md'] +fg.sync(['**/*.md'], { ignore: ['**/second/**'] }); // ['first/file.md'] +``` + +> :warning: When you write `!**/second/**/*` it means that the directory will be **read**, but all the entries will not be included in the results. + +You have to understand that if you write the pattern to exclude directories, then the directory will not be read under any circumstances. + +## How to use UNC path? + +You cannot use [Uniform Naming Convention (UNC)][unc_path] paths as patterns (due to syntax) directly, but you can use them as [`cwd`](#cwd) directory or use the `fg.convertPathToPattern` method. + +```ts +// cwd +fg.sync('*', { cwd: '\\\\?\\C:\\Python27' /* or //?/C:/Python27 */ }); +fg.sync('Python27/*', { cwd: '\\\\?\\C:\\' /* or //?/C:/ */ }); + +// .convertPathToPattern +fg.sync(fg.convertPathToPattern('\\\\?\\c:\\Python27') + '/*'); +``` + +## Compatible with `node-glob`? + +| node-glob | fast-glob | +| :----------: | :-------: | +| `cwd` | [`cwd`](#cwd) | +| `root` | – | +| `dot` | [`dot`](#dot) | +| `nomount` | – | +| `mark` | [`markDirectories`](#markdirectories) | +| `nosort` | – | +| `nounique` | [`unique`](#unique) | +| `nobrace` | [`braceExpansion`](#braceexpansion) | +| `noglobstar` | [`globstar`](#globstar) | +| `noext` | [`extglob`](#extglob) | +| `nocase` | [`caseSensitiveMatch`](#casesensitivematch) | +| `matchBase` | [`baseNameMatch`](#basenamematch) | +| `nodir` | [`onlyFiles`](#onlyfiles) | +| `ignore` | [`ignore`](#ignore) | +| `follow` | [`followSymbolicLinks`](#followsymboliclinks) | +| `realpath` | – | +| `absolute` | [`absolute`](#absolute) | + +## Benchmarks + +You can see results [here](https://github.com/mrmlnc/fast-glob/actions/workflows/benchmark.yml?query=branch%3Amaster) for every commit into the `main` branch. + +* **Product benchmark** – comparison with the main competitors. +* **Regress benchmark** – regression between the current version and the version from the npm registry. + +## Changelog + +See the [Releases section of our GitHub project][github_releases] for changelog for each release version. + +## License + +This software is released under the terms of the MIT license. 
+ +[bash_hackers_syntax_expansion_brace]: https://wiki.bash-hackers.org/syntax/expansion/brace +[github_releases]: https://github.com/mrmlnc/fast-glob/releases +[glob_definition]: https://en.wikipedia.org/wiki/Glob_(programming) +[glob_linux_man]: http://man7.org/linux/man-pages/man3/glob.3.html +[micromatch_backslashes]: https://github.com/micromatch/micromatch#backslashes +[micromatch_braces]: https://github.com/micromatch/braces +[micromatch_extended_globbing]: https://github.com/micromatch/micromatch#extended-globbing +[micromatch_extglobs]: https://github.com/micromatch/micromatch#extglobs +[micromatch_regex_character_classes]: https://github.com/micromatch/micromatch#regex-character-classes +[micromatch]: https://github.com/micromatch/micromatch +[node_js_fs_class_fs_dirent]: https://nodejs.org/api/fs.html#fs_class_fs_dirent +[node_js_fs_class_fs_stats]: https://nodejs.org/api/fs.html#fs_class_fs_stats +[node_js_stream_readable_streams]: https://nodejs.org/api/stream.html#stream_readable_streams +[node_js]: https://nodejs.org/en +[nodelib_fs_scandir_old_and_modern_modern]: https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode +[npm_normalize_path]: https://www.npmjs.com/package/normalize-path +[npm_unixify]: https://www.npmjs.com/package/unixify +[picomatch_matching_behavior]: https://github.com/micromatch/picomatch#matching-behavior-vs-bash +[picomatch_matching_special_characters_as_literals]: https://github.com/micromatch/picomatch#matching-special-characters-as-literals +[picomatch_posix_brackets]: https://github.com/micromatch/picomatch#posix-brackets +[regular_expressions_brackets]: https://www.regular-expressions.info/brackets.html +[unc_path]: https://learn.microsoft.com/openspecs/windows_protocols/ms-dtyp/62e862f4-2a51-452e-8eeb-dc4ff5ee33cc +[wikipedia_case_sensitivity]: https://en.wikipedia.org/wiki/Case_sensitivity +[nodejs_thread_pool]: https://nodejs.org/en/docs/guides/dont-block-the-event-loop +[libuv_thread_pool]: http://docs.libuv.org/en/v1.x/threadpool.html +[windows_naming_conventions]: https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts new file mode 100644 index 0000000..46823bb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts @@ -0,0 +1,40 @@ +/// +import * as taskManager from './managers/tasks'; +import { Options as OptionsInternal } from './settings'; +import { Entry as EntryInternal, FileSystemAdapter as FileSystemAdapterInternal, Pattern as PatternInternal } from './types'; +type EntryObjectModePredicate = { + [TKey in keyof Pick]-?: true; +}; +type EntryStatsPredicate = { + [TKey in keyof Pick]-?: true; +}; +type EntryObjectPredicate = EntryObjectModePredicate | EntryStatsPredicate; +declare function FastGlob(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): Promise; +declare function FastGlob(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Promise; +declare namespace FastGlob { + type Options = OptionsInternal; + type Entry = EntryInternal; + type Task = taskManager.Task; + type Pattern = PatternInternal; + type FileSystemAdapter = FileSystemAdapterInternal; + const glob: typeof FastGlob; + const globSync: typeof sync; + const globStream: typeof stream; + const async: typeof FastGlob; + 
function sync(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): EntryInternal[]; + function sync(source: PatternInternal | PatternInternal[], options?: OptionsInternal): string[]; + function stream(source: PatternInternal | PatternInternal[], options?: OptionsInternal): NodeJS.ReadableStream; + function generateTasks(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Task[]; + function isDynamicPattern(source: PatternInternal, options?: OptionsInternal): boolean; + function escapePath(source: string): PatternInternal; + function convertPathToPattern(source: string): PatternInternal; + namespace posix { + function escapePath(source: string): PatternInternal; + function convertPathToPattern(source: string): PatternInternal; + } + namespace win32 { + function escapePath(source: string): PatternInternal; + function convertPathToPattern(source: string): PatternInternal; + } +} +export = FastGlob; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js new file mode 100644 index 0000000..90365d4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js @@ -0,0 +1,102 @@ +"use strict"; +const taskManager = require("./managers/tasks"); +const async_1 = require("./providers/async"); +const stream_1 = require("./providers/stream"); +const sync_1 = require("./providers/sync"); +const settings_1 = require("./settings"); +const utils = require("./utils"); +async function FastGlob(source, options) { + assertPatternsInput(source); + const works = getWorks(source, async_1.default, options); + const result = await Promise.all(works); + return utils.array.flatten(result); +} +// https://github.com/typescript-eslint/typescript-eslint/issues/60 +// eslint-disable-next-line no-redeclare +(function (FastGlob) { + FastGlob.glob = FastGlob; + FastGlob.globSync = sync; + FastGlob.globStream = stream; + FastGlob.async = FastGlob; + function sync(source, options) { + assertPatternsInput(source); + const works = getWorks(source, sync_1.default, options); + return utils.array.flatten(works); + } + FastGlob.sync = sync; + function stream(source, options) { + assertPatternsInput(source); + const works = getWorks(source, stream_1.default, options); + /** + * The stream returned by the provider cannot work with an asynchronous iterator. + * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. + * This affects performance (+25%). I don't see best solution right now. 
+ */ + return utils.stream.merge(works); + } + FastGlob.stream = stream; + function generateTasks(source, options) { + assertPatternsInput(source); + const patterns = [].concat(source); + const settings = new settings_1.default(options); + return taskManager.generate(patterns, settings); + } + FastGlob.generateTasks = generateTasks; + function isDynamicPattern(source, options) { + assertPatternsInput(source); + const settings = new settings_1.default(options); + return utils.pattern.isDynamicPattern(source, settings); + } + FastGlob.isDynamicPattern = isDynamicPattern; + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escape(source); + } + FastGlob.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertPathToPattern(source); + } + FastGlob.convertPathToPattern = convertPathToPattern; + let posix; + (function (posix) { + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escapePosixPath(source); + } + posix.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertPosixPathToPattern(source); + } + posix.convertPathToPattern = convertPathToPattern; + })(posix = FastGlob.posix || (FastGlob.posix = {})); + let win32; + (function (win32) { + function escapePath(source) { + assertPatternsInput(source); + return utils.path.escapeWindowsPath(source); + } + win32.escapePath = escapePath; + function convertPathToPattern(source) { + assertPatternsInput(source); + return utils.path.convertWindowsPathToPattern(source); + } + win32.convertPathToPattern = convertPathToPattern; + })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); +})(FastGlob || (FastGlob = {})); +function getWorks(source, _Provider, options) { + const patterns = [].concat(source); + const settings = new settings_1.default(options); + const tasks = taskManager.generate(patterns, settings); + const provider = new _Provider(settings); + return tasks.map(provider.read, provider); +} +function assertPatternsInput(input) { + const source = [].concat(input); + const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); + if (!isValidSource) { + throw new TypeError('Patterns must be a string (non empty) or an array of strings'); + } +} +module.exports = FastGlob; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts new file mode 100644 index 0000000..59d2c42 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts @@ -0,0 +1,22 @@ +import Settings from '../settings'; +import { Pattern, PatternsGroup } from '../types'; +export type Task = { + base: string; + dynamic: boolean; + patterns: Pattern[]; + positive: Pattern[]; + negative: Pattern[]; +}; +export declare function generate(input: Pattern[], settings: Settings): Task[]; +/** + * Returns tasks grouped by basic pattern directories. + * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. 
+ */ +export declare function convertPatternsToTasks(positive: Pattern[], negative: Pattern[], dynamic: boolean): Task[]; +export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; +export declare function getNegativePatternsAsPositive(patterns: Pattern[], ignore: Pattern[]): Pattern[]; +export declare function groupPatternsByBaseDirectory(patterns: Pattern[]): PatternsGroup; +export declare function convertPatternGroupsToTasks(positive: PatternsGroup, negative: Pattern[], dynamic: boolean): Task[]; +export declare function convertPatternGroupToTask(base: string, positive: Pattern[], negative: Pattern[], dynamic: boolean): Task; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js new file mode 100644 index 0000000..335a765 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js @@ -0,0 +1,110 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; +const utils = require("../utils"); +function generate(input, settings) { + const patterns = processPatterns(input, settings); + const ignore = processPatterns(settings.ignore, settings); + const positivePatterns = getPositivePatterns(patterns); + const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); + const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); + const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); + const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); + const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); + return staticTasks.concat(dynamicTasks); +} +exports.generate = generate; +function processPatterns(input, settings) { + let patterns = input; + /** + * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry + * and some problems with the micromatch package (see fast-glob issues: #365, #394). + * + * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown + * in matching in the case of a large set of patterns after expansion. + */ + if (settings.braceExpansion) { + patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); + } + /** + * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used + * at any nesting level. + * + * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change + * the pattern in the filter before creating a regular expression. There is no need to change the patterns + * in the application. Only on the input. + */ + if (settings.baseNameMatch) { + patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); + } + /** + * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. + */ + return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); +} +/** + * Returns tasks grouped by basic pattern directories. 
+ * + * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. + * This is necessary because directory traversal starts at the base directory and goes deeper. + */ +function convertPatternsToTasks(positive, negative, dynamic) { + const tasks = []; + const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); + const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); + const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); + const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); + tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); + /* + * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory + * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. + */ + if ('.' in insideCurrentDirectoryGroup) { + tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); + } + else { + tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); + } + return tasks; +} +exports.convertPatternsToTasks = convertPatternsToTasks; +function getPositivePatterns(patterns) { + return utils.pattern.getPositivePatterns(patterns); +} +exports.getPositivePatterns = getPositivePatterns; +function getNegativePatternsAsPositive(patterns, ignore) { + const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); + const positive = negative.map(utils.pattern.convertToPositivePattern); + return positive; +} +exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; +function groupPatternsByBaseDirectory(patterns) { + const group = {}; + return patterns.reduce((collection, pattern) => { + const base = utils.pattern.getBaseDirectory(pattern); + if (base in collection) { + collection[base].push(pattern); + } + else { + collection[base] = [pattern]; + } + return collection; + }, group); +} +exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; +function convertPatternGroupsToTasks(positive, negative, dynamic) { + return Object.keys(positive).map((base) => { + return convertPatternGroupToTask(base, positive[base], negative, dynamic); + }); +} +exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; +function convertPatternGroupToTask(base, positive, negative, dynamic) { + return { + dynamic, + positive, + negative, + base, + patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) + }; +} +exports.convertPatternGroupToTask = convertPatternGroupToTask; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts new file mode 100644 index 0000000..2742616 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts @@ -0,0 +1,9 @@ +import { Task } from '../managers/tasks'; +import { Entry, EntryItem, ReaderOptions } from '../types'; +import ReaderAsync from '../readers/async'; +import Provider from './provider'; +export default class ProviderAsync extends Provider> { + protected _reader: ReaderAsync; + read(task: Task): Promise; + api(root: string, task: Task, options: ReaderOptions): Promise; +} diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js new file mode 100644 index 0000000..0c5286e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const async_1 = require("../readers/async"); +const provider_1 = require("./provider"); +class ProviderAsync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new async_1.default(this._settings); + } + async read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = await this.api(root, task, options); + return entries.map((entry) => options.transform(entry)); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderAsync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts new file mode 100644 index 0000000..377fab8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts @@ -0,0 +1,16 @@ +import { MicromatchOptions, EntryFilterFunction, Pattern } from '../../types'; +import Settings from '../../settings'; +export default class DeepFilter { + private readonly _settings; + private readonly _micromatchOptions; + constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); + getFilter(basePath: string, positive: Pattern[], negative: Pattern[]): EntryFilterFunction; + private _getMatcher; + private _getNegativePatternsRe; + private _filter; + private _isSkippedByDeep; + private _getEntryLevel; + private _isSkippedSymbolicLink; + private _isSkippedByPositivePatterns; + private _isSkippedByNegativePatterns; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js new file mode 100644 index 0000000..644bf41 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +const partial_1 = require("../matchers/partial"); +class DeepFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + } + getFilter(basePath, positive, negative) { + const matcher = this._getMatcher(positive); + const negativeRe = this._getNegativePatternsRe(negative); + return (entry) => this._filter(basePath, entry, matcher, negativeRe); + } + _getMatcher(patterns) { + return new partial_1.default(patterns, this._settings, this._micromatchOptions); + } + _getNegativePatternsRe(patterns) { + const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); + return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); + } + _filter(basePath, entry, matcher, negativeRe) { + if (this._isSkippedByDeep(basePath, entry.path)) { + return false; + } + if 
(this._isSkippedSymbolicLink(entry)) { + return false; + } + const filepath = utils.path.removeLeadingDotSegment(entry.path); + if (this._isSkippedByPositivePatterns(filepath, matcher)) { + return false; + } + return this._isSkippedByNegativePatterns(filepath, negativeRe); + } + _isSkippedByDeep(basePath, entryPath) { + /** + * Avoid unnecessary depth calculations when it doesn't matter. + */ + if (this._settings.deep === Infinity) { + return false; + } + return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; + } + _getEntryLevel(basePath, entryPath) { + const entryPathDepth = entryPath.split('/').length; + if (basePath === '') { + return entryPathDepth; + } + const basePathDepth = basePath.split('/').length; + return entryPathDepth - basePathDepth; + } + _isSkippedSymbolicLink(entry) { + return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); + } + _isSkippedByPositivePatterns(entryPath, matcher) { + return !this._settings.baseNameMatch && !matcher.match(entryPath); + } + _isSkippedByNegativePatterns(entryPath, patternsRe) { + return !utils.pattern.matchAny(entryPath, patternsRe); + } +} +exports.default = DeepFilter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts new file mode 100644 index 0000000..23db353 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts @@ -0,0 +1,17 @@ +import Settings from '../../settings'; +import { EntryFilterFunction, MicromatchOptions, Pattern } from '../../types'; +export default class EntryFilter { + private readonly _settings; + private readonly _micromatchOptions; + readonly index: Map; + constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); + getFilter(positive: Pattern[], negative: Pattern[]): EntryFilterFunction; + private _filter; + private _isDuplicateEntry; + private _createIndexRecord; + private _onlyFileFilter; + private _onlyDirectoryFilter; + private _isMatchToPatternsSet; + private _isMatchToAbsoluteNegative; + private _isMatchToPatterns; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js new file mode 100644 index 0000000..0c9210c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js @@ -0,0 +1,85 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class EntryFilter { + constructor(_settings, _micromatchOptions) { + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this.index = new Map(); + } + getFilter(positive, negative) { + const [absoluteNegative, relativeNegative] = utils.pattern.partitionAbsoluteAndRelative(negative); + const patterns = { + positive: { + all: utils.pattern.convertPatternsToRe(positive, this._micromatchOptions) + }, + negative: { + absolute: utils.pattern.convertPatternsToRe(absoluteNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })), + relative: utils.pattern.convertPatternsToRe(relativeNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })) + } + }; + return (entry) => this._filter(entry, patterns); + } + _filter(entry, patterns) { + const filepath = 
utils.path.removeLeadingDotSegment(entry.path); + if (this._settings.unique && this._isDuplicateEntry(filepath)) { + return false; + } + if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { + return false; + } + const isMatched = this._isMatchToPatternsSet(filepath, patterns, entry.dirent.isDirectory()); + if (this._settings.unique && isMatched) { + this._createIndexRecord(filepath); + } + return isMatched; + } + _isDuplicateEntry(filepath) { + return this.index.has(filepath); + } + _createIndexRecord(filepath) { + this.index.set(filepath, undefined); + } + _onlyFileFilter(entry) { + return this._settings.onlyFiles && !entry.dirent.isFile(); + } + _onlyDirectoryFilter(entry) { + return this._settings.onlyDirectories && !entry.dirent.isDirectory(); + } + _isMatchToPatternsSet(filepath, patterns, isDirectory) { + const isMatched = this._isMatchToPatterns(filepath, patterns.positive.all, isDirectory); + if (!isMatched) { + return false; + } + const isMatchedByRelativeNegative = this._isMatchToPatterns(filepath, patterns.negative.relative, isDirectory); + if (isMatchedByRelativeNegative) { + return false; + } + const isMatchedByAbsoluteNegative = this._isMatchToAbsoluteNegative(filepath, patterns.negative.absolute, isDirectory); + if (isMatchedByAbsoluteNegative) { + return false; + } + return true; + } + _isMatchToAbsoluteNegative(filepath, patternsRe, isDirectory) { + if (patternsRe.length === 0) { + return false; + } + const fullpath = utils.path.makeAbsolute(this._settings.cwd, filepath); + return this._isMatchToPatterns(fullpath, patternsRe, isDirectory); + } + _isMatchToPatterns(filepath, patternsRe, isDirectory) { + if (patternsRe.length === 0) { + return false; + } + // Trying to match files and directories by patterns. + const isMatched = utils.pattern.matchAny(filepath, patternsRe); + // A pattern with a trailling slash can be used for directory matching. + // To apply such pattern, we need to add a tralling slash to the path. 
+ if (!isMatched && isDirectory) { + return utils.pattern.matchAny(filepath + '/', patternsRe); + } + return isMatched; + } +} +exports.default = EntryFilter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts new file mode 100644 index 0000000..170eb25 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts @@ -0,0 +1,8 @@ +import Settings from '../../settings'; +import { ErrorFilterFunction } from '../../types'; +export default class ErrorFilter { + private readonly _settings; + constructor(_settings: Settings); + getFilter(): ErrorFilterFunction; + private _isNonFatalError; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js new file mode 100644 index 0000000..1c6f241 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class ErrorFilter { + constructor(_settings) { + this._settings = _settings; + } + getFilter() { + return (error) => this._isNonFatalError(error); + } + _isNonFatalError(error) { + return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; + } +} +exports.default = ErrorFilter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts new file mode 100644 index 0000000..d04c232 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts @@ -0,0 +1,33 @@ +import { Pattern, MicromatchOptions, PatternRe } from '../../types'; +import Settings from '../../settings'; +export type PatternSegment = StaticPatternSegment | DynamicPatternSegment; +type StaticPatternSegment = { + dynamic: false; + pattern: Pattern; +}; +type DynamicPatternSegment = { + dynamic: true; + pattern: Pattern; + patternRe: PatternRe; +}; +export type PatternSection = PatternSegment[]; +export type PatternInfo = { + /** + * Indicates that the pattern has a globstar (more than a single section). 
+ */ + complete: boolean; + pattern: Pattern; + segments: PatternSegment[]; + sections: PatternSection[]; +}; +export default abstract class Matcher { + private readonly _patterns; + private readonly _settings; + private readonly _micromatchOptions; + protected readonly _storage: PatternInfo[]; + constructor(_patterns: Pattern[], _settings: Settings, _micromatchOptions: MicromatchOptions); + private _fillStorage; + private _getPatternSegments; + private _splitSegmentsIntoSections; +} +export {}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js new file mode 100644 index 0000000..eae67c9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class Matcher { + constructor(_patterns, _settings, _micromatchOptions) { + this._patterns = _patterns; + this._settings = _settings; + this._micromatchOptions = _micromatchOptions; + this._storage = []; + this._fillStorage(); + } + _fillStorage() { + for (const pattern of this._patterns) { + const segments = this._getPatternSegments(pattern); + const sections = this._splitSegmentsIntoSections(segments); + this._storage.push({ + complete: sections.length <= 1, + pattern, + segments, + sections + }); + } + } + _getPatternSegments(pattern) { + const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); + return parts.map((part) => { + const dynamic = utils.pattern.isDynamicPattern(part, this._settings); + if (!dynamic) { + return { + dynamic: false, + pattern: part + }; + } + return { + dynamic: true, + pattern: part, + patternRe: utils.pattern.makeRe(part, this._micromatchOptions) + }; + }); + } + _splitSegmentsIntoSections(segments) { + return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); + } +} +exports.default = Matcher; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts new file mode 100644 index 0000000..91520f6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts @@ -0,0 +1,4 @@ +import Matcher from './matcher'; +export default class PartialMatcher extends Matcher { + match(filepath: string): boolean; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js new file mode 100644 index 0000000..1dfffeb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const matcher_1 = require("./matcher"); +class PartialMatcher extends matcher_1.default { + match(filepath) { + const parts = filepath.split('/'); + const levels = parts.length; + const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); + for (const pattern of patterns) { + const section = pattern.sections[0]; + /** + * In this case, the pattern has a globstar and we must read 
all directories unconditionally, + * but only if the level has reached the end of the first group. + * + * fixtures/{a,b}/** + * ^ true/false ^ always true + */ + if (!pattern.complete && levels > section.length) { + return true; + } + const match = parts.every((part, index) => { + const segment = pattern.segments[index]; + if (segment.dynamic && segment.patternRe.test(part)) { + return true; + } + if (!segment.dynamic && segment.pattern === part) { + return true; + } + return false; + }); + if (match) { + return true; + } + } + return false; + } +} +exports.default = PartialMatcher; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts new file mode 100644 index 0000000..1053460 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts @@ -0,0 +1,19 @@ +import { Task } from '../managers/tasks'; +import Settings from '../settings'; +import { MicromatchOptions, ReaderOptions } from '../types'; +import DeepFilter from './filters/deep'; +import EntryFilter from './filters/entry'; +import ErrorFilter from './filters/error'; +import EntryTransformer from './transformers/entry'; +export default abstract class Provider { + protected readonly _settings: Settings; + readonly errorFilter: ErrorFilter; + readonly entryFilter: EntryFilter; + readonly deepFilter: DeepFilter; + readonly entryTransformer: EntryTransformer; + constructor(_settings: Settings); + abstract read(_task: Task): T; + protected _getRootDirectory(task: Task): string; + protected _getReaderOptions(task: Task): ReaderOptions; + protected _getMicromatchOptions(): MicromatchOptions; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js new file mode 100644 index 0000000..da88ee0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = require("path"); +const deep_1 = require("./filters/deep"); +const entry_1 = require("./filters/entry"); +const error_1 = require("./filters/error"); +const entry_2 = require("./transformers/entry"); +class Provider { + constructor(_settings) { + this._settings = _settings; + this.errorFilter = new error_1.default(this._settings); + this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); + this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); + this.entryTransformer = new entry_2.default(this._settings); + } + _getRootDirectory(task) { + return path.resolve(this._settings.cwd, task.base); + } + _getReaderOptions(task) { + const basePath = task.base === '.' ? 
'' : task.base; + return { + basePath, + pathSegmentSeparator: '/', + concurrency: this._settings.concurrency, + deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), + entryFilter: this.entryFilter.getFilter(task.positive, task.negative), + errorFilter: this.errorFilter.getFilter(), + followSymbolicLinks: this._settings.followSymbolicLinks, + fs: this._settings.fs, + stats: this._settings.stats, + throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, + transform: this.entryTransformer.getTransformer() + }; + } + _getMicromatchOptions() { + return { + dot: this._settings.dot, + matchBase: this._settings.baseNameMatch, + nobrace: !this._settings.braceExpansion, + nocase: !this._settings.caseSensitiveMatch, + noext: !this._settings.extglob, + noglobstar: !this._settings.globstar, + posix: true, + strictSlashes: false + }; + } +} +exports.default = Provider; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts new file mode 100644 index 0000000..3d02a1f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts @@ -0,0 +1,11 @@ +/// +import { Readable } from 'stream'; +import { Task } from '../managers/tasks'; +import ReaderStream from '../readers/stream'; +import { ReaderOptions } from '../types'; +import Provider from './provider'; +export default class ProviderStream extends Provider { + protected _reader: ReaderStream; + read(task: Task): Readable; + api(root: string, task: Task, options: ReaderOptions): Readable; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js new file mode 100644 index 0000000..85da62e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const stream_2 = require("../readers/stream"); +const provider_1 = require("./provider"); +class ProviderStream extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new stream_2.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const source = this.api(root, task, options); + const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); + source + .once('error', (error) => destination.emit('error', error)) + .on('data', (entry) => destination.emit('data', options.transform(entry))) + .once('end', () => destination.emit('end')); + destination + .once('close', () => source.destroy()); + return destination; + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts new file mode 100644 index 0000000..9c0fe1e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts @@ -0,0 +1,9 @@ +import { Task } from '../managers/tasks'; +import ReaderSync from 
'../readers/sync'; +import { Entry, EntryItem, ReaderOptions } from '../types'; +import Provider from './provider'; +export default class ProviderSync extends Provider { + protected _reader: ReaderSync; + read(task: Task): EntryItem[]; + api(root: string, task: Task, options: ReaderOptions): Entry[]; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js new file mode 100644 index 0000000..d70aa1b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const sync_1 = require("../readers/sync"); +const provider_1 = require("./provider"); +class ProviderSync extends provider_1.default { + constructor() { + super(...arguments); + this._reader = new sync_1.default(this._settings); + } + read(task) { + const root = this._getRootDirectory(task); + const options = this._getReaderOptions(task); + const entries = this.api(root, task, options); + return entries.map(options.transform); + } + api(root, task, options) { + if (task.dynamic) { + return this._reader.dynamic(root, options); + } + return this._reader.static(task.patterns, options); + } +} +exports.default = ProviderSync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts new file mode 100644 index 0000000..e9b85fa --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts @@ -0,0 +1,8 @@ +import Settings from '../../settings'; +import { EntryTransformerFunction } from '../../types'; +export default class EntryTransformer { + private readonly _settings; + constructor(_settings: Settings); + getTransformer(): EntryTransformerFunction; + private _transform; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js new file mode 100644 index 0000000..d11903c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const utils = require("../../utils"); +class EntryTransformer { + constructor(_settings) { + this._settings = _settings; + } + getTransformer() { + return (entry) => this._transform(entry); + } + _transform(entry) { + let filepath = entry.path; + if (this._settings.absolute) { + filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); + filepath = utils.path.unixify(filepath); + } + if (this._settings.markDirectories && entry.dirent.isDirectory()) { + filepath += '/'; + } + if (!this._settings.objectMode) { + return filepath; + } + return Object.assign(Object.assign({}, entry), { path: filepath }); + } +} +exports.default = EntryTransformer; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts new file mode 100644 index 0000000..fbca428 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts @@ -0,0 +1,10 @@ 
+import * as fsWalk from '@nodelib/fs.walk';
+import { Entry, ReaderOptions, Pattern } from '../types';
+import Reader from './reader';
+import ReaderStream from './stream';
+export default class ReaderAsync extends Reader<Promise<Entry[]>> {
+    protected _walkAsync: typeof fsWalk.walk;
+    protected _readerStream: ReaderStream;
+    dynamic(root: string, options: ReaderOptions): Promise<Entry[]>;
+    static(patterns: Pattern[], options: ReaderOptions): Promise<Entry[]>;
+}
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js
new file mode 100644
index 0000000..d024145
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const fsWalk = require("@nodelib/fs.walk");
+const reader_1 = require("./reader");
+const stream_1 = require("./stream");
+class ReaderAsync extends reader_1.default {
+    constructor() {
+        super(...arguments);
+        this._walkAsync = fsWalk.walk;
+        this._readerStream = new stream_1.default(this._settings);
+    }
+    dynamic(root, options) {
+        return new Promise((resolve, reject) => {
+            this._walkAsync(root, options, (error, entries) => {
+                if (error === null) {
+                    resolve(entries);
+                }
+                else {
+                    reject(error);
+                }
+            });
+        });
+    }
+    async static(patterns, options) {
+        const entries = [];
+        const stream = this._readerStream.static(patterns, options);
+        // After #235, replace it with an asynchronous iterator.
+        return new Promise((resolve, reject) => {
+            stream.once('error', reject);
+            stream.on('data', (entry) => entries.push(entry));
+            stream.once('end', () => resolve(entries));
+        });
+    }
+}
+exports.default = ReaderAsync;
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts
new file mode 100644
index 0000000..2af16b6
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts
@@ -0,0 +1,15 @@
+/// <reference types="node" />
+import * as fs from 'fs';
+import * as fsStat from '@nodelib/fs.stat';
+import Settings from '../settings';
+import { Entry, ErrnoException, Pattern, ReaderOptions } from '../types';
+export default abstract class Reader<T> {
+    protected readonly _settings: Settings;
+    protected readonly _fsStatSettings: fsStat.Settings;
+    constructor(_settings: Settings);
+    abstract dynamic(root: string, options: ReaderOptions): T;
+    abstract static(patterns: Pattern[], options: ReaderOptions): T;
+    protected _getFullEntryPath(filepath: string): string;
+    protected _makeEntry(stats: fs.Stats, pattern: Pattern): Entry;
+    protected _isFatalError(error: ErrnoException): boolean;
+}
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js
new file mode 100644
index 0000000..7b40255
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js
@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const path = require("path");
+const fsStat = require("@nodelib/fs.stat");
+const utils = require("../utils");
+class Reader {
+    constructor(_settings) {
+        this._settings = _settings;
+        this._fsStatSettings = new fsStat.Settings({
+            followSymbolicLink:
this._settings.followSymbolicLinks, + fs: this._settings.fs, + throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks + }); + } + _getFullEntryPath(filepath) { + return path.resolve(this._settings.cwd, filepath); + } + _makeEntry(stats, pattern) { + const entry = { + name: pattern, + path: pattern, + dirent: utils.fs.createDirentFromStats(pattern, stats) + }; + if (this._settings.stats) { + entry.stats = stats; + } + return entry; + } + _isFatalError(error) { + return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; + } +} +exports.default = Reader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts new file mode 100644 index 0000000..1c74cac --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts @@ -0,0 +1,14 @@ +/// +import { Readable } from 'stream'; +import * as fsStat from '@nodelib/fs.stat'; +import * as fsWalk from '@nodelib/fs.walk'; +import { Pattern, ReaderOptions } from '../types'; +import Reader from './reader'; +export default class ReaderStream extends Reader { + protected _walkStream: typeof fsWalk.walkStream; + protected _stat: typeof fsStat.stat; + dynamic(root: string, options: ReaderOptions): Readable; + static(patterns: Pattern[], options: ReaderOptions): Readable; + private _getEntry; + private _getStat; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js new file mode 100644 index 0000000..317c6d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const stream_1 = require("stream"); +const fsStat = require("@nodelib/fs.stat"); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +class ReaderStream extends reader_1.default { + constructor() { + super(...arguments); + this._walkStream = fsWalk.walkStream; + this._stat = fsStat.stat; + } + dynamic(root, options) { + return this._walkStream(root, options); + } + static(patterns, options) { + const filepaths = patterns.map(this._getFullEntryPath, this); + const stream = new stream_1.PassThrough({ objectMode: true }); + stream._write = (index, _enc, done) => { + return this._getEntry(filepaths[index], patterns[index], options) + .then((entry) => { + if (entry !== null && options.entryFilter(entry)) { + stream.push(entry); + } + if (index === filepaths.length - 1) { + stream.end(); + } + done(); + }) + .catch(done); + }; + for (let i = 0; i < filepaths.length; i++) { + stream.write(i); + } + return stream; + } + _getEntry(filepath, pattern, options) { + return this._getStat(filepath) + .then((stats) => this._makeEntry(stats, pattern)) + .catch((error) => { + if (options.errorFilter(error)) { + return null; + } + throw error; + }); + } + _getStat(filepath) { + return new Promise((resolve, reject) => { + this._stat(filepath, this._fsStatSettings, (error, stats) => { + return error === null ? 
resolve(stats) : reject(error); + }); + }); + } +} +exports.default = ReaderStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts new file mode 100644 index 0000000..c96ffee --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts @@ -0,0 +1,12 @@ +import * as fsStat from '@nodelib/fs.stat'; +import * as fsWalk from '@nodelib/fs.walk'; +import { Entry, Pattern, ReaderOptions } from '../types'; +import Reader from './reader'; +export default class ReaderSync extends Reader { + protected _walkSync: typeof fsWalk.walkSync; + protected _statSync: typeof fsStat.statSync; + dynamic(root: string, options: ReaderOptions): Entry[]; + static(patterns: Pattern[], options: ReaderOptions): Entry[]; + private _getEntry; + private _getStat; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js new file mode 100644 index 0000000..4704d65 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const fsStat = require("@nodelib/fs.stat"); +const fsWalk = require("@nodelib/fs.walk"); +const reader_1 = require("./reader"); +class ReaderSync extends reader_1.default { + constructor() { + super(...arguments); + this._walkSync = fsWalk.walkSync; + this._statSync = fsStat.statSync; + } + dynamic(root, options) { + return this._walkSync(root, options); + } + static(patterns, options) { + const entries = []; + for (const pattern of patterns) { + const filepath = this._getFullEntryPath(pattern); + const entry = this._getEntry(filepath, pattern, options); + if (entry === null || !options.entryFilter(entry)) { + continue; + } + entries.push(entry); + } + return entries; + } + _getEntry(filepath, pattern, options) { + try { + const stats = this._getStat(filepath); + return this._makeEntry(stats, pattern); + } + catch (error) { + if (options.errorFilter(error)) { + return null; + } + throw error; + } + } + _getStat(filepath) { + return this._statSync(filepath, this._fsStatSettings); + } +} +exports.default = ReaderSync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts new file mode 100644 index 0000000..76a74f8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts @@ -0,0 +1,164 @@ +import { FileSystemAdapter, Pattern } from './types'; +export declare const DEFAULT_FILE_SYSTEM_ADAPTER: FileSystemAdapter; +export type Options = { + /** + * Return the absolute path for entries. + * + * @default false + */ + absolute?: boolean; + /** + * If set to `true`, then patterns without slashes will be matched against + * the basename of the path if it contains slashes. + * + * @default false + */ + baseNameMatch?: boolean; + /** + * Enables Bash-like brace expansion. + * + * @default true + */ + braceExpansion?: boolean; + /** + * Enables a case-sensitive mode for matching files. + * + * @default true + */ + caseSensitiveMatch?: boolean; + /** + * Specifies the maximum number of concurrent requests from a reader to read + * directories. 
+ * + * @default os.cpus().length + */ + concurrency?: number; + /** + * The current working directory in which to search. + * + * @default process.cwd() + */ + cwd?: string; + /** + * Specifies the maximum depth of a read directory relative to the start + * directory. + * + * @default Infinity + */ + deep?: number; + /** + * Allow patterns to match entries that begin with a period (`.`). + * + * @default false + */ + dot?: boolean; + /** + * Enables Bash-like `extglob` functionality. + * + * @default true + */ + extglob?: boolean; + /** + * Indicates whether to traverse descendants of symbolic link directories. + * + * @default true + */ + followSymbolicLinks?: boolean; + /** + * Custom implementation of methods for working with the file system. + * + * @default fs.* + */ + fs?: Partial; + /** + * Enables recursively repeats a pattern containing `**`. + * If `false`, `**` behaves exactly like `*`. + * + * @default true + */ + globstar?: boolean; + /** + * An array of glob patterns to exclude matches. + * This is an alternative way to use negative patterns. + * + * @default [] + */ + ignore?: Pattern[]; + /** + * Mark the directory path with the final slash. + * + * @default false + */ + markDirectories?: boolean; + /** + * Returns objects (instead of strings) describing entries. + * + * @default false + */ + objectMode?: boolean; + /** + * Return only directories. + * + * @default false + */ + onlyDirectories?: boolean; + /** + * Return only files. + * + * @default true + */ + onlyFiles?: boolean; + /** + * Enables an object mode (`objectMode`) with an additional `stats` field. + * + * @default false + */ + stats?: boolean; + /** + * By default this package suppress only `ENOENT` errors. + * Set to `true` to suppress any error. + * + * @default false + */ + suppressErrors?: boolean; + /** + * Throw an error when symbolic link is broken if `true` or safely + * return `lstat` call if `false`. + * + * @default false + */ + throwErrorOnBrokenSymbolicLink?: boolean; + /** + * Ensures that the returned entries are unique. + * + * @default true + */ + unique?: boolean; +}; +export default class Settings { + private readonly _options; + readonly absolute: boolean; + readonly baseNameMatch: boolean; + readonly braceExpansion: boolean; + readonly caseSensitiveMatch: boolean; + readonly concurrency: number; + readonly cwd: string; + readonly deep: number; + readonly dot: boolean; + readonly extglob: boolean; + readonly followSymbolicLinks: boolean; + readonly fs: FileSystemAdapter; + readonly globstar: boolean; + readonly ignore: Pattern[]; + readonly markDirectories: boolean; + readonly objectMode: boolean; + readonly onlyDirectories: boolean; + readonly onlyFiles: boolean; + readonly stats: boolean; + readonly suppressErrors: boolean; + readonly throwErrorOnBrokenSymbolicLink: boolean; + readonly unique: boolean; + constructor(_options?: Options); + private _getValue; + private _getFileSystemMethods; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js new file mode 100644 index 0000000..23f916c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; +const fs = require("fs"); +const os = require("os"); +/** + * The `os.cpus` method can return zero. 
We expect the number of cores to be greater than zero. + * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 + */ +const CPU_COUNT = Math.max(os.cpus().length, 1); +exports.DEFAULT_FILE_SYSTEM_ADAPTER = { + lstat: fs.lstat, + lstatSync: fs.lstatSync, + stat: fs.stat, + statSync: fs.statSync, + readdir: fs.readdir, + readdirSync: fs.readdirSync +}; +class Settings { + constructor(_options = {}) { + this._options = _options; + this.absolute = this._getValue(this._options.absolute, false); + this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); + this.braceExpansion = this._getValue(this._options.braceExpansion, true); + this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); + this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); + this.cwd = this._getValue(this._options.cwd, process.cwd()); + this.deep = this._getValue(this._options.deep, Infinity); + this.dot = this._getValue(this._options.dot, false); + this.extglob = this._getValue(this._options.extglob, true); + this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); + this.fs = this._getFileSystemMethods(this._options.fs); + this.globstar = this._getValue(this._options.globstar, true); + this.ignore = this._getValue(this._options.ignore, []); + this.markDirectories = this._getValue(this._options.markDirectories, false); + this.objectMode = this._getValue(this._options.objectMode, false); + this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); + this.onlyFiles = this._getValue(this._options.onlyFiles, true); + this.stats = this._getValue(this._options.stats, false); + this.suppressErrors = this._getValue(this._options.suppressErrors, false); + this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); + this.unique = this._getValue(this._options.unique, true); + if (this.onlyDirectories) { + this.onlyFiles = false; + } + if (this.stats) { + this.objectMode = true; + } + // Remove the cast to the array in the next major (#404). + this.ignore = [].concat(this.ignore); + } + _getValue(option, value) { + return option === undefined ? 
value : option; + } + _getFileSystemMethods(methods = {}) { + return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); + } +} +exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts new file mode 100644 index 0000000..6506caf --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts @@ -0,0 +1,31 @@ +/// +import * as fsWalk from '@nodelib/fs.walk'; +export type ErrnoException = NodeJS.ErrnoException; +export type Entry = fsWalk.Entry; +export type EntryItem = string | Entry; +export type Pattern = string; +export type PatternRe = RegExp; +export type PatternsGroup = Record; +export type ReaderOptions = fsWalk.Options & { + transform(entry: Entry): EntryItem; + deepFilter: DeepFilterFunction; + entryFilter: EntryFilterFunction; + errorFilter: ErrorFilterFunction; + fs: FileSystemAdapter; + stats: boolean; +}; +export type ErrorFilterFunction = fsWalk.ErrorFilterFunction; +export type EntryFilterFunction = fsWalk.EntryFilterFunction; +export type DeepFilterFunction = fsWalk.DeepFilterFunction; +export type EntryTransformerFunction = (entry: Entry) => EntryItem; +export type MicromatchOptions = { + dot?: boolean; + matchBase?: boolean; + nobrace?: boolean; + nocase?: boolean; + noext?: boolean; + noglobstar?: boolean; + posix?: boolean; + strictSlashes?: boolean; +}; +export type FileSystemAdapter = fsWalk.FileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts new file mode 100644 index 0000000..98e7325 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts @@ -0,0 +1,2 @@ +export declare function flatten(items: T[][]): T[]; +export declare function splitWhen(items: T[], predicate: (item: T) => boolean): T[][]; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js new file mode 100644 index 0000000..50c406e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.splitWhen = exports.flatten = void 0; +function flatten(items) { + return items.reduce((collection, item) => [].concat(collection, item), []); +} +exports.flatten = flatten; +function splitWhen(items, predicate) { + const result = [[]]; + let groupIndex = 0; + for (const item of items) { + if (predicate(item)) { + groupIndex++; + result[groupIndex] = []; + } + else { + result[groupIndex].push(item); + } + } + return result; +} +exports.splitWhen = splitWhen; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts new file mode 100644 index 0000000..1c08d3b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts @@ -0,0 +1,2 @@ +import { ErrnoException } from '../types'; +export declare function isEnoentCodeError(error: ErrnoException): boolean; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js new file mode 100644 index 0000000..f0bd801 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEnoentCodeError = void 0; +function isEnoentCodeError(error) { + return error.code === 'ENOENT'; +} +exports.isEnoentCodeError = isEnoentCodeError; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts new file mode 100644 index 0000000..64c61ce --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts @@ -0,0 +1,4 @@ +/// +import * as fs from 'fs'; +import { Dirent } from '@nodelib/fs.walk'; +export declare function createDirentFromStats(name: string, stats: fs.Stats): Dirent; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js new file mode 100644 index 0000000..ace7c74 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDirentFromStats = void 0; +class DirentFromStats { + constructor(name, stats) { + this.name = name; + this.isBlockDevice = stats.isBlockDevice.bind(stats); + this.isCharacterDevice = stats.isCharacterDevice.bind(stats); + this.isDirectory = stats.isDirectory.bind(stats); + this.isFIFO = stats.isFIFO.bind(stats); + this.isFile = stats.isFile.bind(stats); + this.isSocket = stats.isSocket.bind(stats); + this.isSymbolicLink = stats.isSymbolicLink.bind(stats); + } +} +function createDirentFromStats(name, stats) { + return new DirentFromStats(name, stats); +} +exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts new file mode 100644 index 0000000..f634cad --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts @@ -0,0 +1,8 @@ +import * as array from './array'; +import * as errno from './errno'; +import * as fs from './fs'; +import * as path from './path'; +import * as pattern from './pattern'; +import * as stream from './stream'; +import * as string from './string'; +export { array, errno, fs, path, pattern, stream, string }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js new file mode 100644 index 0000000..0f92c16 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js @@ -0,0 +1,17 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; +const array = require("./array"); +exports.array = array; +const errno = require("./errno"); +exports.errno = errno; +const fs = require("./fs"); +exports.fs = fs; +const path = require("./path"); +exports.path = path; +const pattern = require("./pattern"); +exports.pattern = pattern; +const stream = require("./stream"); +exports.stream = stream; +const string = require("./string"); +exports.string = string; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts new file mode 100644 index 0000000..0b13f4b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts @@ -0,0 +1,13 @@ +import { Pattern } from '../types'; +/** + * Designed to work only with simple paths: `dir\\file`. + */ +export declare function unixify(filepath: string): string; +export declare function makeAbsolute(cwd: string, filepath: string): string; +export declare function removeLeadingDotSegment(entry: string): string; +export declare const escape: typeof escapeWindowsPath; +export declare function escapeWindowsPath(pattern: Pattern): Pattern; +export declare function escapePosixPath(pattern: Pattern): Pattern; +export declare const convertPathToPattern: typeof convertWindowsPathToPattern; +export declare function convertWindowsPathToPattern(filepath: string): Pattern; +export declare function convertPosixPathToPattern(filepath: string): Pattern; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js new file mode 100644 index 0000000..7b53b39 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; +const os = require("os"); +const path = require("path"); +const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; +const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ +/** + * All non-escaped special characters. + * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. + * Windows: (){}[], !+@ before (, ! at the beginning. + */ +const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; +const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; +/** + * The device path (\\.\ or \\?\). + * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths + */ +const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; +/** + * All backslashes except those escaping special characters. + * Windows: !()+@{} + * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions + */ +const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; +/** + * Designed to work only with simple paths: `dir\\file`. 
+ */ +function unixify(filepath) { + return filepath.replace(/\\/g, '/'); +} +exports.unixify = unixify; +function makeAbsolute(cwd, filepath) { + return path.resolve(cwd, filepath); +} +exports.makeAbsolute = makeAbsolute; +function removeLeadingDotSegment(entry) { + // We do not use `startsWith` because this is 10x slower than current implementation for some cases. + // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with + if (entry.charAt(0) === '.') { + const secondCharactery = entry.charAt(1); + if (secondCharactery === '/' || secondCharactery === '\\') { + return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); + } + } + return entry; +} +exports.removeLeadingDotSegment = removeLeadingDotSegment; +exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; +function escapeWindowsPath(pattern) { + return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); +} +exports.escapeWindowsPath = escapeWindowsPath; +function escapePosixPath(pattern) { + return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); +} +exports.escapePosixPath = escapePosixPath; +exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern; +function convertWindowsPathToPattern(filepath) { + return escapeWindowsPath(filepath) + .replace(DOS_DEVICE_PATH_RE, '//$1') + .replace(WINDOWS_BACKSLASHES_RE, '/'); +} +exports.convertWindowsPathToPattern = convertWindowsPathToPattern; +function convertPosixPathToPattern(filepath) { + return escapePosixPath(filepath); +} +exports.convertPosixPathToPattern = convertPosixPathToPattern; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts new file mode 100644 index 0000000..e3598a9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts @@ -0,0 +1,49 @@ +import { MicromatchOptions, Pattern, PatternRe } from '../types'; +type PatternTypeOptions = { + braceExpansion?: boolean; + caseSensitiveMatch?: boolean; + extglob?: boolean; +}; +export declare function isStaticPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; +export declare function isDynamicPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; +export declare function convertToPositivePattern(pattern: Pattern): Pattern; +export declare function convertToNegativePattern(pattern: Pattern): Pattern; +export declare function isNegativePattern(pattern: Pattern): boolean; +export declare function isPositivePattern(pattern: Pattern): boolean; +export declare function getNegativePatterns(patterns: Pattern[]): Pattern[]; +export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +export declare function getPatternsInsideCurrentDirectory(patterns: Pattern[]): Pattern[]; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +export declare function getPatternsOutsideCurrentDirectory(patterns: Pattern[]): Pattern[]; +export declare function isPatternRelatedToParentDirectory(pattern: Pattern): boolean; +export declare function getBaseDirectory(pattern: Pattern): string; +export declare function hasGlobStar(pattern: Pattern): boolean; +export declare function endsWithSlashGlobStar(pattern: Pattern): boolean; +export declare function isAffectDepthOfReadingPattern(pattern: Pattern): boolean; +export declare function expandPatternsWithBraceExpansion(patterns: Pattern[]): Pattern[]; +export declare function expandBraceExpansion(pattern: Pattern): Pattern[]; +export declare function getPatternParts(pattern: Pattern, options: MicromatchOptions): Pattern[]; +export declare function makeRe(pattern: Pattern, options: MicromatchOptions): PatternRe; +export declare function convertPatternsToRe(patterns: Pattern[], options: MicromatchOptions): PatternRe[]; +export declare function matchAny(entry: string, patternsRe: PatternRe[]): boolean; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. + */ +export declare function removeDuplicateSlashes(pattern: string): string; +export declare function partitionAbsoluteAndRelative(patterns: Pattern[]): Pattern[][]; +export declare function isAbsolute(pattern: string): boolean; +export {}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js new file mode 100644 index 0000000..b2924e7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js @@ -0,0 +1,206 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isAbsolute = exports.partitionAbsoluteAndRelative = exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; +const path = require("path"); +const globParent = require("glob-parent"); +const micromatch = require("micromatch"); +const GLOBSTAR = '**'; +const ESCAPE_SYMBOL = '\\'; +const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; +const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; +const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; +const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; +const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; +/** + * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. + * The latter is due to the presence of the device path at the beginning of the UNC path. 
+ */ +const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; +function isStaticPattern(pattern, options = {}) { + return !isDynamicPattern(pattern, options); +} +exports.isStaticPattern = isStaticPattern; +function isDynamicPattern(pattern, options = {}) { + /** + * A special case with an empty string is necessary for matching patterns that start with a forward slash. + * An empty string cannot be a dynamic pattern. + * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. + */ + if (pattern === '') { + return false; + } + /** + * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check + * filepath directly (without read directory). + */ + if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { + return true; + } + if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { + return true; + } + if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { + return true; + } + return false; +} +exports.isDynamicPattern = isDynamicPattern; +function hasBraceExpansion(pattern) { + const openingBraceIndex = pattern.indexOf('{'); + if (openingBraceIndex === -1) { + return false; + } + const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); + if (closingBraceIndex === -1) { + return false; + } + const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); + return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); +} +function convertToPositivePattern(pattern) { + return isNegativePattern(pattern) ? pattern.slice(1) : pattern; +} +exports.convertToPositivePattern = convertToPositivePattern; +function convertToNegativePattern(pattern) { + return '!' + pattern; +} +exports.convertToNegativePattern = convertToNegativePattern; +function isNegativePattern(pattern) { + return pattern.startsWith('!') && pattern[1] !== '('; +} +exports.isNegativePattern = isNegativePattern; +function isPositivePattern(pattern) { + return !isNegativePattern(pattern); +} +exports.isPositivePattern = isPositivePattern; +function getNegativePatterns(patterns) { + return patterns.filter(isNegativePattern); +} +exports.getNegativePatterns = getNegativePatterns; +function getPositivePatterns(patterns) { + return patterns.filter(isPositivePattern); +} +exports.getPositivePatterns = getPositivePatterns; +/** + * Returns patterns that can be applied inside the current directory. + * + * @example + * // ['./*', '*', 'a/*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsInsideCurrentDirectory(patterns) { + return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); +} +exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; +/** + * Returns patterns to be expanded relative to (outside) the current directory. 
+ * + * @example + * // ['../*', './../*'] + * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) + */ +function getPatternsOutsideCurrentDirectory(patterns) { + return patterns.filter(isPatternRelatedToParentDirectory); +} +exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; +function isPatternRelatedToParentDirectory(pattern) { + return pattern.startsWith('..') || pattern.startsWith('./..'); +} +exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; +function getBaseDirectory(pattern) { + return globParent(pattern, { flipBackslashes: false }); +} +exports.getBaseDirectory = getBaseDirectory; +function hasGlobStar(pattern) { + return pattern.includes(GLOBSTAR); +} +exports.hasGlobStar = hasGlobStar; +function endsWithSlashGlobStar(pattern) { + return pattern.endsWith('/' + GLOBSTAR); +} +exports.endsWithSlashGlobStar = endsWithSlashGlobStar; +function isAffectDepthOfReadingPattern(pattern) { + const basename = path.basename(pattern); + return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); +} +exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; +function expandPatternsWithBraceExpansion(patterns) { + return patterns.reduce((collection, pattern) => { + return collection.concat(expandBraceExpansion(pattern)); + }, []); +} +exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; +function expandBraceExpansion(pattern) { + const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); + /** + * Sort the patterns by length so that the same depth patterns are processed side by side. + * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` + */ + patterns.sort((a, b) => a.length - b.length); + /** + * Micromatch can return an empty string in the case of patterns like `{a,}`. + */ + return patterns.filter((pattern) => pattern !== ''); +} +exports.expandBraceExpansion = expandBraceExpansion; +function getPatternParts(pattern, options) { + let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); + /** + * The scan method returns an empty array in some cases. + * See micromatch/picomatch#58 for more details. + */ + if (parts.length === 0) { + parts = [pattern]; + } + /** + * The scan method does not return an empty part for the pattern with a forward slash. + * This is another part of micromatch/picomatch#58. + */ + if (parts[0].startsWith('/')) { + parts[0] = parts[0].slice(1); + parts.unshift(''); + } + return parts; +} +exports.getPatternParts = getPatternParts; +function makeRe(pattern, options) { + return micromatch.makeRe(pattern, options); +} +exports.makeRe = makeRe; +function convertPatternsToRe(patterns, options) { + return patterns.map((pattern) => makeRe(pattern, options)); +} +exports.convertPatternsToRe = convertPatternsToRe; +function matchAny(entry, patternsRe) { + return patternsRe.some((patternRe) => patternRe.test(entry)); +} +exports.matchAny = matchAny; +/** + * This package only works with forward slashes as a path separator. + * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
+ */ +function removeDuplicateSlashes(pattern) { + return pattern.replace(DOUBLE_SLASH_RE, '/'); +} +exports.removeDuplicateSlashes = removeDuplicateSlashes; +function partitionAbsoluteAndRelative(patterns) { + const absolute = []; + const relative = []; + for (const pattern of patterns) { + if (isAbsolute(pattern)) { + absolute.push(pattern); + } + else { + relative.push(pattern); + } + } + return [absolute, relative]; +} +exports.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; +function isAbsolute(pattern) { + return path.isAbsolute(pattern); +} +exports.isAbsolute = isAbsolute; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts new file mode 100644 index 0000000..4daf913 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts @@ -0,0 +1,4 @@ +/// +/// +import { Readable } from 'stream'; +export declare function merge(streams: Readable[]): NodeJS.ReadableStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js new file mode 100644 index 0000000..b32028c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.merge = void 0; +const merge2 = require("merge2"); +function merge(streams) { + const mergedStream = merge2(streams); + streams.forEach((stream) => { + stream.once('error', (error) => mergedStream.emit('error', error)); + }); + mergedStream.once('close', () => propagateCloseEventToSources(streams)); + mergedStream.once('end', () => propagateCloseEventToSources(streams)); + return mergedStream; +} +exports.merge = merge; +function propagateCloseEventToSources(streams) { + streams.forEach((stream) => stream.emit('close')); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts new file mode 100644 index 0000000..c884735 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts @@ -0,0 +1,2 @@ +export declare function isString(input: unknown): input is string; +export declare function isEmpty(input: string): boolean; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js new file mode 100644 index 0000000..76e7ea5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isEmpty = exports.isString = void 0; +function isString(input) { + return typeof input === 'string'; +} +exports.isString = isString; +function isEmpty(input) { + return input === ''; +} +exports.isEmpty = isEmpty; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json new file mode 100644 index 0000000..e910de9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "fast-glob", + "version": 
"3.3.3", + "description": "It's a very fast and efficient glob library for Node.js", + "license": "MIT", + "repository": "mrmlnc/fast-glob", + "author": { + "name": "Denis Malinochkin", + "url": "https://mrmlnc.com" + }, + "engines": { + "node": ">=8.6.0" + }, + "main": "out/index.js", + "typings": "out/index.d.ts", + "files": [ + "out", + "!out/{benchmark,tests}", + "!out/**/*.map", + "!out/**/*.spec.*" + ], + "keywords": [ + "glob", + "patterns", + "fast", + "implementation" + ], + "devDependencies": { + "@nodelib/fs.macchiato": "^1.0.1", + "@types/glob-parent": "^5.1.0", + "@types/merge2": "^1.1.4", + "@types/micromatch": "^4.0.0", + "@types/mocha": "^5.2.7", + "@types/node": "^14.18.53", + "@types/picomatch": "^2.3.0", + "@types/sinon": "^7.5.0", + "bencho": "^0.1.1", + "eslint": "^6.5.1", + "eslint-config-mrmlnc": "^1.1.0", + "execa": "^7.1.1", + "fast-glob": "^3.0.4", + "fdir": "6.0.1", + "glob": "^10.0.0", + "hereby": "^1.8.1", + "mocha": "^6.2.1", + "rimraf": "^5.0.0", + "sinon": "^7.5.0", + "snap-shot-it": "^7.9.10", + "typescript": "^4.9.5" + }, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "scripts": { + "clean": "rimraf out", + "lint": "eslint \"src/**/*.ts\" --cache", + "compile": "tsc", + "test": "mocha \"out/**/*.spec.js\" -s 0", + "test:e2e": "mocha \"out/**/*.e2e.js\" -s 0", + "test:e2e:sync": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(sync\\)\"", + "test:e2e:async": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(async\\)\"", + "test:e2e:stream": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(stream\\)\"", + "build": "npm run clean && npm run compile && npm run lint && npm test", + "watch": "npm run clean && npm run compile -- -- --sourceMap --watch", + "bench:async": "npm run bench:product:async && npm run bench:regression:async", + "bench:stream": "npm run bench:product:stream && npm run bench:regression:stream", + "bench:sync": "npm run bench:product:sync && npm run bench:regression:sync", + "bench:product": "npm run bench:product:async && npm run bench:product:sync && npm run bench:product:stream", + "bench:product:async": "hereby bench:product:async", + "bench:product:sync": "hereby bench:product:sync", + "bench:product:stream": "hereby bench:product:stream", + "bench:regression": "npm run bench:regression:async && npm run bench:regression:sync && npm run bench:regression:stream", + "bench:regression:async": "hereby bench:regression:async", + "bench:regression:sync": "hereby bench:regression:sync", + "bench:regression:stream": "hereby bench:regression:stream" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml new file mode 100644 index 0000000..7e7cbe1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 + ignore: + - dependency-name: standard + versions: + - 16.0.3 diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml new file mode 100644 index 0000000..09dc7a3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml @@ -0,0 +1,75 @@ +name: ci + +on: 
[push, pull_request] + +jobs: + legacy: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] + + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install --production && npm install tape + + - name: Run tests + run: | + npm run legacy + + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [18.x, 20.x, 22.x] + + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test + + types: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v3 + with: + node-version: 16 + + - name: Install + run: | + npm install + + - name: Run types tests + run: | + npm run typescript diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE new file mode 100644 index 0000000..27c7bb4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015-2020, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md new file mode 100644 index 0000000..1644111 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md @@ -0,0 +1,312 @@ +# fastq + +![ci][ci-url] +[![npm version][npm-badge]][npm-url] + +Fast, in memory work queue. + +Benchmarks (1 million tasks): + +* setImmediate: 812ms +* fastq: 854ms +* async.queue: 1298ms +* neoAsync.queue: 1249ms + +Obtained on node 12.16.1, on a dedicated server. + +If you need zero-overhead series function call, check out +[fastseries](http://npm.im/fastseries). For zero-overhead parallel +function call, check out [fastparallel](http://npm.im/fastparallel). 
+ +[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) + + * Installation + * Usage + * API + * Licence & copyright + +## Install + +`npm i fastq --save` + +## Usage (callback API) + +```js +'use strict' + +const queue = require('fastq')(worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log('the result is', result) +}) + +function worker (arg, cb) { + cb(null, arg * 2) +} +``` + +## Usage (promise API) + +```js +const queue = require('fastq').promise(worker, 1) + +async function worker (arg) { + return arg * 2 +} + +async function run () { + const result = await queue.push(42) + console.log('the result is', result) +} + +run() +``` + +### Setting "this" + +```js +'use strict' + +const that = { hello: 'world' } +const queue = require('fastq')(that, worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log(this) + console.log('the result is', result) +}) + +function worker (arg, cb) { + console.log(this) + cb(null, arg * 2) +} +``` + +### Using with TypeScript (callback API) + +```ts +'use strict' + +import * as fastq from "fastq"; +import type { queue, done } from "fastq"; + +type Task = { + id: number +} + +const q: queue = fastq(worker, 1) + +q.push({ id: 42}) + +function worker (arg: Task, cb: done) { + console.log(arg.id) + cb(null) +} +``` + +### Using with TypeScript (promise API) + +```ts +'use strict' + +import * as fastq from "fastq"; +import type { queueAsPromised } from "fastq"; + +type Task = { + id: number +} + +const q: queueAsPromised = fastq.promise(asyncWorker, 1) + +q.push({ id: 42}).catch((err) => console.error(err)) + +async function asyncWorker (arg: Task): Promise { + // No need for a try-catch block, fastq handles errors automatically + console.log(arg.id) +} +``` + +## API + +* fastqueue() +* queue#push() +* queue#unshift() +* queue#pause() +* queue#resume() +* queue#idle() +* queue#length() +* queue#getQueue() +* queue#kill() +* queue#killAndDrain() +* queue#error() +* queue#concurrency +* queue#drain +* queue#empty +* queue#saturated +* fastqueue.promise() + +------------------------------------------------------- + +### fastqueue([that], worker, concurrency) + +Creates a new queue. + +Arguments: + +* `that`, optional context of the `worker` function. +* `worker`, worker function, it would be called with `that` as `this`, + if that is specified. +* `concurrency`, number of concurrent tasks that could be executed in + parallel. + +------------------------------------------------------- + +### queue.push(task, done) + +Add a task at the end of the queue. `done(err, result)` will be called +when the task was processed. + +------------------------------------------------------- + +### queue.unshift(task, done) + +Add a task at the beginning of the queue. `done(err, result)` will be called +when the task was processed. + +------------------------------------------------------- + +### queue.pause() + +Pause the processing of tasks. Currently worked tasks are not +stopped. + +------------------------------------------------------- + +### queue.resume() + +Resume the processing of tasks. + +------------------------------------------------------- + +### queue.idle() + +Returns `false` if there are tasks being processed or waiting to be processed. +`true` otherwise. + +------------------------------------------------------- + +### queue.length() + +Returns the number of tasks waiting to be processed (in the queue). 
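+
+As a quick illustration of the state helpers documented above (`pause`, `push`, `length`, `idle`, `paused`, `resume`), here is a minimal sketch that pauses a queue, queues a few tasks, inspects it, and then resumes processing:
+
+```js
+'use strict'
+
+const queue = require('fastq')(worker, 1)
+
+queue.pause()
+queue.push(1)
+queue.push(2)
+queue.push(3)
+
+console.log(queue.paused)   // true
+console.log(queue.length()) // 3 tasks waiting to be processed
+console.log(queue.idle())   // false, there is pending work
+
+queue.resume()              // start processing the queued tasks
+
+function worker (arg, cb) {
+  setImmediate(cb, null, arg * 2)
+}
+```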
+
+-------------------------------------------------------
+
+### queue.getQueue()
+
+Returns all the tasks to be processed (in the queue). Returns an empty array when there are no tasks.
+
+-------------------------------------------------------
+
+### queue.kill()
+
+Removes all tasks waiting to be processed, and resets `drain` to an empty
+function.
+
+-------------------------------------------------------
+
+### queue.killAndDrain()
+
+Same as `kill`, but the `drain` function will be called before the queue is reset to empty.
+
+-------------------------------------------------------
+
+### queue.error(handler)
+
+Set a global error handler. `handler(err, task)` will be called
+each time a task is completed; `err` will not be null if the task has thrown an error.
+
+-------------------------------------------------------
+
+### queue.concurrency
+
+Property that returns the number of concurrent tasks that could be executed in
+parallel. It can be altered at runtime.
+
+-------------------------------------------------------
+
+### queue.paused
+
+Property (Read-Only) that returns `true` when the queue is in a paused state.
+
+-------------------------------------------------------
+
+### queue.drain
+
+Function that will be called when the last
+item from the queue has been processed by a worker.
+It can be altered at runtime.
+
+-------------------------------------------------------
+
+### queue.empty
+
+Function that will be called when the last
+item from the queue has been assigned to a worker.
+It can be altered at runtime.
+
+-------------------------------------------------------
+
+### queue.saturated
+
+Function that will be called when the queue hits the concurrency
+limit.
+It can be altered at runtime.
+
+-------------------------------------------------------
+
+### fastqueue.promise([that], worker(arg), concurrency)
+
+Creates a new queue with a `Promise`-based API. It also offers all the methods
+and properties of the object returned by [`fastqueue`](#fastqueue), with the modified
+[`push`](#pushPromise) and [`unshift`](#unshiftPromise) methods.
+
+Node v10+ is required to use the promisified version.
+
+Arguments:
+* `that`, optional context of the `worker` function.
+* `worker`, worker function; it will be called with `that` as `this`,
+  if `that` is specified. It MUST return a `Promise`.
+* `concurrency`, number of concurrent tasks that could be executed in
+  parallel.
+
+
+#### queue.push(task) => Promise
+
+Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected)
+when the task is completed successfully (unsuccessfully).
+
+This promise can be ignored, as it will not lead to an `'unhandledRejection'`.
+
+
+#### queue.unshift(task) => Promise
+
+Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected)
+when the task is completed successfully (unsuccessfully).
+
+This promise can be ignored, as it will not lead to an `'unhandledRejection'`.
+
+
+#### queue.drained() => Promise
+
+Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker.
+
+This promise can be ignored, as it will not lead to an `'unhandledRejection'`.
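+
+Putting the promise API together, here is a minimal sketch that pushes a batch of tasks and then waits for `drained()`; the per-task promises are deliberately ignored, which is safe because they never trigger an `'unhandledRejection'`:
+
+```js
+'use strict'
+
+const queue = require('fastq').promise(worker, 2)
+
+async function worker (arg) {
+  return arg * 2
+}
+
+async function run () {
+  for (const task of [1, 2, 3, 4]) {
+    // Each push returns a promise; we ignore it and rely on drained() instead.
+    queue.push(task)
+  }
+  await queue.drained()
+  console.log('all tasks have been processed')
+}
+
+run()
+```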
+ +## License + +ISC + +[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg +[npm-badge]: https://badge.fury.io/js/fastq.svg +[npm-url]: https://badge.fury.io/js/fastq diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md new file mode 100644 index 0000000..dd9f1d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md @@ -0,0 +1,15 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ------- | ------------------ | +| 1.x | :white_check_mark: | +| < 1.0 | :x: | + +## Reporting a Vulnerability + +Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js new file mode 100644 index 0000000..4eaa829 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js @@ -0,0 +1,66 @@ +'use strict' + +const max = 1000000 +const fastqueue = require('./')(worker, 1) +const { promisify } = require('util') +const immediate = promisify(setImmediate) +const qPromise = require('./').promise(immediate, 1) +const async = require('async') +const neo = require('neo-async') +const asyncqueue = async.queue(worker, 1) +const neoqueue = neo.queue(worker, 1) + +function bench (func, done) { + const key = max + '*' + func.name + let count = -1 + + console.time(key) + end() + + function end () { + if (++count < max) { + func(end) + } else { + console.timeEnd(key) + if (done) { + done() + } + } + } +} + +function benchFastQ (done) { + fastqueue.push(42, done) +} + +function benchAsyncQueue (done) { + asyncqueue.push(42, done) +} + +function benchNeoQueue (done) { + neoqueue.push(42, done) +} + +function worker (arg, cb) { + setImmediate(cb) +} + +function benchSetImmediate (cb) { + worker(42, cb) +} + +function benchFastQPromise (done) { + qPromise.push(42).then(function () { done() }, done) +} + +function runBench (done) { + async.eachSeries([ + benchSetImmediate, + benchFastQ, + benchNeoQueue, + benchAsyncQueue, + benchFastQPromise + ], bench, done) +} + +runBench(runBench) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js new file mode 100644 index 0000000..665fdc8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js @@ -0,0 +1,14 @@ +'use strict' + +/* eslint-disable no-var */ + +var queue = require('./')(worker, 1) + +queue.push(42, function (err, result) { + if (err) { throw err } + console.log('the result is', result) +}) + +function worker (arg, cb) { + cb(null, 42 * 2) +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs new file mode 100644 index 0000000..81be789 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs @@ -0,0 +1,11 @@ +import { promise as queueAsPromised } from './queue.js' + +/* eslint-disable */ + +const queue = queueAsPromised(worker, 1) + +console.log('the result is', await queue.push(42)) + +async function worker (arg) { + return 42 * 2 +} diff 
--git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts new file mode 100644 index 0000000..817cdb5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts @@ -0,0 +1,57 @@ +declare function fastq(context: C, worker: fastq.worker, concurrency: number): fastq.queue +declare function fastq(worker: fastq.worker, concurrency: number): fastq.queue + +declare namespace fastq { + type worker = (this: C, task: T, cb: fastq.done) => void + type asyncWorker = (this: C, task: T) => Promise + type done = (err: Error | null, result?: R) => void + type errorHandler = (err: Error, task: T) => void + + interface queue { + /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */ + push(task: T, done?: done): void + /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */ + unshift(task: T, done?: done): void + /** Pause the processing of tasks. Currently worked tasks are not stopped. */ + pause(): any + /** Resume the processing of tasks. */ + resume(): any + running(): number + /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */ + idle(): boolean + /** Returns the number of tasks waiting to be processed (in the queue). */ + length(): number + /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */ + getQueue(): T[] + /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */ + kill(): any + /** Same than `kill` but the `drain` function will be called before reset to empty. */ + killAndDrain(): any + /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */ + error(handler: errorHandler): void + /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */ + concurrency: number + /** Property (Read-Only) that returns `true` when the queue is in a paused state. */ + readonly paused: boolean + /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */ + drain(): any + /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */ + empty: () => void + /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */ + saturated: () => void + } + + interface queueAsPromised extends queue { + /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ + push(task: T): Promise + /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ + unshift(task: T): Promise + /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. 
*/ + drained(): Promise + } + + function promise(context: C, worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised + function promise(worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised +} + +export = fastq diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json new file mode 100644 index 0000000..989151f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json @@ -0,0 +1,53 @@ +{ + "name": "fastq", + "version": "1.19.1", + "description": "Fast, in memory work queue", + "main": "queue.js", + "scripts": { + "lint": "standard --verbose | snazzy", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js", + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js", + "test:report": "npm run lint && npm run unit:report", + "test": "npm run lint && npm run unit", + "typescript": "tsc --project ./test/tsconfig.json", + "legacy": "tape test/test.js" + }, + "pre-commit": [ + "test", + "typescript" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/fastq.git" + }, + "keywords": [ + "fast", + "queue", + "async", + "worker" + ], + "author": "Matteo Collina ", + "license": "ISC", + "bugs": { + "url": "https://github.com/mcollina/fastq/issues" + }, + "homepage": "https://github.com/mcollina/fastq#readme", + "devDependencies": { + "async": "^3.1.0", + "neo-async": "^2.6.1", + "nyc": "^17.0.0", + "pre-commit": "^1.2.2", + "snazzy": "^9.0.0", + "standard": "^16.0.0", + "tape": "^5.0.0", + "typescript": "^5.0.4" + }, + "dependencies": { + "reusify": "^1.0.4" + }, + "standard": { + "ignore": [ + "example.mjs" + ] + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js new file mode 100644 index 0000000..7ea8a31 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js @@ -0,0 +1,311 @@ +'use strict' + +/* eslint-disable no-var */ + +var reusify = require('reusify') + +function fastqueue (context, worker, _concurrency) { + if (typeof context === 'function') { + _concurrency = worker + worker = context + context = null + } + + if (!(_concurrency >= 1)) { + throw new Error('fastqueue concurrency must be equal to or greater than 1') + } + + var cache = reusify(Task) + var queueHead = null + var queueTail = null + var _running = 0 + var errorHandler = null + + var self = { + push: push, + drain: noop, + saturated: noop, + pause: pause, + paused: false, + + get concurrency () { + return _concurrency + }, + set concurrency (value) { + if (!(value >= 1)) { + throw new Error('fastqueue concurrency must be equal to or greater than 1') + } + _concurrency = value + + if (self.paused) return + for (; queueHead && _running < _concurrency;) { + _running++ + release() + } + }, + + running: running, + resume: resume, + idle: idle, + length: length, + getQueue: getQueue, + unshift: unshift, + empty: noop, + kill: kill, + killAndDrain: killAndDrain, + error: error + } + + return self + + function running () { + return _running + } + + function pause () { + self.paused = true + } + + function length () { + var current = queueHead + var counter = 0 + + while (current) { + current = current.next + counter++ + } + + return counter + } + + function getQueue () { + var current 
= queueHead + var tasks = [] + + while (current) { + tasks.push(current.value) + current = current.next + } + + return tasks + } + + function resume () { + if (!self.paused) return + self.paused = false + if (queueHead === null) { + _running++ + release() + return + } + for (; queueHead && _running < _concurrency;) { + _running++ + release() + } + } + + function idle () { + return _running === 0 && self.length() === 0 + } + + function push (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + current.errorHandler = errorHandler + + if (_running >= _concurrency || self.paused) { + if (queueTail) { + queueTail.next = current + queueTail = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function unshift (value, done) { + var current = cache.get() + + current.context = context + current.release = release + current.value = value + current.callback = done || noop + current.errorHandler = errorHandler + + if (_running >= _concurrency || self.paused) { + if (queueHead) { + current.next = queueHead + queueHead = current + } else { + queueHead = current + queueTail = current + self.saturated() + } + } else { + _running++ + worker.call(context, current.value, current.worked) + } + } + + function release (holder) { + if (holder) { + cache.release(holder) + } + var next = queueHead + if (next && _running <= _concurrency) { + if (!self.paused) { + if (queueTail === queueHead) { + queueTail = null + } + queueHead = next.next + next.next = null + worker.call(context, next.value, next.worked) + if (queueTail === null) { + self.empty() + } + } else { + _running-- + } + } else if (--_running === 0) { + self.drain() + } + } + + function kill () { + queueHead = null + queueTail = null + self.drain = noop + } + + function killAndDrain () { + queueHead = null + queueTail = null + self.drain() + self.drain = noop + } + + function error (handler) { + errorHandler = handler + } +} + +function noop () {} + +function Task () { + this.value = null + this.callback = noop + this.next = null + this.release = noop + this.context = null + this.errorHandler = null + + var self = this + + this.worked = function worked (err, result) { + var callback = self.callback + var errorHandler = self.errorHandler + var val = self.value + self.value = null + self.callback = noop + if (self.errorHandler) { + errorHandler(err, val) + } + callback.call(self.context, err, result) + self.release(self) + } +} + +function queueAsPromised (context, worker, _concurrency) { + if (typeof context === 'function') { + _concurrency = worker + worker = context + context = null + } + + function asyncWrapper (arg, cb) { + worker.call(this, arg) + .then(function (res) { + cb(null, res) + }, cb) + } + + var queue = fastqueue(context, asyncWrapper, _concurrency) + + var pushCb = queue.push + var unshiftCb = queue.unshift + + queue.push = push + queue.unshift = unshift + queue.drained = drained + + return queue + + function push (value) { + var p = new Promise(function (resolve, reject) { + pushCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function unshift (value) { + var p = new Promise(function (resolve, reject) 
{ + unshiftCb(value, function (err, result) { + if (err) { + reject(err) + return + } + resolve(result) + }) + }) + + // Let's fork the promise chain to + // make the error bubble up to the user but + // not lead to a unhandledRejection + p.catch(noop) + + return p + } + + function drained () { + var p = new Promise(function (resolve) { + process.nextTick(function () { + if (queue.idle()) { + resolve() + } else { + var previousDrain = queue.drain + queue.drain = function () { + if (typeof previousDrain === 'function') previousDrain() + resolve() + queue.drain = previousDrain + } + } + }) + }) + + return p + } +} + +module.exports = fastqueue +module.exports.promise = queueAsPromised diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts new file mode 100644 index 0000000..a47d441 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts @@ -0,0 +1,83 @@ +import * as fastq from '../' +import { promise as queueAsPromised } from '../' + +// Basic example + +const queue = fastq(worker, 1) + +queue.push('world', (err, result) => { + if (err) throw err + console.log('the result is', result) +}) + +queue.push('push without cb') + +queue.concurrency + +queue.drain() + +queue.empty = () => undefined + +console.log('the queue tasks are', queue.getQueue()) + +queue.idle() + +queue.kill() + +queue.killAndDrain() + +queue.length + +queue.pause() + +queue.resume() + +queue.running() + +queue.saturated = () => undefined + +queue.unshift('world', (err, result) => { + if (err) throw err + console.log('the result is', result) +}) + +queue.unshift('unshift without cb') + +function worker(task: any, cb: fastq.done) { + cb(null, 'hello ' + task) +} + +// Generics example + +interface GenericsContext { + base: number; +} + +const genericsQueue = fastq({ base: 6 }, genericsWorker, 1) + +genericsQueue.push(7, (err, done) => { + if (err) throw err + console.log('the result is', done) +}) + +genericsQueue.unshift(7, (err, done) => { + if (err) throw err + console.log('the result is', done) +}) + +function genericsWorker(this: GenericsContext, task: number, cb: fastq.done) { + cb(null, 'the meaning of life is ' + (this.base * task)) +} + +const queue2 = queueAsPromised(asyncWorker, 1) + +async function asyncWorker(task: any) { + return 'hello ' + task +} + +async function run () { + await queue.push(42) + await queue.unshift(42) +} + +run() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js new file mode 100644 index 0000000..45349a4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js @@ -0,0 +1,291 @@ +'use strict' + +const test = require('tape') +const buildQueue = require('../').promise +const { promisify } = require('util') +const sleep = promisify(setTimeout) +const immediate = promisify(setImmediate) + +test('concurrency', function (t) { + t.plan(2) + t.throws(buildQueue.bind(null, worker, 0)) + t.doesNotThrow(buildQueue.bind(null, worker, 1)) + + async function worker (arg) { + return true + } +}) + +test('worker execution', async function (t) { + const queue = buildQueue(worker, 1) + + const result = await queue.push(42) + + t.equal(result, true, 'result matches') + + async function worker (arg) { + t.equal(arg, 42) + return true + } +}) + +test('limit', async function (t) { + const queue = 
buildQueue(worker, 1) + + const [res1, res2] = await Promise.all([queue.push(10), queue.push(0)]) + t.equal(res1, 10, 'the result matches') + t.equal(res2, 0, 'the result matches') + + async function worker (arg) { + await sleep(arg) + return arg + } +}) + +test('multiple executions', async function (t) { + const queue = buildQueue(worker, 1) + const toExec = [1, 2, 3, 4, 5] + const expected = ['a', 'b', 'c', 'd', 'e'] + let count = 0 + + await Promise.all(toExec.map(async function (task, i) { + const result = await queue.push(task) + t.equal(result, expected[i], 'the result matches') + })) + + async function worker (arg) { + t.equal(arg, toExec[count], 'arg matches') + return expected[count++] + } +}) + +test('drained', async function (t) { + const queue = buildQueue(worker, 2) + + const toExec = new Array(10).fill(10) + let count = 0 + + async function worker (arg) { + await sleep(arg) + count++ + } + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() + + t.equal(count, toExec.length) + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() + + t.equal(count, toExec.length * 2) +}) + +test('drained with exception should not throw', async function (t) { + const queue = buildQueue(worker, 2) + + const toExec = new Array(10).fill(10) + + async function worker () { + throw new Error('foo') + } + + toExec.forEach(function (i) { + queue.push(i) + }) + + await queue.drained() +}) + +test('drained with drain function', async function (t) { + let drainCalled = false + const queue = buildQueue(worker, 2) + + queue.drain = function () { + drainCalled = true + } + + const toExec = new Array(10).fill(10) + let count = 0 + + async function worker (arg) { + await sleep(arg) + count++ + } + + toExec.forEach(function () { + queue.push() + }) + + await queue.drained() + + t.equal(count, toExec.length) + t.equal(drainCalled, true) +}) + +test('drained while idle should resolve', async function (t) { + const queue = buildQueue(worker, 2) + + async function worker (arg) { + await sleep(arg) + } + + await queue.drained() +}) + +test('drained while idle should not call the drain function', async function (t) { + let drainCalled = false + const queue = buildQueue(worker, 2) + + queue.drain = function () { + drainCalled = true + } + + async function worker (arg) { + await sleep(arg) + } + + await queue.drained() + + t.equal(drainCalled, false) +}) + +test('set this', async function (t) { + t.plan(1) + const that = {} + const queue = buildQueue(that, worker, 1) + + await queue.push(42) + + async function worker (arg) { + t.equal(this, that, 'this matches') + } +}) + +test('unshift', async function (t) { + const queue = buildQueue(worker, 1) + const expected = [1, 2, 3, 4] + + await Promise.all([ + queue.push(1), + queue.push(4), + queue.unshift(3), + queue.unshift(2) + ]) + + t.is(expected.length, 0) + + async function worker (arg) { + t.equal(expected.shift(), arg, 'tasks come in order') + } +}) + +test('push with worker throwing error', async function (t) { + t.plan(5) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + try { + await q.push(42) + } catch (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error 
message should be "test error"') + } +}) + +test('unshift with worker throwing error', async function (t) { + t.plan(2) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + try { + await q.unshift(42) + } catch (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + } +}) + +test('no unhandledRejection (push)', async function (t) { + function handleRejection () { + t.fail('unhandledRejection') + } + process.once('unhandledRejection', handleRejection) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + + q.push(42) + + await immediate() + process.removeListener('unhandledRejection', handleRejection) +}) + +test('no unhandledRejection (unshift)', async function (t) { + function handleRejection () { + t.fail('unhandledRejection') + } + process.once('unhandledRejection', handleRejection) + const q = buildQueue(async function (task, cb) { + throw new Error('test error') + }, 1) + + q.unshift(42) + + await immediate() + process.removeListener('unhandledRejection', handleRejection) +}) + +test('drained should resolve after async tasks complete', async function (t) { + const logs = [] + + async function processTask () { + await new Promise(resolve => setTimeout(resolve, 0)) + logs.push('processed') + } + + const queue = buildQueue(processTask, 1) + queue.drain = () => logs.push('called drain') + + queue.drained().then(() => logs.push('drained promise resolved')) + + await Promise.all([ + queue.push(), + queue.push(), + queue.push() + ]) + + t.deepEqual(logs, [ + 'processed', + 'processed', + 'processed', + 'called drain', + 'drained promise resolved' + ], 'events happened in correct order') +}) + +test('drained should handle undefined drain function', async function (t) { + const queue = buildQueue(worker, 1) + + async function worker (arg) { + await sleep(10) + return arg + } + + queue.drain = undefined + queue.push(1) + await queue.drained() + + t.pass('drained resolved successfully with undefined drain') +}) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js new file mode 100644 index 0000000..79f0f6c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js @@ -0,0 +1,653 @@ +'use strict' + +/* eslint-disable no-var */ + +var test = require('tape') +var buildQueue = require('../') + +test('concurrency', function (t) { + t.plan(6) + t.throws(buildQueue.bind(null, worker, 0)) + t.throws(buildQueue.bind(null, worker, NaN)) + t.doesNotThrow(buildQueue.bind(null, worker, 1)) + + var queue = buildQueue(worker, 1) + t.throws(function () { + queue.concurrency = 0 + }) + t.throws(function () { + queue.concurrency = NaN + }) + t.doesNotThrow(function () { + queue.concurrency = 2 + }) + + function worker (arg, cb) { + cb(null, true) + } +}) + +test('worker execution', function (t) { + t.plan(3) + + var queue = buildQueue(worker, 1) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + function worker (arg, cb) { + t.equal(arg, 42) + cb(null, true) + } +}) + +test('limit', function (t) { + t.plan(4) + + var expected = [10, 0] + var queue = buildQueue(worker, 1) + + queue.push(10, result) + queue.push(0, result) + + function result (err, arg) { + t.error(err, 'no error') + t.equal(arg, expected.shift(), 'the result 
matches') + } + + function worker (arg, cb) { + setTimeout(cb, arg, null, arg) + } +}) + +test('multiple executions', function (t) { + t.plan(15) + + var queue = buildQueue(worker, 1) + var toExec = [1, 2, 3, 4, 5] + var count = 0 + + toExec.forEach(function (task) { + queue.push(task, done) + }) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, toExec[count - 1], 'the result matches') + } + + function worker (arg, cb) { + t.equal(arg, toExec[count], 'arg matches') + count++ + setImmediate(cb, null, arg) + } +}) + +test('multiple executions, one after another', function (t) { + t.plan(15) + + var queue = buildQueue(worker, 1) + var toExec = [1, 2, 3, 4, 5] + var count = 0 + + queue.push(toExec[0], done) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, toExec[count - 1], 'the result matches') + if (count < toExec.length) { + queue.push(toExec[count], done) + } + } + + function worker (arg, cb) { + t.equal(arg, toExec[count], 'arg matches') + count++ + setImmediate(cb, null, arg) + } +}) + +test('set this', function (t) { + t.plan(3) + + var that = {} + var queue = buildQueue(that, worker, 1) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(this, that, 'this matches') + }) + + function worker (arg, cb) { + t.equal(this, that, 'this matches') + cb(null, true) + } +}) + +test('drain', function (t) { + t.plan(4) + + var queue = buildQueue(worker, 1) + var worked = false + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + queue.drain = function () { + t.equal(true, worked, 'drained') + } + + function worker (arg, cb) { + t.equal(arg, 42) + worked = true + setImmediate(cb, null, true) + } +}) + +test('pause && resume', function (t) { + t.plan(13) + + var queue = buildQueue(worker, 1) + var worked = false + var expected = [42, 24] + + t.notOk(queue.paused, 'it should not be paused') + + queue.pause() + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + queue.push(24, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + t.notOk(worked, 'it should be paused') + t.ok(queue.paused, 'it should be paused') + + queue.resume() + queue.pause() + queue.resume() + queue.resume() // second resume is a no-op + + function worker (arg, cb) { + t.notOk(queue.paused, 'it should not be paused') + t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') + t.equal(arg, expected.shift()) + worked = true + process.nextTick(function () { cb(null, true) }) + } +}) + +test('pause in flight && resume', function (t) { + t.plan(16) + + var queue = buildQueue(worker, 1) + var expected = [42, 24, 12] + + t.notOk(queue.paused, 'it should not be paused') + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.ok(queue.paused, 'it should be paused') + process.nextTick(function () { + queue.resume() + queue.pause() + queue.resume() + }) + }) + + queue.push(24, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.paused, 'it should not be paused') + }) + + queue.push(12, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.paused, 'it should not be paused') + }) + + queue.pause() + + function worker (arg, cb) { + t.ok(queue.running() <= queue.concurrency, 'should 
respect the concurrency') + t.equal(arg, expected.shift()) + process.nextTick(function () { cb(null, true) }) + } +}) + +test('altering concurrency', function (t) { + t.plan(24) + + var queue = buildQueue(worker, 1) + + queue.push(24, workDone) + queue.push(24, workDone) + queue.push(24, workDone) + + queue.pause() + + queue.concurrency = 3 // concurrency changes are ignored while paused + queue.concurrency = 2 + + queue.resume() + + t.equal(queue.running(), 2, '2 jobs running') + + queue.concurrency = 3 + + t.equal(queue.running(), 3, '3 jobs running') + + queue.concurrency = 1 + + t.equal(queue.running(), 3, '3 jobs running') // running jobs can't be killed + + queue.push(24, workDone) + queue.push(24, workDone) + queue.push(24, workDone) + queue.push(24, workDone) + + function workDone (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + } + + function worker (arg, cb) { + t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('idle()', function (t) { + t.plan(12) + + var queue = buildQueue(worker, 1) + + t.ok(queue.idle(), 'queue is idle') + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + t.notOk(queue.idle(), 'queue is not idle') + }) + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + // it will go idle after executing this function + setImmediate(function () { + t.ok(queue.idle(), 'queue is now idle') + }) + }) + + t.notOk(queue.idle(), 'queue is not idle') + + function worker (arg, cb) { + t.notOk(queue.idle(), 'queue is not idle') + t.equal(arg, 42) + setImmediate(cb, null, true) + } +}) + +test('saturated', function (t) { + t.plan(9) + + var queue = buildQueue(worker, 1) + var preworked = 0 + var worked = 0 + + queue.saturated = function () { + t.pass('saturated') + t.equal(preworked, 1, 'started 1 task') + t.equal(worked, 0, 'worked zero task') + } + + queue.push(42, done) + queue.push(42, done) + + function done (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + } + + function worker (arg, cb) { + t.equal(arg, 42) + preworked++ + setImmediate(function () { + worked++ + cb(null, true) + }) + } +}) + +test('length', function (t) { + t.plan(7) + + var queue = buildQueue(worker, 1) + + t.equal(queue.length(), 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.length(), 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.length(), 1, 'one task waiting') + queue.push(42, done) + t.equal(queue.length(), 2, 'two tasks waiting') + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('getQueue', function (t) { + t.plan(10) + + var queue = buildQueue(worker, 1) + + t.equal(queue.getQueue().length, 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.getQueue().length, 0, 'nothing waiting') + queue.push(42, done) + t.equal(queue.getQueue().length, 1, 'one task waiting') + t.equal(queue.getQueue()[0], 42, 'should be equal') + queue.push(43, done) + t.equal(queue.getQueue().length, 2, 'two tasks waiting') + t.equal(queue.getQueue()[0], 42, 'should be equal') + t.equal(queue.getQueue()[1], 43, 'should be equal') + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + 
+test('unshift', function (t) { + t.plan(8) + + var queue = buildQueue(worker, 1) + var expected = [1, 2, 3, 4] + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + + function done (err, result) { + t.error(err, 'no error') + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('unshift && empty', function (t) { + t.plan(2) + + var queue = buildQueue(worker, 1) + var completed = false + + queue.pause() + + queue.empty = function () { + t.notOk(completed, 'the task has not completed yet') + } + + queue.unshift(1, done) + + queue.resume() + + function done (err, result) { + completed = true + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('push && empty', function (t) { + t.plan(2) + + var queue = buildQueue(worker, 1) + var completed = false + + queue.pause() + + queue.empty = function () { + t.notOk(completed, 'the task has not completed yet') + } + + queue.push(1, done) + + queue.resume() + + function done (err, result) { + completed = true + t.error(err, 'no error') + } + + function worker (arg, cb) { + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('kill', function (t) { + t.plan(5) + + var queue = buildQueue(worker, 1) + var expected = [1] + + var predrain = queue.drain + + queue.drain = function drain () { + t.fail('drain should never be called') + } + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + queue.kill() + + function done (err, result) { + t.error(err, 'no error') + setImmediate(function () { + t.equal(queue.length(), 0, 'no queued tasks') + t.equal(queue.running(), 0, 'no running tasks') + t.equal(queue.drain, predrain, 'drain is back to default') + }) + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('killAndDrain', function (t) { + t.plan(6) + + var queue = buildQueue(worker, 1) + var expected = [1] + + var predrain = queue.drain + + queue.drain = function drain () { + t.pass('drain has been called') + } + + queue.push(1, done) + queue.push(4, done) + queue.unshift(3, done) + queue.unshift(2, done) + queue.killAndDrain() + + function done (err, result) { + t.error(err, 'no error') + setImmediate(function () { + t.equal(queue.length(), 0, 'no queued tasks') + t.equal(queue.running(), 0, 'no running tasks') + t.equal(queue.drain, predrain, 'drain is back to default') + }) + } + + function worker (arg, cb) { + t.equal(expected.shift(), arg, 'tasks come in order') + setImmediate(function () { + cb(null, true) + }) + } +}) + +test('pause && idle', function (t) { + t.plan(11) + + var queue = buildQueue(worker, 1) + var worked = false + + t.notOk(queue.paused, 'it should not be paused') + t.ok(queue.idle(), 'should be idle') + + queue.pause() + + queue.push(42, function (err, result) { + t.error(err, 'no error') + t.equal(result, true, 'result matches') + }) + + t.notOk(worked, 'it should be paused') + t.ok(queue.paused, 'it should be paused') + t.notOk(queue.idle(), 'should not be idle') + + queue.resume() + + t.notOk(queue.paused, 'it should not be paused') + t.notOk(queue.idle(), 'it should not be idle') + + function worker (arg, cb) { + t.equal(arg, 42) + worked = true + process.nextTick(cb.bind(null, null, true)) + process.nextTick(function () { + t.ok(queue.idle(), 'is should 
be idle') + }) + } +}) + +test('push without cb', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + + queue.push(42) + + function worker (arg, cb) { + t.equal(arg, 42) + cb() + } +}) + +test('unshift without cb', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + + queue.unshift(42) + + function worker (arg, cb) { + t.equal(arg, 42) + cb() + } +}) + +test('push with worker throwing error', function (t) { + t.plan(5) + var q = buildQueue(function (task, cb) { + cb(new Error('test error'), null) + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + q.push(42, function (err) { + t.ok(err instanceof Error, 'push callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + }) +}) + +test('unshift with worker throwing error', function (t) { + t.plan(5) + var q = buildQueue(function (task, cb) { + cb(new Error('test error'), null) + }, 1) + q.error(function (err, task) { + t.ok(err instanceof Error, 'global error handler should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + t.equal(task, 42, 'The task executed should be passed') + }) + q.unshift(42, function (err) { + t.ok(err instanceof Error, 'unshift callback should catch the error') + t.match(err.message, /test error/, 'error message should be "test error"') + }) +}) + +test('pause/resume should trigger drain event', function (t) { + t.plan(1) + + var queue = buildQueue(worker, 1) + queue.pause() + queue.drain = function () { + t.pass('drain should be called') + } + + function worker (arg, cb) { + cb(null, true) + } + + queue.resume() +}) + +test('paused flag', function (t) { + t.plan(2) + + var queue = buildQueue(function (arg, cb) { + cb(null) + }, 1) + t.equal(queue.paused, false) + queue.pause() + t.equal(queue.paused, true) +}) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json new file mode 100644 index 0000000..66e16e9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "noEmit": true, + "strict": true + }, + "files": [ + "./example.ts" + ] +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md new file mode 100644 index 0000000..8d756fe --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md @@ -0,0 +1,237 @@ +# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) + +> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save fill-range +``` + +## Usage + +Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. + +```js +const fill = require('fill-range'); +// fill(from, to[, step, options]); + +console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] +console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 +``` + +**Params** + +* `from`: **{String|Number}** the number or letter to start with +* `to`: **{String|Number}** the number or letter to end with +* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. +* `options`: **{Object|Function}**: See all available [options](#options) + +## Examples + +By default, an array of values is returned. + +**Alphabetical ranges** + +```js +console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] +console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] +``` + +**Numerical ranges** + +Numbers can be defined as actual numbers or strings. + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] +``` + +**Negative ranges** + +Numbers can be defined as actual numbers or strings. 
+ +```js +console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] +console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] +``` + +**Steps (increments)** + +```js +// numerical ranges with increments +console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] +console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] +console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] + +// alphabetical ranges with increments +console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] +``` + +## Options + +### options.step + +**Type**: `number` (formatted as a string or number) + +**Default**: `undefined` + +**Description**: The increment to use for the range. Can be used with letters or numbers. + +**Example(s)** + +```js +// numbers +console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] +console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] +console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] + +// letters +console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] +console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] +console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] +``` + +### options.strictRanges + +**Type**: `boolean` + +**Default**: `false` + +**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. + +**Example(s)** + +The following are all invalid: + +```js +fill('1.1', '2'); // decimals not supported in ranges +fill('a', '2'); // incompatible range values +fill(1, 10, 'foo'); // invalid "step" argument +``` + +### options.stringify + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Cast all returned values to strings. By default, integers are returned as numbers. + +**Example(s)** + +```js +console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] +console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] +``` + +### options.toRegex + +**Type**: `boolean` + +**Default**: `undefined` + +**Description**: Create a regex-compatible source string, instead of expanding values to an array. + +**Example(s)** + +```js +// alphabetical range +console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' +// alphabetical with step +console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' +// numerical range +console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' +// numerical range with zero padding +console.log(fill('000001', '100000', { toRegex: true })); +//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' +``` + +### options.transform + +**Type**: `function` + +**Default**: `undefined` + +**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. + +**Example(s)** + +```js +// add zero padding +console.log(fill(1, 5, value => String(value).padStart(4, '0'))); +//=> ['0001', '0002', '0003', '0004', '0005'] +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+ +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 116 | [jonschlinkert](https://github.com/jonschlinkert) | +| 4 | [paulmillr](https://github.com/paulmillr) | +| 2 | [realityking](https://github.com/realityking) | +| 2 | [bluelovers](https://github.com/bluelovers) | +| 1 | [edorivai](https://github.com/edorivai) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js new file mode 100644 index 0000000..ddb212e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js @@ -0,0 +1,248 @@ +/*! + * fill-range + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Licensed under the MIT License. + */ + +'use strict'; + +const util = require('util'); +const toRegexRange = require('to-regex-range'); + +const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); + +const transform = toNumber => { + return value => toNumber === true ? Number(value) : String(value); +}; + +const isValidValue = value => { + return typeof value === 'number' || (typeof value === 'string' && value !== ''); +}; + +const isNumber = num => Number.isInteger(+num); + +const zeros = input => { + let value = `${input}`; + let index = -1; + if (value[0] === '-') value = value.slice(1); + if (value === '0') return false; + while (value[++index] === '0'); + return index > 0; +}; + +const stringify = (start, end, options) => { + if (typeof start === 'string' || typeof end === 'string') { + return true; + } + return options.stringify === true; +}; + +const pad = (input, maxLength, toNumber) => { + if (maxLength > 0) { + let dash = input[0] === '-' ? '-' : ''; + if (dash) input = input.slice(1); + input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); + } + if (toNumber === false) { + return String(input); + } + return input; +}; + +const toMaxLen = (input, maxLength) => { + let negative = input[0] === '-' ? '-' : ''; + if (negative) { + input = input.slice(1); + maxLength--; + } + while (input.length < maxLength) input = '0' + input; + return negative ? ('-' + input) : input; +}; + +const toSequence = (parts, options, maxLen) => { + parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); + + let prefix = options.capture ? 
'' : '?:'; + let positives = ''; + let negatives = ''; + let result; + + if (parts.positives.length) { + positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|'); + } + + if (parts.negatives.length) { + negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`; + } + + if (positives && negatives) { + result = `${positives}|${negatives}`; + } else { + result = positives || negatives; + } + + if (options.wrap) { + return `(${prefix}${result})`; + } + + return result; +}; + +const toRange = (a, b, isNumbers, options) => { + if (isNumbers) { + return toRegexRange(a, b, { wrap: false, ...options }); + } + + let start = String.fromCharCode(a); + if (a === b) return start; + + let stop = String.fromCharCode(b); + return `[${start}-${stop}]`; +}; + +const toRegex = (start, end, options) => { + if (Array.isArray(start)) { + let wrap = options.wrap === true; + let prefix = options.capture ? '' : '?:'; + return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); + } + return toRegexRange(start, end, options); +}; + +const rangeError = (...args) => { + return new RangeError('Invalid range arguments: ' + util.inspect(...args)); +}; + +const invalidRange = (start, end, options) => { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; +}; + +const invalidStep = (step, options) => { + if (options.strictRanges === true) { + throw new TypeError(`Expected step "${step}" to be a number`); + } + return []; +}; + +const fillNumbers = (start, end, step = 1, options = {}) => { + let a = Number(start); + let b = Number(end); + + if (!Number.isInteger(a) || !Number.isInteger(b)) { + if (options.strictRanges === true) throw rangeError([start, end]); + return []; + } + + // fix negative zero + if (a === 0) a = 0; + if (b === 0) b = 0; + + let descending = a > b; + let startString = String(start); + let endString = String(end); + let stepString = String(step); + step = Math.max(Math.abs(step), 1); + + let padded = zeros(startString) || zeros(endString) || zeros(stepString); + let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; + let toNumber = padded === false && stringify(start, end, options) === false; + let format = options.transform || transform(toNumber); + + if (options.toRegex && step === 1) { + return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); + } + + let parts = { negatives: [], positives: [] }; + let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + if (options.toRegex === true && step > 1) { + push(a); + } else { + range.push(pad(format(a, index), maxLen, toNumber)); + } + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return step > 1 + ? 
toSequence(parts, options, maxLen) + : toRegex(range, null, { wrap: false, ...options }); + } + + return range; +}; + +const fillLetters = (start, end, step = 1, options = {}) => { + if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { + return invalidRange(start, end, options); + } + + let format = options.transform || (val => String.fromCharCode(val)); + let a = `${start}`.charCodeAt(0); + let b = `${end}`.charCodeAt(0); + + let descending = a > b; + let min = Math.min(a, b); + let max = Math.max(a, b); + + if (options.toRegex && step === 1) { + return toRange(min, max, false, options); + } + + let range = []; + let index = 0; + + while (descending ? a >= b : a <= b) { + range.push(format(a, index)); + a = descending ? a - step : a + step; + index++; + } + + if (options.toRegex === true) { + return toRegex(range, null, { wrap: false, options }); + } + + return range; +}; + +const fill = (start, end, step, options = {}) => { + if (end == null && isValidValue(start)) { + return [start]; + } + + if (!isValidValue(start) || !isValidValue(end)) { + return invalidRange(start, end, options); + } + + if (typeof step === 'function') { + return fill(start, end, 1, { transform: step }); + } + + if (isObject(step)) { + return fill(start, end, 0, step); + } + + let opts = { ...options }; + if (opts.capture === true) opts.wrap = true; + step = step || opts.step || 1; + + if (!isNumber(step)) { + if (step != null && !isObject(step)) return invalidStep(step, opts); + return fill(start, end, 1, step); + } + + if (isNumber(start) && isNumber(end)) { + return fillNumbers(start, end, step, opts); + } + + return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); +}; + +module.exports = fill; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json new file mode 100644 index 0000000..582357f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json @@ -0,0 +1,74 @@ +{ + "name": "fill-range", + "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", + "version": "7.1.1", + "homepage": "https://github.com/jonschlinkert/fill-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Edo Rivai (edo.rivai.nl)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Paul Miller (paulmillr.com)", + "Rouven Weßling (www.rouvenwessling.de)", + "(https://github.com/wtgtybhertgeghgtwtg)" + ], + "repository": "jonschlinkert/fill-range", + "bugs": { + "url": "https://github.com/jonschlinkert/fill-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "devDependencies": { + "gulp-format-md": "^2.0.0", + "mocha": "^6.1.1", + "nyc": "^15.1.0" + }, + "keywords": [ + "alpha", + "alphabetical", + "array", + "bash", + "brace", + "expand", + "expansion", + "fill", + "glob", + "match", + "matches", + "matching", + "number", + "numerical", + "range", + "ranges", 
+ "regex", + "sh" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js new file mode 100644 index 0000000..2dd7574 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js @@ -0,0 +1,52 @@ +'use strict'; +const {PassThrough: PassThroughStream} = require('stream'); + +module.exports = options => { + options = {...options}; + + const {array} = options; + let {encoding} = options; + const isBuffer = encoding === 'buffer'; + let objectMode = false; + + if (array) { + objectMode = !(encoding || isBuffer); + } else { + encoding = encoding || 'utf8'; + } + + if (isBuffer) { + encoding = null; + } + + const stream = new PassThroughStream({objectMode}); + + if (encoding) { + stream.setEncoding(encoding); + } + + let length = 0; + const chunks = []; + + stream.on('data', chunk => { + chunks.push(chunk); + + if (objectMode) { + length = chunks.length; + } else { + length += chunk.length; + } + }); + + stream.getBufferedValue = () => { + if (array) { + return chunks; + } + + return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); + }; + + stream.getBufferedLength = () => length; + + return stream; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts new file mode 100644 index 0000000..9485b2b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts @@ -0,0 +1,105 @@ +/// +import {Stream} from 'stream'; + +declare class MaxBufferErrorClass extends Error { + readonly name: 'MaxBufferError'; + constructor(); +} + +declare namespace getStream { + interface Options { + /** + Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `MaxBufferError` error. + + @default Infinity + */ + readonly maxBuffer?: number; + } + + interface OptionsWithEncoding extends Options { + /** + [Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + + @default 'utf8' + */ + readonly encoding?: EncodingType; + } + + type MaxBufferError = MaxBufferErrorClass; +} + +declare const getStream: { + /** + Get the `stream` as a string. + + @returns A promise that resolves when the end event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + + @example + ``` + import * as fs from 'fs'; + import getStream = require('get-stream'); + + (async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + // ,,))))))));, + // __)))))))))))))), + // \|/ -\(((((''''((((((((. + // -*-==//////(('' . `)))))), + // /|\ ))| o ;-. '((((( ,(, + // ( `| / ) ;))))' ,_))^;(~ + // | | | ,))((((_ _____------~~~-. 
%,;(;(>';'~ + // o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + // ; ''''```` `: `:::|\,__,%% );`'; ~ + // | _ ) / `:|`----' `-' + // ______/\/~ | / / + // /~;;.____/;;' / ___--,-( `;;;/ + // / // _;______;'------~~~~~ /;;/\ / + // // | | / ; \;;,\ + // (<_ | ; /',/-----' _> + // \_| ||_ //~;~~~~~~~~~ + // `\_| (,~~ + // \~\ + // ~~ + })(); + ``` + */ + (stream: Stream, options?: getStream.OptionsWithEncoding): Promise; + + /** + Get the `stream` as a buffer. + + It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + */ + buffer( + stream: Stream, + options?: getStream.Options + ): Promise; + + /** + Get the `stream` as an array of values. + + It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + + - When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + - When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + - When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + */ + array( + stream: Stream, + options?: getStream.Options + ): Promise; + array( + stream: Stream, + options: getStream.OptionsWithEncoding<'buffer'> + ): Promise; + array( + stream: Stream, + options: getStream.OptionsWithEncoding + ): Promise; + + MaxBufferError: typeof MaxBufferErrorClass; +}; + +export = getStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js new file mode 100644 index 0000000..1c5d028 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js @@ -0,0 +1,61 @@ +'use strict'; +const {constants: BufferConstants} = require('buffer'); +const stream = require('stream'); +const {promisify} = require('util'); +const bufferStream = require('./buffer-stream'); + +const streamPipelinePromisified = promisify(stream.pipeline); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +async function getStream(inputStream, options) { + if (!inputStream) { + throw new Error('Expected a stream'); + } + + options = { + maxBuffer: Infinity, + ...options + }; + + const {maxBuffer} = options; + const stream = bufferStream(options); + + await new Promise((resolve, reject) => { + const rejectPromise = error => { + // Don't retrieve an oversized buffer. 
+ if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { + error.bufferedData = stream.getBufferedValue(); + } + + reject(error); + }; + + (async () => { + try { + await streamPipelinePromisified(inputStream, stream); + resolve(); + } catch (error) { + rejectPromise(error); + } + })(); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }); + + return stream.getBufferedValue(); +} + +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); +module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); +module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
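
A minimal sketch of how the `maxBuffer` guard in the `get-stream` `index.js` above surfaces to callers, assuming a local install of `get-stream`; the in-memory chunks and the 6-character limit are made up for illustration:

```js
'use strict';
const {Readable} = require('stream');
const getStream = require('get-stream');

(async () => {
	// Three 4-character chunks; with the default utf8 encoding the buffered
	// length passes the 6-character limit on the second chunk.
	const input = Readable.from(['aaaa', 'bbbb', 'cccc']);

	try {
		await getStream(input, {maxBuffer: 6});
	} catch (error) {
		if (error instanceof getStream.MaxBufferError) {
			console.log(error.message);      //=> 'maxBuffer exceeded'
			console.log(error.bufferedData); // data collected before the limit was hit
		}
	}
})();
```

The same option applies to `getStream.buffer()`, where the limit is counted in bytes rather than characters.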
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json new file mode 100644 index 0000000..bd47a75 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json @@ -0,0 +1,47 @@ +{ + "name": "get-stream", + "version": "6.0.1", + "description": "Get a stream as a string, buffer, or array", + "license": "MIT", + "repository": "sindresorhus/get-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "buffer-stream.js" + ], + "keywords": [ + "get", + "stream", + "promise", + "concat", + "string", + "text", + "buffer", + "read", + "data", + "consume", + "readable", + "readablestream", + "array", + "object" + ], + "devDependencies": { + "@types/node": "^14.0.27", + "ava": "^2.4.0", + "into-stream": "^5.0.0", + "tsd": "^0.13.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md new file mode 100644 index 0000000..70b01fd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md @@ -0,0 +1,124 @@ +# get-stream + +> Get a stream as a string, buffer, or array + +## Install + +``` +$ npm install get-stream +``` + +## Usage + +```js +const fs = require('fs'); +const getStream = require('get-stream'); + +(async () => { + const stream = fs.createReadStream('unicorn.txt'); + + console.log(await getStream(stream)); + /* + ,,))))))));, + __)))))))))))))), + \|/ -\(((((''''((((((((. + -*-==//////(('' . `)))))), + /|\ ))| o ;-. '((((( ,(, + ( `| / ) ;))))' ,_))^;(~ + | | | ,))((((_ _____------~~~-. %,;(;(>';'~ + o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ + ; ''''```` `: `:::|\,__,%% );`'; ~ + | _ ) / `:|`----' `-' + ______/\/~ | / / + /~;;.____/;;' / ___--,-( `;;;/ + / // _;______;'------~~~~~ /;;/\ / + // | | / ; \;;,\ + (<_ | ; /',/-----' _> + \_| ||_ //~;~~~~~~~~~ + `\_| (,~~ + \~\ + ~~ + */ +})(); +``` + +## API + +The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. + +### getStream(stream, options?) + +Get the `stream` as a string. + +#### options + +Type: `object` + +##### encoding + +Type: `string`\ +Default: `'utf8'` + +[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. + +##### maxBuffer + +Type: `number`\ +Default: `Infinity` + +Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. + +### getStream.buffer(stream, options?) + +Get the `stream` as a buffer. + +It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. + +### getStream.array(stream, options?) + +Get the `stream` as an array of values. + +It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: + +- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. 
In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). + +- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. + +- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. + +## Errors + +If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. + +```js +(async () => { + try { + await getStream(streamThatErrorsAtTheEnd('unicorn')); + } catch (error) { + console.log(error.bufferedData); + //=> 'unicorn' + } +})() +``` + +## FAQ + +### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? + +This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. + +## Related + +- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer + +--- + +
+**Get professional support for this package with a Tidelift subscription.**
+
+Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
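
A hedged usage sketch of the `getStream.buffer()` and `getStream.array()` variants described in the readme above, assuming a local install of `get-stream`; `unicorn.txt` is the readme's own placeholder file:

```js
'use strict';
const fs = require('fs');
const getStream = require('get-stream');

(async () => {
	// Whole stream collected as a single Buffer; `maxBuffer` counts bytes here.
	const buffer = await getStream.buffer(fs.createReadStream('unicorn.txt'));
	console.log(Buffer.isBuffer(buffer)); //=> true

	// Chunks collected as an array of utf8 strings.
	const chunks = await getStream.array(fs.createReadStream('unicorn.txt'), {encoding: 'utf8'});
	console.log(Array.isArray(chunks)); //=> true
})();
```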
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md new file mode 100644 index 0000000..fb9de96 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md @@ -0,0 +1,110 @@ +### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) + + +### Bug Fixes + +* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) + +### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) + + +### Bug Fixes + +* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) + +## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) + + +### Features + +* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) + +## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* Drop support for node <6 & bump dependencies + +### Miscellaneous Chores + +* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) + +## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* question marks are valid path characters on Windows so avoid flagging as a glob when alone +* Update is-glob dependency + +### Features + +* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) +* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) +* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) + +## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) + + +### Features + +* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) +* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) +* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) +* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) +* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) + + +### Bug Fixes + +* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) + +### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) + + +### Features + +* use path-dirname ponyfill 
([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) + + +### Bug Fixes + +* unescape glob-escaped dirnames on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) + +## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) + + +### ⚠ BREAKING CHANGES + +* update is-glob dependency + +### Features + +* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) + +## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) + + +### Features + +* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) + +## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) + +## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) + + +### Reverts + +* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) + +## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) + + +### Features + +* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) + +## 1.0.0 (2021-01-27) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE new file mode 100644 index 0000000..63222d7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015, 2019 Elan Shanker + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md new file mode 100644 index 0000000..36a2793 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md @@ -0,0 +1,137 @@ +

+ +# glob-parent + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] + +Extract the non-magic parent path from a glob string. + +## Usage + +```js +var globParent = require('glob-parent'); + +globParent('path/to/*.js'); // 'path/to' +globParent('/root/path/to/*.js'); // '/root/path/to' +globParent('/*.js'); // '/' +globParent('*.js'); // '.' +globParent('**/*.js'); // '.' +globParent('path/{to,from}'); // 'path' +globParent('path/!(to|from)'); // 'path' +globParent('path/?(to|from)'); // 'path' +globParent('path/+(to|from)'); // 'path' +globParent('path/*(to|from)'); // 'path' +globParent('path/@(to|from)'); // 'path' +globParent('path/**/*'); // 'path' + +// if provided a non-glob path, returns the nearest dir +globParent('path/foo/bar.js'); // 'path/foo' +globParent('path/foo/'); // 'path/foo' +globParent('path/foo'); // 'path' (see issue #3 for details) +``` + +## API + +### `globParent(maybeGlobString, [options])` + +Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. + +#### options + +```js +{ + // Disables the automatic conversion of slashes for Windows + flipBackslashes: true +} +``` + +## Escaping + +The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: + +- `?` (question mark) unless used as a path segment alone +- `*` (asterisk) +- `|` (pipe) +- `(` (opening parenthesis) +- `)` (closing parenthesis) +- `{` (opening curly brace) +- `}` (closing curly brace) +- `[` (opening bracket) +- `]` (closing bracket) + +**Example** + +```js +globParent('foo/[bar]/') // 'foo' +globParent('foo/\\[bar]/') // 'foo/[bar]' +``` + +## Limitations + +### Braces & Brackets +This library attempts a quick and imperfect method of determining which path +parts have glob magic without fully parsing/lexing the pattern. There are some +advanced use cases that can trip it up, such as nested braces where the outer +pair is escaped and the inner one contains a path separator. If you find +yourself in the unlikely circumstance of being affected by this or need to +ensure higher-fidelity glob handling in your library, it is recommended that you +pre-process your input with [expand-braces] and/or [expand-brackets]. + +### Windows +Backslashes are not valid path separators for globs. If a path with backslashes +is provided anyway, for simple cases, glob-parent will replace the path +separator for you and return the non-glob parent path (now with +forward-slashes, which are still valid as Windows path separators). + +This cannot be used in conjunction with escape characters. + +```js +// BAD +globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' + +// GOOD +globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' +``` + +If you are using escape characters for a pattern without path parts (i.e. +relative to `cwd`), prefix with `./` to avoid confusing glob-parent. + +```js +// BAD +globParent('foo \\[bar]') // 'foo ' +globParent('foo \\[bar]*') // 'foo ' + +// GOOD +globParent('./foo \\[bar]') // 'foo [bar]' +globParent('./foo \\[bar]*') // '.' 
+``` + +## License + +ISC + +[expand-braces]: https://github.com/jonschlinkert/expand-braces +[expand-brackets]: https://github.com/jonschlinkert/expand-brackets + +[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg +[npm-url]: https://www.npmjs.com/package/glob-parent +[npm-image]: https://img.shields.io/npm/v/glob-parent.svg + +[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master +[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master + +[travis-url]: https://travis-ci.org/gulpjs/glob-parent +[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci + +[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent +[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor + +[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent +[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg + +[gitter-url]: https://gitter.im/gulpjs/gulp +[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js new file mode 100644 index 0000000..09e257e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js @@ -0,0 +1,42 @@ +'use strict'; + +var isGlob = require('is-glob'); +var pathPosixDirname = require('path').posix.dirname; +var isWin32 = require('os').platform() === 'win32'; + +var slash = '/'; +var backslash = /\\/g; +var enclosure = /[\{\[].*[\}\]]$/; +var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; +var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; + +/** + * @param {string} str + * @param {Object} opts + * @param {boolean} [opts.flipBackslashes=true] + * @returns {string} + */ +module.exports = function globParent(str, opts) { + var options = Object.assign({ flipBackslashes: true }, opts); + + // flip windows path separators + if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { + str = str.replace(backslash, slash); + } + + // special case for strings ending in enclosure containing path separator + if (enclosure.test(str)) { + str += slash; + } + + // preserves full path in case of trailing path separator + str += 'a'; + + // remove path parts that are globby + do { + str = pathPosixDirname(str); + } while (isGlob(str) || globby.test(str)); + + // remove escape chars and return result + return str.replace(escaped, '$1'); +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json new file mode 100644 index 0000000..125c971 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json @@ -0,0 +1,48 @@ +{ + "name": "glob-parent", + "version": "5.1.2", + "description": "Extract the non-magic parent path from a glob string.", + "author": "Gulp Team (https://gulpjs.com/)", + "contributors": [ + "Elan Shanker (https://github.com/es128)", + "Blaine Bublitz " + ], + "repository": "gulpjs/glob-parent", + "license": "ISC", + "engines": { + "node": ">= 6" + }, + "main": "index.js", + "files": [ + "LICENSE", + "index.js" + ], + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "nyc mocha --async-only", + "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", + "coveralls": "nyc report 
--reporter=text-lcov | coveralls" + }, + "dependencies": { + "is-glob": "^4.0.1" + }, + "devDependencies": { + "coveralls": "^3.0.11", + "eslint": "^2.13.1", + "eslint-config-gulp": "^3.0.1", + "expect": "^1.20.2", + "mocha": "^6.0.2", + "nyc": "^13.3.0" + }, + "keywords": [ + "glob", + "parent", + "strip", + "path", + "dirname", + "directory", + "base", + "wildcard" + ] +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md new file mode 100644 index 0000000..70d0392 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md @@ -0,0 +1,11 @@ +# 2.1.0 + +## TypeScript types + +- Add [TypeScript definitions](src/main.d.ts) + +# 2.0.0 + +## Breaking changes + +- Minimal supported Node.js version is now `10.17.0` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE new file mode 100644 index 0000000..9af9492 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 ehmicky + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md new file mode 100644 index 0000000..2af37c3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md @@ -0,0 +1,165 @@ +[![Codecov](https://img.shields.io/codecov/c/github/ehmicky/human-signals.svg?label=tested&logo=codecov)](https://codecov.io/gh/ehmicky/human-signals) +[![Travis](https://img.shields.io/badge/cross-platform-4cc61e.svg?logo=travis)](https://travis-ci.org/ehmicky/human-signals) +[![Node](https://img.shields.io/node/v/human-signals.svg?logo=node.js)](https://www.npmjs.com/package/human-signals) +[![Gitter](https://img.shields.io/gitter/room/ehmicky/human-signals.svg?logo=gitter)](https://gitter.im/ehmicky/human-signals) +[![Twitter](https://img.shields.io/badge/%E2%80%8B-twitter-4cc61e.svg?logo=twitter)](https://twitter.com/intent/follow?screen_name=ehmicky) +[![Medium](https://img.shields.io/badge/%E2%80%8B-medium-4cc61e.svg?logo=medium)](https://medium.com/@ehmicky) + +Human-friendly process signals. + +This is a map of known process signals with some information about each signal. + +Unlike +[`os.constants.signals`](https://nodejs.org/api/os.html#os_signal_constants) +this includes: + +- human-friendly [descriptions](#description) +- [default actions](#action), including whether they [can be prevented](#forced) +- whether the signal is [supported](#supported) by the current OS + +# Example + +```js +const { signalsByName, signalsByNumber } = require('human-signals') + +console.log(signalsByName.SIGINT) +// { +// name: 'SIGINT', +// number: 2, +// description: 'User interruption with CTRL-C', +// supported: true, +// action: 'terminate', +// forced: false, +// standard: 'ansi' +// } + +console.log(signalsByNumber[8]) +// { +// name: 'SIGFPE', +// number: 8, +// description: 'Floating point arithmetic error', +// supported: true, +// action: 'core', +// forced: false, +// standard: 'ansi' +// } +``` + +# Install + +```bash +npm install human-signals +``` + +# Usage + +## signalsByName + +_Type_: `object` + +Object whose keys are signal [names](#name) and values are +[signal objects](#signal). + +## signalsByNumber + +_Type_: `object` + +Object whose keys are signal [numbers](#number) and values are +[signal objects](#signal). + +## signal + +_Type_: `object` + +Signal object with the following properties. + +### name + +_Type_: `string` + +Standard name of the signal, for example `'SIGINT'`. + +### number + +_Type_: `number` + +Code number of the signal, for example `2`. While most `number` are +cross-platform, some are different between different OS. + +### description + +_Type_: `string` + +Human-friendly description for the signal, for example +`'User interruption with CTRL-C'`. + +### supported + +_Type_: `boolean` + +Whether the current OS can handle this signal in Node.js using +[`process.on(name, handler)`](https://nodejs.org/api/process.html#process_signal_events). + +The list of supported signals +[is OS-specific](https://github.com/ehmicky/cross-platform-node-guide/blob/master/docs/6_networking_ipc/signals.md#cross-platform-signals). + +### action + +_Type_: `string`\ +_Enum_: `'terminate'`, `'core'`, `'ignore'`, `'pause'`, `'unpause'` + +What is the default action for this signal when it is not handled. + +### forced + +_Type_: `boolean` + +Whether the signal's default action cannot be prevented. This is `true` for +`SIGTERM`, `SIGKILL` and `SIGSTOP`. 
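Taken together, `supported`, `action`, and `forced` tell you whether installing a handler is worthwhile at all. A minimal sketch, assuming only the `signalsByName` export documented above (SIGINT is used because, per the example output earlier, its default action can be prevented):

```js
const { signalsByName } = require('human-signals')

const { name, supported, action, forced } = signalsByName.SIGINT

// Only register a handler when the current OS supports the signal and its
// default action ('terminate' for SIGINT) can actually be prevented.
if (supported && !forced) {
  process.on(name, () => {
    console.log(`${name} received; default action would be "${action}"`)
  })
}
```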
+ +### standard + +_Type_: `string`\ +_Enum_: `'ansi'`, `'posix'`, `'bsd'`, `'systemv'`, `'other'` + +Which standard defined that signal. + +# Support + +If you found a bug or would like a new feature, _don't hesitate_ to +[submit an issue on GitHub](../../issues). + +For other questions, feel free to +[chat with us on Gitter](https://gitter.im/ehmicky/human-signals). + +Everyone is welcome regardless of personal background. We enforce a +[Code of conduct](CODE_OF_CONDUCT.md) in order to promote a positive and +inclusive environment. + +# Contributing + +This project was made with ❤️. The simplest way to give back is by starring and +sharing it online. + +If the documentation is unclear or has a typo, please click on the page's `Edit` +button (pencil icon) and suggest a correction. + +If you would like to help us fix a bug or add a new feature, please check our +[guidelines](CONTRIBUTING.md). Pull requests are welcome! + +Thanks go to our wonderful contributors: + + + + + + + + + +

+ehmicky: 💻 🎨 🤔 📖
+electrovir: 💻
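The compiled build files that follow (`core.js`, `realtime.js`, `signals.js`, `main.js`) implement the map described in the README above. The key step is number normalization, shown later in `build/src/signals.js`: the OS-provided number from `os.constants.signals` wins, and the hard-coded default from `core.js` is used only when the current OS does not define the signal. A condensed sketch of that logic (not the package's exact source):

```js
const { constants } = require('os')

// Condensed from build/src/signals.js below: prefer the OS-specific signal
// number, fall back to the default number listed in core.js, and record
// whether the signal is supported on this OS.
const normalizeSignal = ({ name, number: defaultNumber, forced = false, ...rest }) => {
  const constantSignal = constants.signals[name]
  const supported = constantSignal !== undefined
  const number = supported ? constantSignal : defaultNumber
  return { name, number, supported, forced, ...rest }
}

console.log(normalizeSignal({
  name: 'SIGINT',
  number: 2,
  action: 'terminate',
  description: 'User interruption with CTRL-C',
  standard: 'ansi',
}))
// => { name: 'SIGINT', number: 2, supported: true, forced: false, action: 'terminate', ... }
```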
+ + + + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js new file mode 100644 index 0000000..98e8fce --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js @@ -0,0 +1,273 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGNALS=void 0; + +const SIGNALS=[ +{ +name:"SIGHUP", +number:1, +action:"terminate", +description:"Terminal closed", +standard:"posix"}, + +{ +name:"SIGINT", +number:2, +action:"terminate", +description:"User interruption with CTRL-C", +standard:"ansi"}, + +{ +name:"SIGQUIT", +number:3, +action:"core", +description:"User interruption with CTRL-\\", +standard:"posix"}, + +{ +name:"SIGILL", +number:4, +action:"core", +description:"Invalid machine instruction", +standard:"ansi"}, + +{ +name:"SIGTRAP", +number:5, +action:"core", +description:"Debugger breakpoint", +standard:"posix"}, + +{ +name:"SIGABRT", +number:6, +action:"core", +description:"Aborted", +standard:"ansi"}, + +{ +name:"SIGIOT", +number:6, +action:"core", +description:"Aborted", +standard:"bsd"}, + +{ +name:"SIGBUS", +number:7, +action:"core", +description: +"Bus error due to misaligned, non-existing address or paging error", +standard:"bsd"}, + +{ +name:"SIGEMT", +number:7, +action:"terminate", +description:"Command should be emulated but is not implemented", +standard:"other"}, + +{ +name:"SIGFPE", +number:8, +action:"core", +description:"Floating point arithmetic error", +standard:"ansi"}, + +{ +name:"SIGKILL", +number:9, +action:"terminate", +description:"Forced termination", +standard:"posix", +forced:true}, + +{ +name:"SIGUSR1", +number:10, +action:"terminate", +description:"Application-specific signal", +standard:"posix"}, + +{ +name:"SIGSEGV", +number:11, +action:"core", +description:"Segmentation fault", +standard:"ansi"}, + +{ +name:"SIGUSR2", +number:12, +action:"terminate", +description:"Application-specific signal", +standard:"posix"}, + +{ +name:"SIGPIPE", +number:13, +action:"terminate", +description:"Broken pipe or socket", +standard:"posix"}, + +{ +name:"SIGALRM", +number:14, +action:"terminate", +description:"Timeout or timer", +standard:"posix"}, + +{ +name:"SIGTERM", +number:15, +action:"terminate", +description:"Termination", +standard:"ansi"}, + +{ +name:"SIGSTKFLT", +number:16, +action:"terminate", +description:"Stack is empty or overflowed", +standard:"other"}, + +{ +name:"SIGCHLD", +number:17, +action:"ignore", +description:"Child process terminated, paused or unpaused", +standard:"posix"}, + +{ +name:"SIGCLD", +number:17, +action:"ignore", +description:"Child process terminated, paused or unpaused", +standard:"other"}, + +{ +name:"SIGCONT", +number:18, +action:"unpause", +description:"Unpaused", +standard:"posix", +forced:true}, + +{ +name:"SIGSTOP", +number:19, +action:"pause", +description:"Paused", +standard:"posix", +forced:true}, + +{ +name:"SIGTSTP", +number:20, +action:"pause", +description:"Paused using CTRL-Z or \"suspend\"", +standard:"posix"}, + +{ +name:"SIGTTIN", +number:21, +action:"pause", +description:"Background process cannot read terminal input", +standard:"posix"}, + +{ +name:"SIGBREAK", +number:21, +action:"terminate", +description:"User interruption with CTRL-BREAK", +standard:"other"}, + +{ +name:"SIGTTOU", +number:22, +action:"pause", +description:"Background process cannot write to terminal output", +standard:"posix"}, + +{ 
+name:"SIGURG", +number:23, +action:"ignore", +description:"Socket received out-of-band data", +standard:"bsd"}, + +{ +name:"SIGXCPU", +number:24, +action:"core", +description:"Process timed out", +standard:"bsd"}, + +{ +name:"SIGXFSZ", +number:25, +action:"core", +description:"File too big", +standard:"bsd"}, + +{ +name:"SIGVTALRM", +number:26, +action:"terminate", +description:"Timeout or timer", +standard:"bsd"}, + +{ +name:"SIGPROF", +number:27, +action:"terminate", +description:"Timeout or timer", +standard:"bsd"}, + +{ +name:"SIGWINCH", +number:28, +action:"ignore", +description:"Terminal window size changed", +standard:"bsd"}, + +{ +name:"SIGIO", +number:29, +action:"terminate", +description:"I/O is available", +standard:"other"}, + +{ +name:"SIGPOLL", +number:29, +action:"terminate", +description:"Watched event", +standard:"other"}, + +{ +name:"SIGINFO", +number:29, +action:"ignore", +description:"Request for process information", +standard:"other"}, + +{ +name:"SIGPWR", +number:30, +action:"terminate", +description:"Device running out of power", +standard:"systemv"}, + +{ +name:"SIGSYS", +number:31, +action:"core", +description:"Invalid system call", +standard:"other"}, + +{ +name:"SIGUNUSED", +number:31, +action:"terminate", +description:"Invalid system call", +standard:"other"}];exports.SIGNALS=SIGNALS; +//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map new file mode 100644 index 0000000..cbfce26 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../src/core.js"],"names":["SIGNALS","name","number","action","description","standard","forced"],"mappings":";;AAEO,KAAMA,CAAAA,OAAO,CAAG;AACrB;AACEC,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,iBAJf;AAKEC,QAAQ,CAAE,OALZ,CADqB;;AAQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,+BAJf;AAKEC,QAAQ,CAAE,MALZ,CARqB;;AAerB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,gCAJf;AAKEC,QAAQ,CAAE,OALZ,CAfqB;;AAsBrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,MALZ,CAtBqB;;AA6BrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CA7BqB;;AAoCrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,MALZ,CApCqB;;AA2CrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,KALZ,CA3CqB;;AAkDrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW;AACT,mEALJ;AAMEC,QAAQ,CAAE,KANZ,CAlDqB;;AA0DrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mDAJf;AAKEC,QAAQ,CAAE,OALZ,CA1DqB;;AAiErB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,MALZ,CAjEqB;;AAwErB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAxEqB;;AAgFrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CAhFqB;;AAuFrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,MALZ,CAvFqB;;AA8FrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,
CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CA9FqB;;AAqGrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,uBAJf;AAKEC,QAAQ,CAAE,OALZ,CArGqB;;AA4GrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CA5GqB;;AAmHrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,aAJf;AAKEC,QAAQ,CAAE,MALZ,CAnHqB;;AA0HrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,OALZ,CA1HqB;;AAiIrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAjIqB;;AAwIrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAxIqB;;AA+IrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,SAHV;AAIEC,WAAW,CAAE,UAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CA/IqB;;AAuJrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,QAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAvJqB;;AA+JrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oCAJf;AAKEC,QAAQ,CAAE,OALZ,CA/JqB;;AAsKrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,+CAJf;AAKEC,QAAQ,CAAE,OALZ,CAtKqB;;AA6KrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mCAJf;AAKEC,QAAQ,CAAE,OALZ,CA7KqB;;AAoLrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oDAJf;AAKEC,QAAQ,CAAE,OALZ,CApLqB;;AA2LrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,kCAJf;AAKEC,QAAQ,CAAE,KALZ,CA3LqB;;AAkMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,mBAJf;AAKEC,QAAQ,CAAE,KALZ,CAlMqB;;AAyMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,cAJf;AAKEC,QAAQ,CAAE,KALZ,CAzMqB;;AAgNrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAhNqB;;AAuNrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAvNqB;;AA8NrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,KALZ,CA9NqB;;AAqOrB;AACEJ,IAAI,CAAE,OADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CArOqB;;AA4OrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,eAJf;AAKEC,QAAQ,CAAE,OALZ,CA5OqB;;AAmPrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,OALZ,CAnPqB;;AA0PrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,SALZ,CA1PqB;;AAiQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAjQqB;;AAwQrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAxQqB,CAAhB,C","sourcesContent":["/* eslint-disable max-lines */\n// List of known process signals with information about them\nexport const SIGNALS = [\n {\n name: 'SIGHUP',\n number: 1,\n action: 'terminate',\n description: 'Terminal closed',\n standard: 'posix',\n },\n {\n name: 'SIGINT',\n number: 2,\n action: 'terminate',\n description: 'User interruption with CTRL-C',\n standard: 'ansi',\n },\n {\n name: 'SIGQUIT',\n number: 3,\n action: 'core',\n description: 'User interruption with CTRL-\\\\',\n standard: 'posix',\n },\n {\n 
name: 'SIGILL',\n number: 4,\n action: 'core',\n description: 'Invalid machine instruction',\n standard: 'ansi',\n },\n {\n name: 'SIGTRAP',\n number: 5,\n action: 'core',\n description: 'Debugger breakpoint',\n standard: 'posix',\n },\n {\n name: 'SIGABRT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'ansi',\n },\n {\n name: 'SIGIOT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'bsd',\n },\n {\n name: 'SIGBUS',\n number: 7,\n action: 'core',\n description:\n 'Bus error due to misaligned, non-existing address or paging error',\n standard: 'bsd',\n },\n {\n name: 'SIGEMT',\n number: 7,\n action: 'terminate',\n description: 'Command should be emulated but is not implemented',\n standard: 'other',\n },\n {\n name: 'SIGFPE',\n number: 8,\n action: 'core',\n description: 'Floating point arithmetic error',\n standard: 'ansi',\n },\n {\n name: 'SIGKILL',\n number: 9,\n action: 'terminate',\n description: 'Forced termination',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGUSR1',\n number: 10,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGSEGV',\n number: 11,\n action: 'core',\n description: 'Segmentation fault',\n standard: 'ansi',\n },\n {\n name: 'SIGUSR2',\n number: 12,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGPIPE',\n number: 13,\n action: 'terminate',\n description: 'Broken pipe or socket',\n standard: 'posix',\n },\n {\n name: 'SIGALRM',\n number: 14,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'posix',\n },\n {\n name: 'SIGTERM',\n number: 15,\n action: 'terminate',\n description: 'Termination',\n standard: 'ansi',\n },\n {\n name: 'SIGSTKFLT',\n number: 16,\n action: 'terminate',\n description: 'Stack is empty or overflowed',\n standard: 'other',\n },\n {\n name: 'SIGCHLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'posix',\n },\n {\n name: 'SIGCLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'other',\n },\n {\n name: 'SIGCONT',\n number: 18,\n action: 'unpause',\n description: 'Unpaused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGSTOP',\n number: 19,\n action: 'pause',\n description: 'Paused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGTSTP',\n number: 20,\n action: 'pause',\n description: 'Paused using CTRL-Z or \"suspend\"',\n standard: 'posix',\n },\n {\n name: 'SIGTTIN',\n number: 21,\n action: 'pause',\n description: 'Background process cannot read terminal input',\n standard: 'posix',\n },\n {\n name: 'SIGBREAK',\n number: 21,\n action: 'terminate',\n description: 'User interruption with CTRL-BREAK',\n standard: 'other',\n },\n {\n name: 'SIGTTOU',\n number: 22,\n action: 'pause',\n description: 'Background process cannot write to terminal output',\n standard: 'posix',\n },\n {\n name: 'SIGURG',\n number: 23,\n action: 'ignore',\n description: 'Socket received out-of-band data',\n standard: 'bsd',\n },\n {\n name: 'SIGXCPU',\n number: 24,\n action: 'core',\n description: 'Process timed out',\n standard: 'bsd',\n },\n {\n name: 'SIGXFSZ',\n number: 25,\n action: 'core',\n description: 'File too big',\n standard: 'bsd',\n },\n {\n name: 'SIGVTALRM',\n number: 26,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGPROF',\n number: 27,\n action: 'terminate',\n 
description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGWINCH',\n number: 28,\n action: 'ignore',\n description: 'Terminal window size changed',\n standard: 'bsd',\n },\n {\n name: 'SIGIO',\n number: 29,\n action: 'terminate',\n description: 'I/O is available',\n standard: 'other',\n },\n {\n name: 'SIGPOLL',\n number: 29,\n action: 'terminate',\n description: 'Watched event',\n standard: 'other',\n },\n {\n name: 'SIGINFO',\n number: 29,\n action: 'ignore',\n description: 'Request for process information',\n standard: 'other',\n },\n {\n name: 'SIGPWR',\n number: 30,\n action: 'terminate',\n description: 'Device running out of power',\n standard: 'systemv',\n },\n {\n name: 'SIGSYS',\n number: 31,\n action: 'core',\n description: 'Invalid system call',\n standard: 'other',\n },\n {\n name: 'SIGUNUSED',\n number: 31,\n action: 'terminate',\n description: 'Invalid system call',\n standard: 'other',\n },\n]\n/* eslint-enable max-lines */\n"],"file":"src/core.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts new file mode 100644 index 0000000..2dc5ea7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts @@ -0,0 +1,52 @@ +/** + * Object whose keys are signal names and values are signal objects. + */ +export declare const signalsByName: { [signalName: string]: Signal } +/** + * Object whose keys are signal numbers and values are signal objects. + */ +export declare const signalsByNumber: { [signalNumber: string]: Signal } + +export declare type SignalAction = + | 'terminate' + | 'core' + | 'ignore' + | 'pause' + | 'unpause' +export declare type SignalStandard = + | 'ansi' + | 'posix' + | 'bsd' + | 'systemv' + | 'other' + +export declare type Signal = { + /** + * Standard name of the signal, for example 'SIGINT'. + */ + name: string + /** + * Code number of the signal, for example 2. While most number are cross-platform, some are different between different OS. + */ + number: number + /** + * Human-friendly description for the signal, for example 'User interruption with CTRL-C'. + */ + description: string + /** + * Whether the current OS can handle this signal in Node.js using process.on(name, handler). The list of supported signals is OS-specific. + */ + supported: boolean + /** + * What is the default action for this signal when it is not handled. + */ + action: SignalAction + /** + * Whether the signal's default action cannot be prevented. This is true for SIGTERM, SIGKILL and SIGSTOP. + */ + forced: boolean + /** + * Which standard defined that signal. 
+ */ + standard: SignalStandard +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js new file mode 100644 index 0000000..88f5fd2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js @@ -0,0 +1,71 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.signalsByNumber=exports.signalsByName=void 0;var _os=require("os"); + +var _signals=require("./signals.js"); +var _realtime=require("./realtime.js"); + + + +const getSignalsByName=function(){ +const signals=(0,_signals.getSignals)(); +return signals.reduce(getSignalByName,{}); +}; + +const getSignalByName=function( +signalByNameMemo, +{name,number,description,supported,action,forced,standard}) +{ +return{ +...signalByNameMemo, +[name]:{name,number,description,supported,action,forced,standard}}; + +}; + +const signalsByName=getSignalsByName();exports.signalsByName=signalsByName; + + + + +const getSignalsByNumber=function(){ +const signals=(0,_signals.getSignals)(); +const length=_realtime.SIGRTMAX+1; +const signalsA=Array.from({length},(value,number)=> +getSignalByNumber(number,signals)); + +return Object.assign({},...signalsA); +}; + +const getSignalByNumber=function(number,signals){ +const signal=findSignalByNumber(number,signals); + +if(signal===undefined){ +return{}; +} + +const{name,description,supported,action,forced,standard}=signal; +return{ +[number]:{ +name, +number, +description, +supported, +action, +forced, +standard}}; + + +}; + + + +const findSignalByNumber=function(number,signals){ +const signal=signals.find(({name})=>_os.constants.signals[name]===number); + +if(signal!==undefined){ +return signal; +} + +return signals.find(signalA=>signalA.number===number); +}; + +const signalsByNumber=getSignalsByNumber();exports.signalsByNumber=signalsByNumber; +//# sourceMappingURL=main.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map new file mode 100644 index 0000000..3fdcede --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map @@ -0,0 +1 @@ 
+{"version":3,"sources":["../../src/main.js"],"names":["getSignalsByName","signals","reduce","getSignalByName","signalByNameMemo","name","number","description","supported","action","forced","standard","signalsByName","getSignalsByNumber","length","SIGRTMAX","signalsA","Array","from","value","getSignalByNumber","Object","assign","signal","findSignalByNumber","undefined","find","constants","signalA","signalsByNumber"],"mappings":"2HAAA;;AAEA;AACA;;;;AAIA,KAAMA,CAAAA,gBAAgB,CAAG,UAAW;AAClC,KAAMC,CAAAA,OAAO,CAAG,yBAAhB;AACA,MAAOA,CAAAA,OAAO,CAACC,MAAR,CAAeC,eAAf,CAAgC,EAAhC,CAAP;AACD,CAHD;;AAKA,KAAMA,CAAAA,eAAe,CAAG;AACtBC,gBADsB;AAEtB,CAAEC,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFsB;AAGtB;AACA,MAAO;AACL,GAAGP,gBADE;AAEL,CAACC,IAAD,EAAQ,CAAEA,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFH,CAAP;;AAID,CARD;;AAUO,KAAMC,CAAAA,aAAa,CAAGZ,gBAAgB,EAAtC,C;;;;;AAKP,KAAMa,CAAAA,kBAAkB,CAAG,UAAW;AACpC,KAAMZ,CAAAA,OAAO,CAAG,yBAAhB;AACA,KAAMa,CAAAA,MAAM,CAAGC,mBAAW,CAA1B;AACA,KAAMC,CAAAA,QAAQ,CAAGC,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuB,CAACK,KAAD,CAAQb,MAAR;AACtCc,iBAAiB,CAACd,MAAD,CAASL,OAAT,CADF,CAAjB;;AAGA,MAAOoB,CAAAA,MAAM,CAACC,MAAP,CAAc,EAAd,CAAkB,GAAGN,QAArB,CAAP;AACD,CAPD;;AASA,KAAMI,CAAAA,iBAAiB,CAAG,SAASd,MAAT,CAAiBL,OAAjB,CAA0B;AAClD,KAAMsB,CAAAA,MAAM,CAAGC,kBAAkB,CAAClB,MAAD,CAASL,OAAT,CAAjC;;AAEA,GAAIsB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAO,EAAP;AACD;;AAED,KAAM,CAAEpB,IAAF,CAAQE,WAAR,CAAqBC,SAArB,CAAgCC,MAAhC,CAAwCC,MAAxC,CAAgDC,QAAhD,EAA6DY,MAAnE;AACA,MAAO;AACL,CAACjB,MAAD,EAAU;AACRD,IADQ;AAERC,MAFQ;AAGRC,WAHQ;AAIRC,SAJQ;AAKRC,MALQ;AAMRC,MANQ;AAORC,QAPQ,CADL,CAAP;;;AAWD,CAnBD;;;;AAuBA,KAAMa,CAAAA,kBAAkB,CAAG,SAASlB,MAAT,CAAiBL,OAAjB,CAA0B;AACnD,KAAMsB,CAAAA,MAAM,CAAGtB,OAAO,CAACyB,IAAR,CAAa,CAAC,CAAErB,IAAF,CAAD,GAAcsB,cAAU1B,OAAV,CAAkBI,IAAlB,IAA4BC,MAAvD,CAAf;;AAEA,GAAIiB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAOF,CAAAA,MAAP;AACD;;AAED,MAAOtB,CAAAA,OAAO,CAACyB,IAAR,CAAaE,OAAO,EAAIA,OAAO,CAACtB,MAAR,GAAmBA,MAA3C,CAAP;AACD,CARD;;AAUO,KAAMuB,CAAAA,eAAe,CAAGhB,kBAAkB,EAA1C,C","sourcesContent":["import { constants } from 'os'\n\nimport { getSignals } from './signals.js'\nimport { SIGRTMAX } from './realtime.js'\n\n// Retrieve `signalsByName`, an object mapping signal name to signal properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByName = function() {\n const signals = getSignals()\n return signals.reduce(getSignalByName, {})\n}\n\nconst getSignalByName = function(\n signalByNameMemo,\n { name, number, description, supported, action, forced, standard },\n) {\n return {\n ...signalByNameMemo,\n [name]: { name, number, description, supported, action, forced, standard },\n }\n}\n\nexport const signalsByName = getSignalsByName()\n\n// Retrieve `signalsByNumber`, an object mapping signal number to signal\n// properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByNumber = function() {\n const signals = getSignals()\n const length = SIGRTMAX + 1\n const signalsA = Array.from({ length }, (value, number) =>\n getSignalByNumber(number, signals),\n )\n return Object.assign({}, ...signalsA)\n}\n\nconst getSignalByNumber = function(number, signals) {\n const signal = findSignalByNumber(number, signals)\n\n if (signal === undefined) {\n return {}\n }\n\n const { name, description, supported, action, forced, standard } = signal\n return {\n [number]: {\n name,\n number,\n description,\n supported,\n action,\n forced,\n standard,\n },\n }\n}\n\n// Several signals might end up 
sharing the same number because of OS-specific\n// numbers, in which case those prevail.\nconst findSignalByNumber = function(number, signals) {\n const signal = signals.find(({ name }) => constants.signals[name] === number)\n\n if (signal !== undefined) {\n return signal\n }\n\n return signals.find(signalA => signalA.number === number)\n}\n\nexport const signalsByNumber = getSignalsByNumber()\n"],"file":"src/main.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js new file mode 100644 index 0000000..f665516 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js @@ -0,0 +1,19 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGRTMAX=exports.getRealtimeSignals=void 0; +const getRealtimeSignals=function(){ +const length=SIGRTMAX-SIGRTMIN+1; +return Array.from({length},getRealtimeSignal); +};exports.getRealtimeSignals=getRealtimeSignals; + +const getRealtimeSignal=function(value,index){ +return{ +name:`SIGRT${index+1}`, +number:SIGRTMIN+index, +action:"terminate", +description:"Application-specific signal (realtime)", +standard:"posix"}; + +}; + +const SIGRTMIN=34; +const SIGRTMAX=64;exports.SIGRTMAX=SIGRTMAX; +//# sourceMappingURL=realtime.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map new file mode 100644 index 0000000..808bbd1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../src/realtime.js"],"names":["getRealtimeSignals","length","SIGRTMAX","SIGRTMIN","Array","from","getRealtimeSignal","value","index","name","number","action","description","standard"],"mappings":";AACO,KAAMA,CAAAA,kBAAkB,CAAG,UAAW;AAC3C,KAAMC,CAAAA,MAAM,CAAGC,QAAQ,CAAGC,QAAX,CAAsB,CAArC;AACA,MAAOC,CAAAA,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuBK,iBAAvB,CAAP;AACD,CAHM,C;;AAKP,KAAMA,CAAAA,iBAAiB,CAAG,SAASC,KAAT,CAAgBC,KAAhB,CAAuB;AAC/C,MAAO;AACLC,IAAI,CAAG,QAAOD,KAAK,CAAG,CAAE,EADnB;AAELE,MAAM,CAAEP,QAAQ,CAAGK,KAFd;AAGLG,MAAM,CAAE,WAHH;AAILC,WAAW,CAAE,wCAJR;AAKLC,QAAQ,CAAE,OALL,CAAP;;AAOD,CARD;;AAUA,KAAMV,CAAAA,QAAQ,CAAG,EAAjB;AACO,KAAMD,CAAAA,QAAQ,CAAG,EAAjB,C","sourcesContent":["// List of realtime signals with information about them\nexport const getRealtimeSignals = function() {\n const length = SIGRTMAX - SIGRTMIN + 1\n return Array.from({ length }, getRealtimeSignal)\n}\n\nconst getRealtimeSignal = function(value, index) {\n return {\n name: `SIGRT${index + 1}`,\n number: SIGRTMIN + index,\n action: 'terminate',\n description: 'Application-specific signal (realtime)',\n standard: 'posix',\n }\n}\n\nconst SIGRTMIN = 34\nexport const SIGRTMAX = 64\n"],"file":"src/realtime.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js new file mode 100644 index 0000000..ab3b387 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js @@ -0,0 +1,35 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.getSignals=void 
0;var _os=require("os"); + +var _core=require("./core.js"); +var _realtime=require("./realtime.js"); + + + +const getSignals=function(){ +const realtimeSignals=(0,_realtime.getRealtimeSignals)(); +const signals=[..._core.SIGNALS,...realtimeSignals].map(normalizeSignal); +return signals; +};exports.getSignals=getSignals; + + + + + + + +const normalizeSignal=function({ +name, +number:defaultNumber, +description, +action, +forced=false, +standard}) +{ +const{ +signals:{[name]:constantSignal}}= +_os.constants; +const supported=constantSignal!==undefined; +const number=supported?constantSignal:defaultNumber; +return{name,number,description,supported,action,forced,standard}; +}; +//# sourceMappingURL=signals.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map new file mode 100644 index 0000000..2a6b919 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map @@ -0,0 +1 @@ +{"version":3,"sources":["../../src/signals.js"],"names":["getSignals","realtimeSignals","signals","SIGNALS","map","normalizeSignal","name","number","defaultNumber","description","action","forced","standard","constantSignal","constants","supported","undefined"],"mappings":"gGAAA;;AAEA;AACA;;;;AAIO,KAAMA,CAAAA,UAAU,CAAG,UAAW;AACnC,KAAMC,CAAAA,eAAe,CAAG,kCAAxB;AACA,KAAMC,CAAAA,OAAO,CAAG,CAAC,GAAGC,aAAJ,CAAa,GAAGF,eAAhB,EAAiCG,GAAjC,CAAqCC,eAArC,CAAhB;AACA,MAAOH,CAAAA,OAAP;AACD,CAJM,C;;;;;;;;AAYP,KAAMG,CAAAA,eAAe,CAAG,SAAS;AAC/BC,IAD+B;AAE/BC,MAAM,CAAEC,aAFuB;AAG/BC,WAH+B;AAI/BC,MAJ+B;AAK/BC,MAAM,CAAG,KALsB;AAM/BC,QAN+B,CAAT;AAOrB;AACD,KAAM;AACJV,OAAO,CAAE,CAAE,CAACI,IAAD,EAAQO,cAAV,CADL;AAEFC,aAFJ;AAGA,KAAMC,CAAAA,SAAS,CAAGF,cAAc,GAAKG,SAArC;AACA,KAAMT,CAAAA,MAAM,CAAGQ,SAAS,CAAGF,cAAH,CAAoBL,aAA5C;AACA,MAAO,CAAEF,IAAF,CAAQC,MAAR,CAAgBE,WAAhB,CAA6BM,SAA7B,CAAwCL,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAAP;AACD,CAdD","sourcesContent":["import { constants } from 'os'\n\nimport { SIGNALS } from './core.js'\nimport { getRealtimeSignals } from './realtime.js'\n\n// Retrieve list of know signals (including realtime) with information about\n// them\nexport const getSignals = function() {\n const realtimeSignals = getRealtimeSignals()\n const signals = [...SIGNALS, ...realtimeSignals].map(normalizeSignal)\n return signals\n}\n\n// Normalize signal:\n// - `number`: signal numbers are OS-specific. This is taken into account by\n// `os.constants.signals`. However we provide a default `number` since some\n// signals are not defined for some OS.\n// - `forced`: set default to `false`\n// - `supported`: set value\nconst normalizeSignal = function({\n name,\n number: defaultNumber,\n description,\n action,\n forced = false,\n standard,\n}) {\n const {\n signals: { [name]: constantSignal },\n } = constants\n const supported = constantSignal !== undefined\n const number = supported ? 
constantSignal : defaultNumber\n return { name, number, description, supported, action, forced, standard }\n}\n"],"file":"src/signals.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json new file mode 100644 index 0000000..fd1d027 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json @@ -0,0 +1,64 @@ +{ + "name": "human-signals", + "version": "2.1.0", + "main": "build/src/main.js", + "files": [ + "build/src", + "!~" + ], + "scripts": { + "test": "gulp test" + }, + "husky": { + "hooks": { + "pre-push": "gulp check --full" + } + }, + "description": "Human-friendly process signals", + "keywords": [ + "signal", + "signals", + "handlers", + "error-handling", + "errors", + "interrupts", + "sigterm", + "sigint", + "irq", + "process", + "exit", + "exit-code", + "status", + "operating-system", + "es6", + "javascript", + "linux", + "macos", + "windows", + "nodejs" + ], + "license": "Apache-2.0", + "homepage": "https://git.io/JeluP", + "repository": "ehmicky/human-signals", + "bugs": { + "url": "https://github.com/ehmicky/human-signals/issues" + }, + "author": "ehmicky (https://github.com/ehmicky)", + "directories": { + "lib": "src", + "test": "test" + }, + "types": "build/src/main.d.ts", + "dependencies": {}, + "devDependencies": { + "@ehmicky/dev-tasks": "^0.31.9", + "ajv": "^6.12.0", + "ava": "^3.5.0", + "gulp": "^4.0.2", + "husky": "^4.2.3", + "test-each": "^2.0.0" + }, + "engines": { + "node": ">=10.17.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE new file mode 100644 index 0000000..842218c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2016, Jon Schlinkert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md new file mode 100644 index 0000000..0416af5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md @@ -0,0 +1,107 @@ +# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) + +> Returns true if a string has an extglob. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-extglob +``` + +## Usage + +```js +var isExtglob = require('is-extglob'); +``` + +**True** + +```js +isExtglob('?(abc)'); +isExtglob('@(abc)'); +isExtglob('!(abc)'); +isExtglob('*(abc)'); +isExtglob('+(abc)'); +``` + +**False** + +Escaped extglobs: + +```js +isExtglob('\\?(abc)'); +isExtglob('\\@(abc)'); +isExtglob('\\!(abc)'); +isExtglob('\\*(abc)'); +isExtglob('\\+(abc)'); +``` + +Everything else... + +```js +isExtglob('foo.js'); +isExtglob('!foo.js'); +isExtglob('*.js'); +isExtglob('**/abc.js'); +isExtglob('abc/*.js'); +isExtglob('abc/(aaa|bbb).js'); +isExtglob('abc/[a-z].js'); +isExtglob('abc/{a,b}.js'); +isExtglob('abc/?.js'); +isExtglob('abc.js'); +isExtglob('abc/def/ghi.js'); +``` + +## History + +**v2.0** + +Adds support for escaping. Escaped exglobs no longer return true. + +## About + +### Related projects + +* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") +* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") + +### Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +### Building docs + +_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. 
Any changes to the readme must be made in [.verb.md](.verb.md).)_ + +To generate the readme and API documentation with [verb](https://github.com/verbose/verb): + +```sh +$ npm install -g verb verb-generate-readme && verb +``` + +### Running tests + +Install dev dependencies: + +```sh +$ npm install -d && npm test +``` + +### Author + +**Jon Schlinkert** + +* [github/jonschlinkert](https://github.com/jonschlinkert) +* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) + +### License + +Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js new file mode 100644 index 0000000..c1d986f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js @@ -0,0 +1,20 @@ +/*! + * is-extglob + * + * Copyright (c) 2014-2016, Jon Schlinkert. + * Licensed under the MIT License. + */ + +module.exports = function isExtglob(str) { + if (typeof str !== 'string' || str === '') { + return false; + } + + var match; + while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { + if (match[2]) return true; + str = str.slice(match.index + match[0].length); + } + + return false; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json new file mode 100644 index 0000000..7a90836 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json @@ -0,0 +1,69 @@ +{ + "name": "is-extglob", + "description": "Returns true if a string has an extglob.", + "version": "2.1.1", + "homepage": "https://github.com/jonschlinkert/is-extglob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "repository": "jonschlinkert/is-extglob", + "bugs": { + "url": "https://github.com/jonschlinkert/is-extglob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "has-glob", + "is-glob", + "micromatch" + ] + }, + "reflinks": [ + "verb", + "verb-generate-readme" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE new file mode 100644 index 0000000..3f2eca1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md new file mode 100644 index 0000000..740724b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md @@ -0,0 +1,206 @@ +# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) + +> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-glob +``` + +You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). 
+ +## Usage + +```js +var isGlob = require('is-glob'); +``` + +### Default behavior + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js'); +isGlob('*.js'); +isGlob('**/abc.js'); +isGlob('abc/*.js'); +isGlob('abc/(aaa|bbb).js'); +isGlob('abc/[a-z].js'); +isGlob('abc/{a,b}.js'); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js'); +isGlob('abc/!(a).js'); +isGlob('abc/+(a).js'); +isGlob('abc/*(a).js'); +isGlob('abc/?(a).js'); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('abc/\\@(a).js'); +isGlob('abc/\\!(a).js'); +isGlob('abc/\\+(a).js'); +isGlob('abc/\\*(a).js'); +isGlob('abc/\\?(a).js'); +isGlob('\\!foo.js'); +isGlob('\\*.js'); +isGlob('\\*\\*/abc.js'); +isGlob('abc/\\*.js'); +isGlob('abc/\\(aaa|bbb).js'); +isGlob('abc/\\[a-z].js'); +isGlob('abc/\\{a,b}.js'); +//=> false +``` + +Patterns that do not have glob patterns return `false`: + +```js +isGlob('abc.js'); +isGlob('abc/def/ghi.js'); +isGlob('foo.js'); +isGlob('abc/@.js'); +isGlob('abc/+.js'); +isGlob('abc/?.js'); +isGlob(); +isGlob(null); +//=> false +``` + +Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): + +```js +isGlob(['**/*.js']); +isGlob(['foo.js']); +//=> false +``` + +### Option strict + +When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that +some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. + +**True** + +Patterns that have glob characters or regex patterns will return `true`: + +```js +isGlob('!foo.js', {strict: false}); +isGlob('*.js', {strict: false}); +isGlob('**/abc.js', {strict: false}); +isGlob('abc/*.js', {strict: false}); +isGlob('abc/(aaa|bbb).js', {strict: false}); +isGlob('abc/[a-z].js', {strict: false}); +isGlob('abc/{a,b}.js', {strict: false}); +//=> true +``` + +Extglobs + +```js +isGlob('abc/@(a).js', {strict: false}); +isGlob('abc/!(a).js', {strict: false}); +isGlob('abc/+(a).js', {strict: false}); +isGlob('abc/*(a).js', {strict: false}); +isGlob('abc/?(a).js', {strict: false}); +//=> true +``` + +**False** + +Escaped globs or extglobs return `false`: + +```js +isGlob('\\!foo.js', {strict: false}); +isGlob('\\*.js', {strict: false}); +isGlob('\\*\\*/abc.js', {strict: false}); +isGlob('abc/\\*.js', {strict: false}); +isGlob('abc/\\(aaa|bbb).js', {strict: false}); +isGlob('abc/\\[a-z].js', {strict: false}); +isGlob('abc/\\{a,b}.js', {strict: false}); +//=> false +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+ +### Related projects + +You might also be interested in these projects: + +* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") +* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") +* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") +* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 47 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [doowb](https://github.com/doowb) | +| 1 | [phated](https://github.com/phated) | +| 1 | [danhper](https://github.com/danhper) | +| 1 | [paulmillr](https://github.com/paulmillr) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js new file mode 100644 index 0000000..620f563 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js @@ -0,0 +1,150 @@ +/*! + * is-glob + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +var isExtglob = require('is-extglob'); +var chars = { '{': '}', '(': ')', '[': ']'}; +var strictCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + var pipeIndex = -2; + var closeSquareIndex = -2; + var closeCurlyIndex = -2; + var closeParenIndex = -2; + var backSlashIndex = -2; + while (index < str.length) { + if (str[index] === '*') { + return true; + } + + if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { + return true; + } + + if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { + if (closeSquareIndex < index) { + closeSquareIndex = str.indexOf(']', index); + } + if (closeSquareIndex > index) { + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { + return true; + } + } + } + + if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { + closeCurlyIndex = str.indexOf('}', index); + if (closeCurlyIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { + return true; + } + } + } + + if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { + closeParenIndex = str.indexOf(')', index); + if (closeParenIndex > index) { + backSlashIndex = str.indexOf('\\', index); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + + if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { + if (pipeIndex < index) { + pipeIndex = str.indexOf('|', index); + } + if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { + closeParenIndex = str.indexOf(')', pipeIndex); + if (closeParenIndex > pipeIndex) { + backSlashIndex = str.indexOf('\\', pipeIndex); + if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { + return true; + } + } + } + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +var relaxedCheck = function(str) { + if (str[0] === '!') { + return true; + } + var index = 0; + while (index < str.length) { + if (/[*?{}()[\]]/.test(str[index])) { + return true; + } + + if (str[index] === '\\') { + var open = str[index + 1]; + index += 2; + var close = chars[open]; + + if (close) { + var n = str.indexOf(close, index); + if (n !== -1) { + index = n + 1; + } + } + + if (str[index] === '!') { + return true; + } + } else { + index++; + } + } + return false; +}; + +module.exports = function isGlob(str, options) { + if (typeof str !== 'string' || str === '') { + return false; + } + + if (isExtglob(str)) { + return true; + } + + var check = strictCheck; + + // optionally relax check + if (options && options.strict === false) { + check = relaxedCheck; + } + + return check(str); +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json new file mode 100644 index 0000000..858af03 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json @@ -0,0 +1,81 @@ +{ + "name": "is-glob", + "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", + "version": "4.0.3", + "homepage": "https://github.com/micromatch/is-glob", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Brian Woodward (https://twitter.com/doowb)", + "Daniel Perez (https://tuvistavie.com)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)" + ], + "repository": "micromatch/is-glob", + "bugs": { + "url": "https://github.com/micromatch/is-glob/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha && node benchmark.js" + }, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "devDependencies": { + "gulp-format-md": "^0.1.10", + "mocha": "^3.0.2" + }, + "keywords": [ + "bash", + "braces", + "check", + "exec", + "expression", + "extglob", + "glob", + "globbing", + "globstar", + "is", + "match", + "matches", + "pattern", + "regex", + "regular", + "string", + "test" + ], + "verb": { + "layout": "default", + "plugins": [ + "gulp-format-md" + ], + "related": { + "list": [ + "assemble", + "base", + "update", + "verb" + ] + }, + "reflinks": [ + "assemble", + "bach", + "base", + "composer", + "gulp", + "has-glob", + "is-valid-glob", + "micromatch", + "npm", + "scaffold", + "verb", + "vinyl" + ] + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE new file mode 100644 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md new file mode 100644 index 0000000..eb8149e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md @@ -0,0 +1,187 @@ +# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) + +> Returns true if the value is a finite number. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save is-number +``` + +## Why is this needed? + +In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: + +```js +console.log(+[]); //=> 0 +console.log(+''); //=> 0 +console.log(+' '); //=> 0 +console.log(typeof NaN); //=> 'number' +``` + +This library offers a performant way to smooth out edge cases like these. + +## Usage + +```js +const isNumber = require('is-number'); +``` + +See the [tests](./test.js) for more examples. + +### true + +```js +isNumber(5e3); // true +isNumber(0xff); // true +isNumber(-1.1); // true +isNumber(0); // true +isNumber(1); // true +isNumber(1.1); // true +isNumber(10); // true +isNumber(10.10); // true +isNumber(100); // true +isNumber('-1.1'); // true +isNumber('0'); // true +isNumber('012'); // true +isNumber('0xff'); // true +isNumber('1'); // true +isNumber('1.1'); // true +isNumber('10'); // true +isNumber('10.10'); // true +isNumber('100'); // true +isNumber('5e3'); // true +isNumber(parseInt('012')); // true +isNumber(parseFloat('012')); // true +``` + +### False + +Everything else is false, as you would expect: + +```js +isNumber(Infinity); // false +isNumber(NaN); // false +isNumber(null); // false +isNumber(undefined); // false +isNumber(''); // false +isNumber(' '); // false +isNumber('foo'); // false +isNumber([1]); // false +isNumber([]); // false +isNumber(function () {}); // false +isNumber({}); // false +``` + +## Release history + +### 7.0.0 + +* Refactor. Now uses `.isFinite` if it exists. +* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. + +### 6.0.0 + +* Optimizations, thanks to @benaadams. + +### 5.0.0 + +**Breaking changes** + +* removed support for `instanceof Number` and `instanceof String` + +## Benchmarks + +As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. 
+ +``` +# all +v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) +v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) +parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) +fastest is 'v7.0' + +# string +v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) +v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) +parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) +fastest is 'parseFloat,v7.0' + +# number +v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) +v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) +parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) +fastest is 'v6.0' +``` + +## About + +
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+ +### Related projects + +You might also be interested in these projects: + +* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") +* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") +* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") +* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 49 | [jonschlinkert](https://github.com/jonschlinkert) | +| 5 | [charlike-old](https://github.com/charlike-old) | +| 1 | [benaadams](https://github.com/benaadams) | +| 1 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) + +### License + +Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js new file mode 100644 index 0000000..27f19b7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js @@ -0,0 +1,18 @@ +/*! + * is-number + * + * Copyright (c) 2014-present, Jon Schlinkert. + * Released under the MIT License. + */ + +'use strict'; + +module.exports = function(num) { + if (typeof num === 'number') { + return num - num === 0; + } + if (typeof num === 'string' && num.trim() !== '') { + return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); + } + return false; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json new file mode 100644 index 0000000..3715072 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json @@ -0,0 +1,82 @@ +{ + "name": "is-number", + "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", + "version": "7.0.0", + "homepage": "https://github.com/jonschlinkert/is-number", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Olsten Larck (https://i.am.charlike.online)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "jonschlinkert/is-number", + "bugs": { + "url": "https://github.com/jonschlinkert/is-number/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=0.12.0" + }, + "scripts": { + "test": "mocha" + }, + "devDependencies": { + "ansi": "^0.3.1", + "benchmark": "^2.1.4", + "gulp-format-md": "^1.0.0", + "mocha": "^3.5.3" + }, + "keywords": [ + "cast", + "check", + "coerce", + "coercion", + "finite", + "integer", + "is", + "isnan", + "is-nan", + "is-num", + "is-number", + "isnumber", + "isfinite", + "istype", + "kind", + "math", + "nan", + "num", + "number", + "numeric", + "parseFloat", + "parseInt", + "test", + "type", + "typeof", + "value" + ], + "verb": { + "toc": false, + "layout": "default", + "tasks": [ + "readme" + ], + "related": { + "list": [ + "is-plain-object", + "is-primitive", + "isobject", + "kind-of" + ] + }, + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts new file mode 100644 index 0000000..eee2e83 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts @@ -0,0 +1,79 @@ +import * as stream from 'stream'; + +declare const isStream: { + /** + @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream(fs.createReadStream('unicorn.png')); + //=> true + + isStream({}); + //=> false + ``` + */ + (stream: unknown): stream is stream.Stream; + + /** + @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.writable(fs.createWriteStrem('unicorn.txt')); + //=> true + ``` + */ + writable(stream: unknown): stream is stream.Writable; + + /** + @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + + @example + ``` + import * as fs from 'fs'; + import isStream = require('is-stream'); + + isStream.readable(fs.createReadStream('unicorn.png')); + //=> true + ``` + */ + readable(stream: unknown): stream is stream.Readable; + + /** + @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + + @example + ``` + import {Duplex} from 'stream'; + import isStream = require('is-stream'); + + isStream.duplex(new Duplex()); + //=> true + ``` + */ + duplex(stream: unknown): stream is stream.Duplex; + + /** + @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). 
+ + @example + ``` + import * as fs from 'fs'; + import Stringify = require('streaming-json-stringify'); + import isStream = require('is-stream'); + + isStream.transform(Stringify()); + //=> true + ``` + */ + transform(input: unknown): input is stream.Transform; +}; + +export = isStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js new file mode 100644 index 0000000..2e43434 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js @@ -0,0 +1,28 @@ +'use strict'; + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json new file mode 100644 index 0000000..c3b5673 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json @@ -0,0 +1,42 @@ +{ + "name": "is-stream", + "version": "2.0.1", + "description": "Check if something is a Node.js stream", + "license": "MIT", + "repository": "sindresorhus/is-stream", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "stream", + "type", + "streams", + "writable", + "readable", + "duplex", + "transform", + "check", + "detect", + "is" + ], + "devDependencies": { + "@types/node": "^11.13.6", + "ava": "^1.4.1", + "tempy": "^0.3.0", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md new file mode 100644 index 0000000..19308e7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md @@ -0,0 +1,60 @@ +# is-stream + +> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) + +## Install + +``` +$ npm install is-stream +``` + +## Usage + +```js +const fs = require('fs'); +const isStream = require('is-stream'); + +isStream(fs.createReadStream('unicorn.png')); +//=> true + +isStream({}); +//=> false +``` + +## API + +### isStream(stream) + +Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). + +#### isStream.writable(stream) + +Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). + +#### isStream.readable(stream) + +Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). + +#### isStream.duplex(stream) + +Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). + +#### isStream.transform(stream) + +Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). + +## Related + +- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream + +--- + +
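+
+For completeness, a tiny sketch (not part of the upstream readme) exercising the sub-checks documented in the API section above:
+
+```js
+const {Writable, PassThrough} = require('stream');
+const isStream = require('is-stream');
+
+console.log(isStream.writable(new Writable()));     //=> true
+console.log(isStream.readable(new Writable()));     //=> false
+console.log(isStream.duplex(new PassThrough()));    //=> true
+console.log(isStream.transform(new PassThrough())); //=> true
+```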
+
+> Get professional support for this package with a Tidelift subscription.
+>
+> Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore new file mode 100644 index 0000000..c1cb757 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore @@ -0,0 +1,2 @@ +.nyc_output/ +coverage/ diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md new file mode 100644 index 0000000..35769e8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md @@ -0,0 +1,51 @@ +# isexe + +Minimal module to check if a file is executable, and a normal file. + +Uses `fs.stat` and tests against the `PATHEXT` environment variable on +Windows. + +## USAGE + +```javascript +var isexe = require('isexe') +isexe('some-file-name', function (err, isExe) { + if (err) { + console.error('probably file does not exist or something', err) + } else if (isExe) { + console.error('this thing can be run') + } else { + console.error('cannot be run') + } +}) + +// same thing but synchronous, throws errors +var isExe = isexe.sync('some-file-name') + +// treat errors as just "not executable" +isexe('maybe-missing-file', { ignoreErrors: true }, callback) +var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) +``` + +## API + +### `isexe(path, [options], [callback])` + +Check if the path is executable. If no callback provided, and a +global `Promise` object is available, then a Promise will be returned. + +Will raise whatever errors may be raised by `fs.stat`, unless +`options.ignoreErrors` is set to true. + +### `isexe.sync(path, [options])` + +Same as `isexe` but returns the value and throws any errors raised. + +### Options + +* `ignoreErrors` Treat all errors as "no, this is not executable", but + don't raise them. +* `uid` Number to use as the user id +* `gid` Number to use as the group id +* `pathExt` List of path extensions to use instead of `PATHEXT` + environment variable on Windows. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js new file mode 100644 index 0000000..553fb32 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js @@ -0,0 +1,57 @@ +var fs = require('fs') +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = require('./windows.js') +} else { + core = require('./mode.js') +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } + + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } + + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} + +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js new file mode 100644 index 0000000..1995ea4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js @@ -0,0 +1,41 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} + +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} + +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid + + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? + options.gid : process.getgid && process.getgid() + + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g + + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 + + return ret +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json new file mode 100644 index 0000000..e452689 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json @@ -0,0 +1,31 @@ +{ + "name": "isexe", + "version": "2.0.0", + "description": "Minimal module to check if a file is executable.", + "main": "index.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.5.0", + "tap": "^10.3.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/isexe.git" + }, + "keywords": [], + "bugs": { + "url": "https://github.com/isaacs/isexe/issues" + }, + "homepage": "https://github.com/isaacs/isexe#readme" +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js new file mode 100644 index 0000000..d926df6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js @@ -0,0 +1,221 @@ +var t = require('tap') +var fs = require('fs') +var path = require('path') +var fixture = path.resolve(__dirname, 'fixtures') +var meow = fixture + '/meow.cat' +var mine = fixture + '/mine.cat' +var ours = fixture + '/ours.cat' +var fail = fixture + '/fail.false' +var noent = fixture + '/enoent.exe' +var mkdirp = require('mkdirp') +var rimraf = require('rimraf') + +var isWindows = process.platform === 'win32' +var hasAccess = typeof fs.access === 'function' +var winSkip = isWindows && 'windows' +var accessSkip = !hasAccess && 'no fs.access function' +var hasPromise = typeof Promise === 'function' +var promiseSkip = !hasPromise && 'no global Promise' + +function reset () { + delete require.cache[require.resolve('../')] + return require('../') +} + +t.test('setup fixtures', function (t) { + rimraf.sync(fixture) + mkdirp.sync(fixture) + fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n') + fs.chmodSync(meow, parseInt('0755', 8)) + fs.writeFileSync(fail, '#!/usr/bin/env false\n') + fs.chmodSync(fail, parseInt('0644', 8)) + fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n') + fs.chmodSync(mine, parseInt('0744', 8)) + fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n') + fs.chmodSync(ours, parseInt('0754', 8)) + t.end() +}) + +t.test('promise', { skip: promiseSkip }, function (t) { + var isexe = reset() + t.test('meow async', function (t) { + isexe(meow).then(function (is) { + t.ok(is) + t.end() + }) + }) + t.test('fail async', function (t) { + isexe(fail).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.test('noent async', function (t) { + isexe(noent).catch(function (er) { + t.ok(er) + t.end() + }) + }) + t.test('noent ignore async', function (t) { + isexe(noent, { ignoreErrors: true }).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.end() +}) + +t.test('no promise', function (t) { + global.Promise = null + var isexe = reset() + t.throws('try to meow a promise', function () { + isexe(meow) + }) + t.end() +}) + +t.test('access', { skip: accessSkip || winSkip }, function (t) { + runTest(t) +}) + +t.test('mode', { skip: winSkip }, function (t) { + delete fs.access + delete fs.accessSync + var isexe = reset() + t.ok(isexe.sync(ours, { uid: 0, gid: 0 })) + t.ok(isexe.sync(mine, { uid: 0, gid: 0 })) + runTest(t) +}) + +t.test('windows', function (t) { + global.TESTING_WINDOWS = true + var pathExt = '.EXE;.CAT;.CMD;.COM' + t.test('pathExt option', function (t) { + runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' }) + }) + t.test('pathExt env', function (t) { + process.env.PATHEXT = pathExt + runTest(t) + }) + t.test('no pathExt', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. + runTest(t, { pathExt: '', skipFail: true }) + }) + t.test('pathext with empty entry', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. 
+ runTest(t, { pathExt: ';' + pathExt, skipFail: true }) + }) + t.end() +}) + +t.test('cleanup', function (t) { + rimraf.sync(fixture) + t.end() +}) + +function runTest (t, options) { + var isexe = reset() + + var optionsIgnore = Object.create(options || {}) + optionsIgnore.ignoreErrors = true + + if (!options || !options.skipFail) { + t.notOk(isexe.sync(fail, options)) + } + t.notOk(isexe.sync(noent, optionsIgnore)) + if (!options) { + t.ok(isexe.sync(meow)) + } else { + t.ok(isexe.sync(meow, options)) + } + + t.ok(isexe.sync(mine, options)) + t.ok(isexe.sync(ours, options)) + t.throws(function () { + isexe.sync(noent, options) + }) + + t.test('meow async', function (t) { + if (!options) { + isexe(meow, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } else { + isexe(meow, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } + }) + + t.test('mine async', function (t) { + isexe(mine, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + t.test('ours async', function (t) { + isexe(ours, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + if (!options || !options.skipFail) { + t.test('fail async', function (t) { + isexe(fail, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + } + + t.test('noent async', function (t) { + isexe(noent, options, function (er, is) { + t.ok(er) + t.notOk(is) + t.end() + }) + }) + + t.test('noent ignore async', function (t) { + isexe(noent, optionsIgnore, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.test('directory is not executable', function (t) { + isexe(__dirname, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.end() +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js new file mode 100644 index 0000000..3499673 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js @@ -0,0 +1,42 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT + + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false +} + +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? 
false : checkStat(stat, path, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE new file mode 100644 index 0000000..94a4c0a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Stephen Sugden (stephensugden.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md new file mode 100644 index 0000000..0d54841 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md @@ -0,0 +1,78 @@ +# merge-stream + +Merge (interleave) a bunch of streams. + +[![build status](https://secure.travis-ci.org/grncdr/merge-stream.svg?branch=master)](http://travis-ci.org/grncdr/merge-stream) + +## Synopsis + +```javascript +var stream1 = new Stream(); +var stream2 = new Stream(); + +var merged = mergeStream(stream1, stream2); + +var stream3 = new Stream(); +merged.add(stream3); +merged.isEmpty(); +//=> false +``` + +## Description + +This is adapted from [event-stream](https://github.com/dominictarr/event-stream) separated into a new module, using Streams3. + +## API + +### `mergeStream` + +Type: `function` + +Merges an arbitrary number of streams. Returns a merged stream. + +#### `merged.add` + +A method to dynamically add more sources to the stream. The argument supplied to `add` can be either a source or an array of sources. + +#### `merged.isEmpty` + +A method that tells you if the merged stream is empty. + +When a stream is "empty" (aka. no sources were added), it could not be returned to a gulp task. + +So, we could do something like this: + +```js +stream = require('merge-stream')(); +// Something like a loop to add some streams to the merge stream +// stream.add(streamA); +// stream.add(streamB); +return stream.isEmpty() ? 
null : stream; +``` + +## Gulp example + +An example use case for **merge-stream** is to combine parts of a task in a project's **gulpfile.js** like this: + +```js +const gulp = require('gulp'); +const htmlValidator = require('gulp-w3c-html-validator'); +const jsHint = require('gulp-jshint'); +const mergeStream = require('merge-stream'); + +function lint() { + return mergeStream( + gulp.src('src/*.html') + .pipe(htmlValidator()) + .pipe(htmlValidator.reporter()), + gulp.src('src/*.js') + .pipe(jsHint()) + .pipe(jsHint.reporter()) + ); +} +gulp.task('lint', lint); +``` + +## License + +MIT diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js new file mode 100644 index 0000000..b1a9e1a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js @@ -0,0 +1,41 @@ +'use strict'; + +const { PassThrough } = require('stream'); + +module.exports = function (/*streams...*/) { + var sources = [] + var output = new PassThrough({objectMode: true}) + + output.setMaxListeners(0) + + output.add = add + output.isEmpty = isEmpty + + output.on('unpipe', remove) + + Array.prototype.slice.call(arguments).forEach(add) + + return output + + function add (source) { + if (Array.isArray(source)) { + source.forEach(add) + return this + } + + sources.push(source); + source.once('end', remove.bind(null, source)) + source.once('error', output.emit.bind(output, 'error')) + source.pipe(output, {end: false}) + return this + } + + function isEmpty () { + return sources.length == 0; + } + + function remove (source) { + sources = sources.filter(function (it) { return it !== source }) + if (!sources.length && output.readable) { output.end() } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json new file mode 100644 index 0000000..1a4c54c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json @@ -0,0 +1,19 @@ +{ + "name": "merge-stream", + "version": "2.0.0", + "description": "Create a stream that emits events from multiple other streams", + "files": [ + "index.js" + ], + "scripts": { + "test": "istanbul cover test.js && istanbul check-cover --statements 100 --branches 100" + }, + "repository": "grncdr/merge-stream", + "author": "Stephen Sugden ", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "from2": "^2.0.3", + "istanbul": "^0.4.5" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE new file mode 100644 index 0000000..31dd9c7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2020 Teambition + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md new file mode 100644 index 0000000..27f8eb9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md @@ -0,0 +1,144 @@ +# merge2 + +Merge multiple streams into one stream in sequence or parallel. + +[![NPM version][npm-image]][npm-url] +[![Build Status][travis-image]][travis-url] +[![Downloads][downloads-image]][downloads-url] + +## Install + +Install with [npm](https://npmjs.org/package/merge2) + +```sh +npm install merge2 +``` + +## Usage + +```js +const gulp = require('gulp') +const merge2 = require('merge2') +const concat = require('gulp-concat') +const minifyHtml = require('gulp-minify-html') +const ngtemplate = require('gulp-ngtemplate') + +gulp.task('app-js', function () { + return merge2( + gulp.src('static/src/tpl/*.html') + .pipe(minifyHtml({empty: true})) + .pipe(ngtemplate({ + module: 'genTemplates', + standalone: true + }) + ), gulp.src([ + 'static/src/js/app.js', + 'static/src/js/locale_zh-cn.js', + 'static/src/js/router.js', + 'static/src/js/tools.js', + 'static/src/js/services.js', + 'static/src/js/filters.js', + 'static/src/js/directives.js', + 'static/src/js/controllers.js' + ]) + ) + .pipe(concat('app.js')) + .pipe(gulp.dest('static/dist/js/')) +}) +``` + +```js +const stream = merge2([stream1, stream2], stream3, {end: false}) +//... +stream.add(stream4, stream5) +//.. +stream.end() +``` + +```js +// equal to merge2([stream1, stream2], stream3) +const stream = merge2() +stream.add([stream1, stream2]) +stream.add(stream3) +``` + +```js +// merge order: +// 1. merge `stream1`; +// 2. merge `stream2` and `stream3` in parallel after `stream1` merged; +// 3. merge 'stream4' after `stream2` and `stream3` merged; +const stream = merge2(stream1, [stream2, stream3], stream4) + +// merge order: +// 1. merge `stream5` and `stream6` in parallel after `stream4` merged; +// 2. merge 'stream7' after `stream5` and `stream6` merged; +stream.add([stream5, stream6], stream7) +``` + +```js +// nest merge +// equal to merge2(stream1, stream2, stream6, stream3, [stream4, stream5]); +const streamA = merge2(stream1, stream2) +const streamB = merge2(stream3, [stream4, stream5]) +const stream = merge2(streamA, streamB) +streamA.add(stream6) +``` + +## API + +```js +const merge2 = require('merge2') +``` + +### merge2() + +### merge2(options) + +### merge2(stream1, stream2, ..., streamN) + +### merge2(stream1, stream2, ..., streamN, options) + +### merge2(stream1, [stream2, stream3, ...], streamN, options) + +return a duplex stream (mergedStream). streams in array will be merged in parallel. + +### mergedStream.add(stream) + +### mergedStream.add(stream1, [stream2, stream3, ...], ...) + +return the mergedStream. + +### mergedStream.on('queueDrain', function() {}) + +It will emit 'queueDrain' when all streams merged. If you set `end === false` in options, this event give you a notice that should add more streams to merge or end the mergedStream. 
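+
+A minimal sketch (not part of the upstream examples) of pairing `end: false` with `queueDrain`; the `PassThrough` source below is a stand-in for a real stream:
+
+```js
+const { PassThrough } = require('stream')
+const merge2 = require('merge2')
+
+const source = new PassThrough({ objectMode: true })
+const merged = merge2(source, { end: false })
+
+merged.on('queueDrain', () => {
+  // every queued source has been merged; since end === false,
+  // decide here whether to add more streams or end manually
+  merged.end()
+})
+
+source.end('done')
+```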
+ +#### stream + +*option* +Type: `Readable` or `Duplex` or `Transform` stream. + +#### options + +*option* +Type: `Object`. + +* **end** - `Boolean` - if `end === false` then mergedStream will not be auto ended, you should end by yourself. **Default:** `undefined` + +* **pipeError** - `Boolean` - if `pipeError === true` then mergedStream will emit `error` event from source streams. **Default:** `undefined` + +* **objectMode** - `Boolean` . **Default:** `true` + +`objectMode` and other options(`highWaterMark`, `defaultEncoding` ...) is same as Node.js `Stream`. + +## License + +MIT © [Teambition](https://www.teambition.com) + +[npm-url]: https://npmjs.org/package/merge2 +[npm-image]: http://img.shields.io/npm/v/merge2.svg + +[travis-url]: https://travis-ci.org/teambition/merge2 +[travis-image]: http://img.shields.io/travis/teambition/merge2.svg + +[downloads-url]: https://npmjs.org/package/merge2 +[downloads-image]: http://img.shields.io/npm/dm/merge2.svg?style=flat-square diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js new file mode 100644 index 0000000..78a61ed --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js @@ -0,0 +1,144 @@ +'use strict' +/* + * merge2 + * https://github.com/teambition/merge2 + * + * Copyright (c) 2014-2020 Teambition + * Licensed under the MIT license. + */ +const Stream = require('stream') +const PassThrough = Stream.PassThrough +const slice = Array.prototype.slice + +module.exports = merge2 + +function merge2 () { + const streamsQueue = [] + const args = slice.call(arguments) + let merging = false + let options = args[args.length - 1] + + if (options && !Array.isArray(options) && options.pipe == null) { + args.pop() + } else { + options = {} + } + + const doEnd = options.end !== false + const doPipeError = options.pipeError === true + if (options.objectMode == null) { + options.objectMode = true + } + if (options.highWaterMark == null) { + options.highWaterMark = 64 * 1024 + } + const mergedStream = PassThrough(options) + + function addStream () { + for (let i = 0, len = arguments.length; i < len; i++) { + streamsQueue.push(pauseStreams(arguments[i], options)) + } + mergeStream() + return this + } + + function mergeStream () { + if (merging) { + return + } + merging = true + + let streams = streamsQueue.shift() + if (!streams) { + process.nextTick(endStream) + return + } + if (!Array.isArray(streams)) { + streams = [streams] + } + + let pipesCount = streams.length + 1 + + function next () { + if (--pipesCount > 0) { + return + } + merging = false + mergeStream() + } + + function pipe (stream) { + function onend () { + stream.removeListener('merge2UnpipeEnd', onend) + stream.removeListener('end', onend) + if (doPipeError) { + stream.removeListener('error', onerror) + } + next() + } + function onerror (err) { + mergedStream.emit('error', err) + } + // skip ended stream + if (stream._readableState.endEmitted) { + return next() + } + + stream.on('merge2UnpipeEnd', onend) + stream.on('end', onend) + + if (doPipeError) { + stream.on('error', onerror) + } + + stream.pipe(mergedStream, { end: false }) + // compatible for old stream + stream.resume() + } + + for (let i = 0; i < streams.length; i++) { + pipe(streams[i]) + } + + next() + } + + function endStream () { + merging = false + // emit 'queueDrain' when all streams merged. 
+ mergedStream.emit('queueDrain') + if (doEnd) { + mergedStream.end() + } + } + + mergedStream.setMaxListeners(0) + mergedStream.add = addStream + mergedStream.on('unpipe', function (stream) { + stream.emit('merge2UnpipeEnd') + }) + + if (args.length) { + addStream.apply(null, args) + } + return mergedStream +} + +// check and pause streams for pipe. +function pauseStreams (streams, options) { + if (!Array.isArray(streams)) { + // Backwards-compat with old-style streams + if (!streams._readableState && streams.pipe) { + streams = streams.pipe(PassThrough(options)) + } + if (!streams._readableState || !streams.pause || !streams.pipe) { + throw new Error('Only readable stream can be merged.') + } + streams.pause() + } else { + for (let i = 0, len = streams.length; i < len; i++) { + streams[i] = pauseStreams(streams[i], options) + } + } + return streams +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json new file mode 100644 index 0000000..7777307 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json @@ -0,0 +1,43 @@ +{ + "name": "merge2", + "description": "Merge multiple streams into one stream in sequence or parallel.", + "authors": [ + "Yan Qing " + ], + "license": "MIT", + "version": "1.4.1", + "main": "./index.js", + "repository": { + "type": "git", + "url": "git@github.com:teambition/merge2.git" + }, + "homepage": "https://github.com/teambition/merge2", + "keywords": [ + "merge2", + "multiple", + "sequence", + "parallel", + "merge", + "stream", + "merge stream", + "sync" + ], + "engines": { + "node": ">= 8" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^14.3.4", + "through2": "^3.0.1", + "thunks": "^4.9.6", + "tman": "^1.10.0", + "to-through": "^2.0.0" + }, + "scripts": { + "test": "standard && tman" + }, + "files": [ + "README.md", + "index.js" + ] +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE new file mode 100755 index 0000000..9af4a67 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md new file mode 100644 index 0000000..d72a059 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md @@ -0,0 +1,1024 @@ +# micromatch [![NPM version](https://img.shields.io/npm/v/micromatch.svg?style=flat)](https://www.npmjs.com/package/micromatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![NPM total downloads](https://img.shields.io/npm/dt/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![Tests](https://github.com/micromatch/micromatch/actions/workflows/test.yml/badge.svg)](https://github.com/micromatch/micromatch/actions/workflows/test.yml) + +> Glob matching for javascript/node.js. A replacement and faster alternative to minimatch and multimatch. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Table of Contents + +
+Details + + * [Install](#install) +- [Sponsors](#sponsors) + * [Gold Sponsors](#gold-sponsors) + * [Quickstart](#quickstart) + * [Why use micromatch?](#why-use-micromatch) + + [Matching features](#matching-features) + * [Switching to micromatch](#switching-to-micromatch) + + [From minimatch](#from-minimatch) + + [From multimatch](#from-multimatch) + * [API](#api) + * [Options](#options) + * [Options Examples](#options-examples) + + [options.basename](#optionsbasename) + + [options.bash](#optionsbash) + + [options.expandRange](#optionsexpandrange) + + [options.format](#optionsformat) + + [options.ignore](#optionsignore) + + [options.matchBase](#optionsmatchbase) + + [options.noextglob](#optionsnoextglob) + + [options.nonegate](#optionsnonegate) + + [options.noglobstar](#optionsnoglobstar) + + [options.nonull](#optionsnonull) + + [options.nullglob](#optionsnullglob) + + [options.onIgnore](#optionsonignore) + + [options.onMatch](#optionsonmatch) + + [options.onResult](#optionsonresult) + + [options.posixSlashes](#optionsposixslashes) + + [options.unescape](#optionsunescape) + * [Extended globbing](#extended-globbing) + + [Extglobs](#extglobs) + + [Braces](#braces) + + [Regex character classes](#regex-character-classes) + + [Regex groups](#regex-groups) + + [POSIX bracket expressions](#posix-bracket-expressions) + * [Notes](#notes) + + [Bash 4.3 parity](#bash-43-parity) + + [Backslashes](#backslashes) + * [Benchmarks](#benchmarks) + + [Running benchmarks](#running-benchmarks) + + [Latest results](#latest-results) + * [Contributing](#contributing) + * [About](#about) + +
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save micromatch +``` + +
+ +# Sponsors + +[Become a Sponsor](https://github.com/sponsors/jonschlinkert) to add your logo to this README, or any of [my other projects](https://github.com/jonschlinkert?tab=repositories&q=&type=&language=&sort=stargazers) + +
+
+## Quickstart
+
+```js
+const micromatch = require('micromatch');
+// micromatch(list, patterns[, options]);
+```
+
+The [main export](#micromatch) takes a list of strings and one or more glob patterns:
+
+```js
+console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['f*', 'b*'])) //=> ['foo', 'bar', 'baz']
+console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['*', '!b*'])) //=> ['foo', 'qux']
+```
+
+Use [.isMatch()](#ismatch) for boolean matching:
+
+```js
+console.log(micromatch.isMatch('foo', 'f*')) //=> true
+console.log(micromatch.isMatch('foo', ['b*', 'f*'])) //=> true
+```
+
+[Switching](#switching-to-micromatch) from minimatch and multimatch is easy!
+
+ +## Why use micromatch? + +> micromatch is a [replacement](#switching-to-micromatch) for minimatch and multimatch + +* Supports all of the same matching features as [minimatch](https://github.com/isaacs/minimatch) and [multimatch](https://github.com/sindresorhus/multimatch) +* More complete support for the Bash 4.3 specification than minimatch and multimatch. Micromatch passes _all of the spec tests_ from bash, including some that bash still fails. +* **Fast & Performant** - Loads in about 5ms and performs [fast matches](#benchmarks). +* **Glob matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories +* **[Advanced globbing](#extended-globbing)** - Supports [extglobs](#extglobs), [braces](#braces-1), and [POSIX brackets](#posix-bracket-expressions), and support for escaping special characters with `\` or quotes. +* **Accurate** - Covers more scenarios [than minimatch](https://github.com/yarnpkg/yarn/pull/3339) +* **Well tested** - More than 5,000 [test assertions](./test) +* **Windows support** - More reliable windows support than minimatch and multimatch. +* **[Safe](https://github.com/micromatch/braces#braces-is-safe)** - Micromatch is not subject to DoS with brace patterns like minimatch and multimatch. + +### Matching features + +* Support for multiple glob patterns (no need for wrappers like multimatch) +* Wildcards (`**`, `*.js`) +* Negation (`'!a/*.js'`, `'*!(b).js'`) +* [extglobs](#extglobs) (`+(x|y)`, `!(a|b)`) +* [POSIX character classes](#posix-bracket-expressions) (`[[:alpha:][:digit:]]`) +* [brace expansion](https://github.com/micromatch/braces) (`foo/{1..5}.md`, `bar/{a,b,c}.js`) +* regex character classes (`foo-[1-5].js`) +* regex logical "or" (`foo/(abc|xyz).js`) + +You can mix and match these features to create whatever patterns you need! + +## Switching to micromatch + +_(There is one notable difference between micromatch and minimatch in regards to how backslashes are handled. See [the notes about backslashes](#backslashes) for more information.)_ + +### From minimatch + +Use [micromatch.isMatch()](#ismatch) instead of `minimatch()`: + +```js +console.log(micromatch.isMatch('foo', 'b*')); //=> false +``` + +Use [micromatch.match()](#match) instead of `minimatch.match()`: + +```js +console.log(micromatch.match(['foo', 'bar'], 'b*')); //=> 'bar' +``` + +### From multimatch + +Same signature: + +```js +console.log(micromatch(['foo', 'bar', 'baz'], ['f*', '*z'])); //=> ['foo', 'baz'] +``` + +## API + +**Params** + +* `list` **{String|Array}**: List of strings to match. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) +* `returns` **{Array}**: Returns an array of matches + +**Example** + +```js +const mm = require('micromatch'); +// mm(list, patterns[, options]); + +console.log(mm(['a.js', 'a.txt'], ['*.js'])); +//=> [ 'a.js' ] +``` + +### [.matcher](index.js#L109) + +Returns a matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. + +**Params** + +* `pattern` **{String}**: Glob pattern +* `options` **{Object}** +* `returns` **{Function}**: Returns a matcher function. 
+ +**Example** + +```js +const mm = require('micromatch'); +// mm.matcher(pattern[, options]); + +const isMatch = mm.matcher('*.!(*a)'); +console.log(isMatch('a.a')); //=> false +console.log(isMatch('a.b')); //=> true +``` + +### [.isMatch](index.js#L128) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* `str` **{String}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `[options]` **{Object}**: See available [options](#options). +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const mm = require('micromatch'); +// mm.isMatch(string, patterns[, options]); + +console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true +console.log(mm.isMatch('a.a', 'b.*')); //=> false +``` + +### [.not](index.js#L153) + +Returns a list of strings that _**do not match any**_ of the given `patterns`. + +**Params** + +* `list` **{Array}**: Array of strings to match. +* `patterns` **{String|Array}**: One or more glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. + +**Example** + +```js +const mm = require('micromatch'); +// mm.not(list, patterns[, options]); + +console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); +//=> ['b.b', 'c.c'] +``` + +### [.contains](index.js#L193) + +Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. + +**Params** + +* `str` **{String}**: The string to match. +* `patterns` **{String|Array}**: Glob pattern to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any of the patterns matches any part of `str`. + +**Example** + +```js +var mm = require('micromatch'); +// mm.contains(string, pattern[, options]); + +console.log(mm.contains('aa/bb/cc', '*b')); +//=> true +console.log(mm.contains('aa/bb/cc', '*d')); +//=> false +``` + +### [.matchKeys](index.js#L235) + +Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. + +**Params** + +* `object` **{Object}**: The object with keys to filter. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Object}**: Returns an object with only keys that match the given patterns. + +**Example** + +```js +const mm = require('micromatch'); +// mm.matchKeys(object, patterns[, options]); + +const obj = { aa: 'a', ab: 'b', ac: 'c' }; +console.log(mm.matchKeys(obj, '*b')); +//=> { ab: 'b' } +``` + +### [.some](index.js#L264) + +Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
+* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any `patterns` matches any of the strings in `list` + +**Example** + +```js +const mm = require('micromatch'); +// mm.some(list, patterns[, options]); + +console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// true +console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.every](index.js#L300) + +Returns true if every string in the given `list` matches any of the given glob `patterns`. + +**Params** + +* `list` **{String|Array}**: The string or array of strings to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if all `patterns` matches all of the strings in `list` + +**Example** + +```js +const mm = require('micromatch'); +// mm.every(list, patterns[, options]); + +console.log(mm.every('foo.js', ['foo.js'])); +// true +console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); +// true +console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); +// false +console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); +// false +``` + +### [.all](index.js#L339) + +Returns true if **all** of the given `patterns` match the specified string. + +**Params** + +* `str` **{String|Array}**: The string to test. +* `patterns` **{String|Array}**: One or more glob patterns to use for matching. +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const mm = require('micromatch'); +// mm.all(string, patterns[, options]); + +console.log(mm.all('foo.js', ['foo.js'])); +// true + +console.log(mm.all('foo.js', ['*.js', '!foo.js'])); +// false + +console.log(mm.all('foo.js', ['*.js', 'foo.js'])); +// true + +console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); +// true +``` + +### [.capture](index.js#L366) + +Returns an array of matches captured by `pattern` in `string, or`null` if the pattern did not match. + +**Params** + +* `glob` **{String}**: Glob pattern to use for matching. +* `input` **{String}**: String to match +* `options` **{Object}**: See available [options](#options) for changing how matches are performed +* `returns` **{Array|null}**: Returns an array of captures if the input matches the glob pattern, otherwise `null`. + +**Example** + +```js +const mm = require('micromatch'); +// mm.capture(pattern, string[, options]); + +console.log(mm.capture('test/*.js', 'test/foo.js')); +//=> ['foo'] +console.log(mm.capture('test/*.js', 'foo/bar.css')); +//=> null +``` + +### [.makeRe](index.js#L392) + +Create a regular expression from the given glob `pattern`. + +**Params** + +* `pattern` **{String}**: A glob pattern to convert to regex. +* `options` **{Object}** +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +const mm = require('micromatch'); +// mm.makeRe(pattern[, options]); + +console.log(mm.makeRe('*.js')); +//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ +``` + +### [.scan](index.js#L408) + +Scan a glob pattern to separate the pattern into segments. Used by the [split](#split) method. 
+ +**Params** + +* `pattern` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with + +**Example** + +```js +const mm = require('micromatch'); +const state = mm.scan(pattern[, options]); +``` + +### [.parse](index.js#L424) + +Parse a glob pattern to create the source string for a regular expression. + +**Params** + +* `glob` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with useful properties and output to be used as regex source string. + +**Example** + +```js +const mm = require('micromatch'); +const state = mm.parse(pattern[, options]); +``` + +### [.braces](index.js#L451) + +Process the given brace `pattern`. + +**Params** + +* `pattern` **{String}**: String with brace pattern to process. +* `options` **{Object}**: Any [options](#options) to change how expansion is performed. See the [braces](https://github.com/micromatch/braces) library for all available options. +* `returns` **{Array}** + +**Example** + +```js +const { braces } = require('micromatch'); +console.log(braces('foo/{a,b,c}/bar')); +//=> [ 'foo/(a|b|c)/bar' ] + +console.log(braces('foo/{a,b,c}/bar', { expand: true })); +//=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] +``` + +## Options + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | +| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | +| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | +| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | +| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | +| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | +| `dot` | `boolean` | `false` | Match dotfiles. Otherwise dotfiles are ignored unless a `.` is explicitly defined in the pattern. | +| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. This option is overridden by the `expandBrace` option. | +| `failglob` | `boolean` | `false` | Similar to the `failglob` behavior in Bash, throws an error when no matches are found. Based on the bash option of the same name. | +| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | +| `flags` | `boolean` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | +| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | +| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. 
|
+| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. |
+| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. |
+| `lookbehinds` | `boolean` | `true` | Support regex positive and negative lookbehinds. Note that you must be using Node 8.1.10 or higher to enable regex lookbehinds. |
+| `matchBase` | `boolean` | `false` | Alias for `basename` |
+| `maxLength` | `number` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. |
+| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. |
+| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. |
+| `nocase` | `boolean` | `false` | Perform case-insensitive matching. Equivalent to the regex `i` flag. Note that this option is ignored when the `flags` option is defined. |
+| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. |
+| `noext` | `boolean` | `false` | Alias for `noextglob` |
+| `noextglob` | `boolean` | `false` | Disable support for matching with [extglobs](#extglobs) (like `+(a\|b)`) |
+| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) |
+| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` |
+| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. |
+| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. |
+| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. |
+| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. |
+| `posix` | `boolean` | `false` | Support [POSIX character classes](#posix-bracket-expressions) ("posix brackets"). |
+| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself. |
+| `prepend` | `string` | `undefined` | String to prepend to the generated regex used for matching. |
+| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). |
+| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. |
+| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. |
+| `unescape` | `boolean` | `undefined` | Remove preceding backslashes from escaped glob characters before creating the regular expression to perform matches. |
+| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. |
+
+## Options Examples
+
+### options.basename
+
+Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`.
+ +**Type**: `Boolean` + +**Default**: `false` + +**Example** + +```js +micromatch(['a/b.js', 'a/c.md'], '*.js'); +//=> [] + +micromatch(['a/b.js', 'a/c.md'], '*.js', { basename: true }); +//=> ['a/b.js'] +``` + +### options.bash + +Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as any other star. + +**Type**: `Boolean` + +**Default**: `true` + +**Example** + +```js +const files = ['abc', 'ajz']; +console.log(micromatch(files, '[a-c]*')); +//=> ['abc', 'ajz'] + +console.log(micromatch(files, '[a-c]*', { bash: false })); +``` + +### options.expandRange + +**Type**: `function` + +**Default**: `undefined` + +Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. + +**Example** + +The following example shows how to create a glob that matches a numeric folder name between `01` and `25`, with leading zeros. + +```js +const fill = require('fill-range'); +const regex = micromatch.makeRe('foo/{01..25}/bar', { + expandRange(a, b) { + return `(${fill(a, b, { toRegex: true })})`; + } +}); + +console.log(regex) +//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ + +console.log(regex.test('foo/00/bar')) // false +console.log(regex.test('foo/01/bar')) // true +console.log(regex.test('foo/10/bar')) // true +console.log(regex.test('foo/22/bar')) // true +console.log(regex.test('foo/25/bar')) // true +console.log(regex.test('foo/26/bar')) // false +``` + +### options.format + +**Type**: `function` + +**Default**: `undefined` + +Custom function for formatting strings before they're matched. + +**Example** + +```js +// strip leading './' from strings +const format = str => str.replace(/^\.\//, ''); +const isMatch = picomatch('foo/*.js', { format }); +console.log(isMatch('./foo/bar.js')) //=> true +``` + +### options.ignore + +String or array of glob patterns to match files to ignore. + +**Type**: `String|Array` + +**Default**: `undefined` + +```js +const isMatch = micromatch.matcher('*', { ignore: 'f*' }); +console.log(isMatch('foo')) //=> false +console.log(isMatch('bar')) //=> true +console.log(isMatch('baz')) //=> true +``` + +### options.matchBase + +Alias for [options.basename](#options-basename). + +### options.noextglob + +Disable extglob support, so that [extglobs](#extglobs) are regarded as literal characters. + +**Type**: `Boolean` + +**Default**: `undefined` + +**Examples** + +```js +console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)')); +//=> ['a/b', 'a/!(z)'] + +console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)', { noextglob: true })); +//=> ['a/!(z)'] (matches only as literal characters) +``` + +### options.nonegate + +Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. + +**Type**: `Boolean` + +**Default**: `undefined` + +### options.noglobstar + +Disable matching with globstars (`**`). + +**Type**: `Boolean` + +**Default**: `undefined` + +```js +micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**'); +//=> ['a/b', 'a/b/c', 'a/b/c/d'] + +micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true}); +//=> ['a/b'] +``` + +### options.nonull + +Alias for [options.nullglob](#options-nullglob). 
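+
+A minimal sketch of the aliased behavior (it mirrors `nullglob`, described below; the expected output follows the `nonull`/`nullglob` handling in `index.js`):
+
+```js
+console.log(micromatch(['a.md'], '*.js'));
+//=> []
+
+console.log(micromatch(['a.md'], '*.js', { nonull: true }));
+//=> ['*.js'] (the glob pattern itself is returned when nothing matches)
+```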
+
+### options.nullglob
+
+If `true`, when no matches are found, the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as the [minimatch](https://github.com/isaacs/minimatch) option `nonull`.
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+### options.onIgnore
+
+```js
+const onIgnore = ({ glob, regex, input, output }) => {
+  console.log({ glob, regex, input, output });
+  // { glob: '*', regex: /^(?:(?!\.)(?=.)[^\/]*?\/?)$/, input: 'foo', output: 'foo' }
+};
+
+const isMatch = micromatch.matcher('*', { onIgnore, ignore: 'f*' });
+isMatch('foo');
+isMatch('bar');
+isMatch('baz');
+```
+
+### options.onMatch
+
+```js
+const onMatch = ({ glob, regex, input, output }) => {
+  console.log({ input, output });
+  // { input: 'some\\path', output: 'some/path' }
+  // { input: 'some\\path', output: 'some/path' }
+  // { input: 'some\\path', output: 'some/path' }
+};
+
+const isMatch = micromatch.matcher('**', { onMatch, posixSlashes: true });
+isMatch('some\\path');
+isMatch('some\\path');
+isMatch('some\\path');
+```
+
+### options.onResult
+
+```js
+const onResult = ({ glob, regex, input, output }) => {
+  console.log({ glob, regex, input, output });
+};
+
+// use .matcher() to get a function; micromatch() itself returns an array
+const isMatch = micromatch.matcher('*', { onResult, ignore: 'f*' });
+isMatch('foo');
+isMatch('bar');
+isMatch('baz');
+```
+
+### options.posixSlashes
+
+Convert path separators on returned files to posix/unix-style forward slashes. Aliased as `unixify` for backwards compatibility.
+
+**Type**: `Boolean`
+
+**Default**: `true` on Windows, `false` everywhere else.
+
+**Example**
+
+```js
+console.log(micromatch.match(['a\\b\\c'], 'a/**'));
+//=> ['a/b/c']
+
+console.log(micromatch.match(['a\\b\\c'], 'a/**', { posixSlashes: false }));
+//=> ['a\\b\\c']
+```
+
+### options.unescape
+
+Remove backslashes from escaped glob characters before creating the regular expression to perform matches.
+
+**Type**: `Boolean`
+
+**Default**: `undefined`
+
+**Example**
+
+In this example we want to match a literal `*`:
+
+```js
+console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c'));
+//=> ['a\\*c']
+
+console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c', { unescape: true }));
+//=> ['a*c']
+```
+
+
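+### options.nocase
+
+Perform case-insensitive matching (see the `nocase` entry in the options table above).
+
+**Type**: `Boolean`
+
+**Default**: `false`
+
+**Example**
+
+A minimal sketch; the first call assumes the default case-sensitive behavior:
+
+```js
+console.log(micromatch(['foo.md', 'FOO.MD'], '*.md'));
+//=> ['foo.md']
+
+console.log(micromatch(['foo.md', 'FOO.MD'], '*.md', { nocase: true }));
+//=> ['foo.md', 'FOO.MD']
+```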
+
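+### options.dot
+
+Match dotfiles (see the `dot` entry in the options table above).
+
+**Type**: `Boolean`
+
+**Default**: `false`
+
+**Example**
+
+A minimal sketch; by default, dotfiles are ignored unless a leading `.` is explicitly defined in the pattern:
+
+```js
+console.log(micromatch(['.babelrc', 'foo.js'], '*'));
+//=> ['foo.js']
+
+console.log(micromatch(['.babelrc', 'foo.js'], '*', { dot: true }));
+//=> ['.babelrc', 'foo.js']
+```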
+
+## Extended globbing
+
+Micromatch supports the following extended globbing features.
+
+### Extglobs
+
+Extended globbing, as described by the bash man page:
+
+| **pattern** | **regex equivalent** | **description** |
+| --- | --- | --- |
+| `?(pattern)` | `(pattern)?` | Matches zero or one occurrence of the given patterns |
+| `*(pattern)` | `(pattern)*` | Matches zero or more occurrences of the given patterns |
+| `+(pattern)` | `(pattern)+` | Matches one or more occurrences of the given patterns |
+| `@(pattern)` | `(pattern)` * | Matches one of the given patterns |
+| `!(pattern)` | N/A (equivalent regex is much more complicated) | Matches anything except one of the given patterns |
+
+* Note that `@` isn't a regex character.
+
+### Braces
+
+Brace patterns can be used to match specific ranges or sets of characters.
+
+**Example**
+
+The pattern `{f,b}*/{1..3}/{b,q}*` would match any of the following strings:
+
+```
+foo/1/bar
+foo/2/bar
+foo/3/bar
+baz/1/qux
+baz/2/qux
+baz/3/qux
+```
+
+Visit [braces](https://github.com/micromatch/braces) to see the full range of features and options related to brace expansion, or to create brace-matching or expansion-related issues.
+
+### Regex character classes
+
+Given the list: `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`:
+
+* `[ac].js`: matches both `a` and `c`, returning `['a.js', 'c.js']`
+* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']`
+* `[A-Z].js`: matches an uppercase letter, returning `['E.js']`
+
+Learn about [regex character classes](http://www.regular-expressions.info/charclass.html).
+
+### Regex groups
+
+Given `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`:
+
+* `(a|c).js`: would match either `a` or `c`, returning `['a.js', 'c.js']`
+* `(b|d).js`: would match either `b` or `d`, returning `['b.js', 'd.js']`
+* `(b|[A-Z]).js`: would match either `b` or an uppercase letter, returning `['b.js', 'E.js']`
+
+As with regex, parens can be nested, so patterns like `((a|b)|c)/b` will work, although brace expansion might be friendlier to use, depending on preference.
+
+### POSIX bracket expressions
+
+POSIX brackets are intended to be more user-friendly than regex character classes. This, of course, is in the eye of the beholder.
+
+**Example**
+
+```js
+console.log(micromatch.isMatch('a1', '[[:alpha:][:digit:]]')) //=> true
+console.log(micromatch.isMatch('a1', '[[:alpha:][:alpha:]]')) //=> false
+```
+
+***
+
+## Notes
+
+### Bash 4.3 parity
+
+Whenever possible, matching behavior is based on the behavior of Bash 4.3, which is mostly consistent with minimatch.
+
+However, it's surprising how many edge cases and rabbit holes there are with glob matching, and since there is no real glob specification, and micromatch is more accurate than both Bash and minimatch, there are cases where best guesses were made for behavior. In a few cases where Bash had no answers, we used wildmatch (used by git) as a fallback.
+
+### Backslashes
+
+There is an important, notable difference between minimatch and micromatch _in regards to how backslashes are handled_ in glob patterns.
+
+* Micromatch exclusively and explicitly reserves backslashes for escaping characters in a glob pattern, even on Windows, which is consistent with bash behavior. _More importantly, unescaping globs can result in unsafe regular expressions_.
+* Minimatch converts all backslashes to forward slashes, which means you can't use backslashes to escape any characters in your glob patterns.
+ +We made this decision for micromatch for a couple of reasons: + +* Consistency with bash conventions. +* Glob patterns are not filepaths. They are a type of [regular language](https://en.wikipedia.org/wiki/Regular_language) that is converted to a JavaScript regular expression. Thus, when forward slashes are defined in a glob pattern, the resulting regular expression will match windows or POSIX path separators just fine. + +**A note about joining paths to globs** + +Note that when you pass something like `path.join('foo', '*')` to micromatch, you are creating a filepath and expecting it to still work as a glob pattern. This causes problems on windows, since the `path.sep` is `\\`. + +In other words, since `\\` is reserved as an escape character in globs, on windows `path.join('foo', '*')` would result in `foo\\*`, which tells micromatch to match `*` as a literal character. This is the same behavior as bash. + +To solve this, you might be inspired to do something like `'foo\\*'.replace(/\\/g, '/')`, but this causes another, potentially much more serious, problem. + +## Benchmarks + +### Running benchmarks + +Install dependencies for running benchmarks: + +```sh +$ cd bench && npm install +``` + +Run the benchmarks: + +```sh +$ npm run bench +``` + +### Latest results + +As of August 23, 2024 (longer bars are better): + +```sh +# .makeRe star + micromatch x 2,232,802 ops/sec ±2.34% (89 runs sampled)) + minimatch x 781,018 ops/sec ±6.74% (92 runs sampled)) + +# .makeRe star; dot=true + micromatch x 1,863,453 ops/sec ±0.74% (93 runs sampled) + minimatch x 723,105 ops/sec ±0.75% (93 runs sampled) + +# .makeRe globstar + micromatch x 1,624,179 ops/sec ±2.22% (91 runs sampled) + minimatch x 1,117,230 ops/sec ±2.78% (86 runs sampled)) + +# .makeRe globstars + micromatch x 1,658,642 ops/sec ±0.86% (92 runs sampled) + minimatch x 741,224 ops/sec ±1.24% (89 runs sampled)) + +# .makeRe with leading star + micromatch x 1,525,014 ops/sec ±1.63% (90 runs sampled) + minimatch x 561,074 ops/sec ±3.07% (89 runs sampled) + +# .makeRe - braces + micromatch x 172,478 ops/sec ±2.37% (78 runs sampled) + minimatch x 96,087 ops/sec ±2.34% (88 runs sampled))) + +# .makeRe braces - range (expanded) + micromatch x 26,973 ops/sec ±0.84% (89 runs sampled) + minimatch x 3,023 ops/sec ±0.99% (90 runs sampled)) + +# .makeRe braces - range (compiled) + micromatch x 152,892 ops/sec ±1.67% (83 runs sampled) + minimatch x 992 ops/sec ±3.50% (89 runs sampled)d)) + +# .makeRe braces - nested ranges (expanded) + micromatch x 15,816 ops/sec ±13.05% (80 runs sampled) + minimatch x 2,953 ops/sec ±1.64% (91 runs sampled) + +# .makeRe braces - nested ranges (compiled) + micromatch x 110,881 ops/sec ±1.85% (82 runs sampled) + minimatch x 1,008 ops/sec ±1.51% (91 runs sampled) + +# .makeRe braces - set (compiled) + micromatch x 134,930 ops/sec ±3.54% (63 runs sampled)) + minimatch x 43,242 ops/sec ±0.60% (93 runs sampled) + +# .makeRe braces - nested sets (compiled) + micromatch x 94,455 ops/sec ±1.74% (69 runs sampled)) + minimatch x 27,720 ops/sec ±1.84% (93 runs sampled)) +``` + +## Contributing + +All contributions are welcome! Please read [the contributing guide](.github/contributing.md) to get started. + +**Bug reports** + +Please create an issue if you encounter a bug or matching behavior that doesn't seem correct. 
If you find a matching-related issue, please:
+
+* [research existing issues first](../../issues) (open and closed)
+* visit the [GNU Bash documentation](https://www.gnu.org/software/bash/manual/) to see how Bash deals with the pattern
+* visit the [minimatch](https://github.com/isaacs/minimatch) documentation to cross-check expected behavior in node.js
+* if all else fails, since there is no real specification for globs, we will probably need to discuss expected behavior and decide how to resolve it, which means any detail you can provide to help with this discussion would be greatly appreciated
+
+**Platform issues**
+
+It's important to us that micromatch work consistently on all platforms. If you encounter any platform-specific matching or path-related issues, please let us know (pull requests are also greatly appreciated).
+
+## About
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards.
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to become familiar with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme); please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [braces](https://www.npmjs.com/package/braces): Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support… [more](https://github.com/micromatch/braces) | [homepage](https://github.com/micromatch/braces "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.") +* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/micromatch/expand-brackets "Expand POSIX bracket expressions (character classes) in glob patterns.") +* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [nanomatch](https://www.npmjs.com/package/nanomatch): Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash… [more](https://github.com/micromatch/nanomatch) | [homepage](https://github.com/micromatch/nanomatch "Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 523 | [jonschlinkert](https://github.com/jonschlinkert) | +| 12 | [es128](https://github.com/es128) | +| 9 | [danez](https://github.com/danez) | +| 8 | [doowb](https://github.com/doowb) | +| 6 | [paulmillr](https://github.com/paulmillr) | +| 5 | [mrmlnc](https://github.com/mrmlnc) | +| 3 | [DrPizza](https://github.com/DrPizza) | +| 2 | [Tvrqvoise](https://github.com/Tvrqvoise) | +| 2 | [antonyk](https://github.com/antonyk) | +| 2 | [MartinKolarik](https://github.com/MartinKolarik) | +| 2 | [Glazy](https://github.com/Glazy) | +| 2 | [mceIdo](https://github.com/mceIdo) | +| 2 | [TrySound](https://github.com/TrySound) | +| 1 | [yvele](https://github.com/yvele) | +| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | +| 1 | [simlu](https://github.com/simlu) | +| 1 | [curbengh](https://github.com/curbengh) | +| 1 | [fidian](https://github.com/fidian) | +| 1 | [tomByrer](https://github.com/tomByrer) | +| 1 | [ZoomerTedJackson](https://github.com/ZoomerTedJackson) | +| 1 | [styfle](https://github.com/styfle) | +| 1 | [sebdeckers](https://github.com/sebdeckers) | +| 1 | [muescha](https://github.com/muescha) | +| 1 | [juszczykjakub](https://github.com/juszczykjakub) | +| 1 | [joyceerhl](https://github.com/joyceerhl) | +| 1 | [donatj](https://github.com/donatj) | +| 1 | [frangio](https://github.com/frangio) | +| 1 | [UltCombo](https://github.com/UltCombo) | +| 1 | [DianeLooney](https://github.com/DianeLooney) | +| 1 | [devongovett](https://github.com/devongovett) | +| 1 | [Cslove](https://github.com/Cslove) | +| 1 | [amilajack](https://github.com/amilajack) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2024, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on August 23, 2024._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js new file mode 100644 index 0000000..cb9d9ef --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js @@ -0,0 +1,474 @@ +'use strict'; + +const util = require('util'); +const braces = require('braces'); +const picomatch = require('picomatch'); +const utils = require('picomatch/lib/utils'); + +const isEmptyString = v => v === '' || v === './'; +const hasBraces = v => { + const index = v.indexOf('{'); + return index > -1 && v.indexOf('}', index) > -1; +}; + +/** + * Returns an array of strings that match one or more glob patterns. + * + * ```js + * const mm = require('micromatch'); + * // mm(list, patterns[, options]); + * + * console.log(mm(['a.js', 'a.txt'], ['*.js'])); + * //=> [ 'a.js' ] + * ``` + * @param {String|Array} `list` List of strings to match. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
+ * @param {Object} `options` See available [options](#options) + * @return {Array} Returns an array of matches + * @summary false + * @api public + */ + +const micromatch = (list, patterns, options) => { + patterns = [].concat(patterns); + list = [].concat(list); + + let omit = new Set(); + let keep = new Set(); + let items = new Set(); + let negatives = 0; + + let onResult = state => { + items.add(state.output); + if (options && options.onResult) { + options.onResult(state); + } + }; + + for (let i = 0; i < patterns.length; i++) { + let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); + let negated = isMatch.state.negated || isMatch.state.negatedExtglob; + if (negated) negatives++; + + for (let item of list) { + let matched = isMatch(item, true); + + let match = negated ? !matched.isMatch : matched.isMatch; + if (!match) continue; + + if (negated) { + omit.add(matched.output); + } else { + omit.delete(matched.output); + keep.add(matched.output); + } + } + } + + let result = negatives === patterns.length ? [...items] : [...keep]; + let matches = result.filter(item => !omit.has(item)); + + if (options && matches.length === 0) { + if (options.failglob === true) { + throw new Error(`No matches found for "${patterns.join(', ')}"`); + } + + if (options.nonull === true || options.nullglob === true) { + return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; + } + } + + return matches; +}; + +/** + * Backwards compatibility + */ + +micromatch.match = micromatch; + +/** + * Returns a matcher function from the given glob `pattern` and `options`. + * The returned function takes a string to match as its only argument and returns + * true if the string is a match. + * + * ```js + * const mm = require('micromatch'); + * // mm.matcher(pattern[, options]); + * + * const isMatch = mm.matcher('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @param {String} `pattern` Glob pattern + * @param {Object} `options` + * @return {Function} Returns a matcher function. + * @api public + */ + +micromatch.matcher = (pattern, options) => picomatch(pattern, options); + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const mm = require('micromatch'); + * // mm.isMatch(string, patterns[, options]); + * + * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(mm.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `[options]` See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Backwards compatibility + */ + +micromatch.any = micromatch.isMatch; + +/** + * Returns a list of strings that _**do not match any**_ of the given `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.not(list, patterns[, options]); + * + * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); + * //=> ['b.b', 'c.c'] + * ``` + * @param {Array} `list` Array of strings to match. + * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
+ * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array} Returns an array of strings that **do not match** the given patterns. + * @api public + */ + +micromatch.not = (list, patterns, options = {}) => { + patterns = [].concat(patterns).map(String); + let result = new Set(); + let items = []; + + let onResult = state => { + if (options.onResult) options.onResult(state); + items.push(state.output); + }; + + let matches = new Set(micromatch(list, patterns, { ...options, onResult })); + + for (let item of items) { + if (!matches.has(item)) { + result.add(item); + } + } + return [...result]; +}; + +/** + * Returns true if the given `string` contains the given pattern. Similar + * to [.isMatch](#isMatch) but the pattern can match any part of the string. + * + * ```js + * var mm = require('micromatch'); + * // mm.contains(string, pattern[, options]); + * + * console.log(mm.contains('aa/bb/cc', '*b')); + * //=> true + * console.log(mm.contains('aa/bb/cc', '*d')); + * //=> false + * ``` + * @param {String} `str` The string to match. + * @param {String|Array} `patterns` Glob pattern to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any of the patterns matches any part of `str`. + * @api public + */ + +micromatch.contains = (str, pattern, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + if (Array.isArray(pattern)) { + return pattern.some(p => micromatch.contains(str, p, options)); + } + + if (typeof pattern === 'string') { + if (isEmptyString(str) || isEmptyString(pattern)) { + return false; + } + + if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { + return true; + } + } + + return micromatch.isMatch(str, pattern, { ...options, contains: true }); +}; + +/** + * Filter the keys of the given object with the given `glob` pattern + * and `options`. Does not attempt to match nested keys. If you need this feature, + * use [glob-object][] instead. + * + * ```js + * const mm = require('micromatch'); + * // mm.matchKeys(object, patterns[, options]); + * + * const obj = { aa: 'a', ab: 'b', ac: 'c' }; + * console.log(mm.matchKeys(obj, '*b')); + * //=> { ab: 'b' } + * ``` + * @param {Object} `object` The object with keys to filter. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Object} Returns an object with only keys that match the given patterns. + * @api public + */ + +micromatch.matchKeys = (obj, patterns, options) => { + if (!utils.isObject(obj)) { + throw new TypeError('Expected the first argument to be an object'); + } + let keys = micromatch(Object.keys(obj), patterns, options); + let res = {}; + for (let key of keys) res[key] = obj[key]; + return res; +}; + +/** + * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.some(list, patterns[, options]); + * + * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // true + * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
+ * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` + * @api public + */ + +micromatch.some = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (items.some(item => isMatch(item))) { + return true; + } + } + return false; +}; + +/** + * Returns true if every string in the given `list` matches + * any of the given glob `patterns`. + * + * ```js + * const mm = require('micromatch'); + * // mm.every(list, patterns[, options]); + * + * console.log(mm.every('foo.js', ['foo.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); + * // true + * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); + * // false + * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); + * // false + * ``` + * @param {String|Array} `list` The string or array of strings to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` + * @api public + */ + +micromatch.every = (list, patterns, options) => { + let items = [].concat(list); + + for (let pattern of [].concat(patterns)) { + let isMatch = picomatch(String(pattern), options); + if (!items.every(item => isMatch(item))) { + return false; + } + } + return true; +}; + +/** + * Returns true if **all** of the given `patterns` match + * the specified string. + * + * ```js + * const mm = require('micromatch'); + * // mm.all(string, patterns[, options]); + * + * console.log(mm.all('foo.js', ['foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); + * // false + * + * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); + * // true + * + * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); + * // true + * ``` + * @param {String|Array} `str` The string to test. + * @param {String|Array} `patterns` One or more glob patterns to use for matching. + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +micromatch.all = (str, patterns, options) => { + if (typeof str !== 'string') { + throw new TypeError(`Expected a string: "${util.inspect(str)}"`); + } + + return [].concat(patterns).every(p => picomatch(p, options)(str)); +}; + +/** + * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. + * + * ```js + * const mm = require('micromatch'); + * // mm.capture(pattern, string[, options]); + * + * console.log(mm.capture('test/*.js', 'test/foo.js')); + * //=> ['foo'] + * console.log(mm.capture('test/*.js', 'foo/bar.css')); + * //=> null + * ``` + * @param {String} `glob` Glob pattern to use for matching. + * @param {String} `input` String to match + * @param {Object} `options` See available [options](#options) for changing how matches are performed + * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
+ * @api public + */ + +micromatch.capture = (glob, input, options) => { + let posix = utils.isWindows(options); + let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); + let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); + + if (match) { + return match.slice(1).map(v => v === void 0 ? '' : v); + } +}; + +/** + * Create a regular expression from the given glob `pattern`. + * + * ```js + * const mm = require('micromatch'); + * // mm.makeRe(pattern[, options]); + * + * console.log(mm.makeRe('*.js')); + * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ + * ``` + * @param {String} `pattern` A glob pattern to convert to regex. + * @param {Object} `options` + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +micromatch.makeRe = (...args) => picomatch.makeRe(...args); + +/** + * Scan a glob pattern to separate the pattern into segments. Used + * by the [split](#split) method. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.scan(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +micromatch.scan = (...args) => picomatch.scan(...args); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const mm = require('micromatch'); + * const state = mm.parse(pattern[, options]); + * ``` + * @param {String} `glob` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as regex source string. + * @api public + */ + +micromatch.parse = (patterns, options) => { + let res = []; + for (let pattern of [].concat(patterns || [])) { + for (let str of braces(String(pattern), options)) { + res.push(picomatch.parse(str, options)); + } + } + return res; +}; + +/** + * Process the given brace `pattern`. + * + * ```js + * const { braces } = require('micromatch'); + * console.log(braces('foo/{a,b,c}/bar')); + * //=> [ 'foo/(a|b|c)/bar' ] + * + * console.log(braces('foo/{a,b,c}/bar', { expand: true })); + * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] + * ``` + * @param {String} `pattern` String with brace pattern to process. + * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. + * @return {Array} + * @api public + */ + +micromatch.braces = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + if ((options && options.nobrace === true) || !hasBraces(pattern)) { + return [pattern]; + } + return braces(pattern, options); +}; + +/** + * Expand braces + */ + +micromatch.braceExpand = (pattern, options) => { + if (typeof pattern !== 'string') throw new TypeError('Expected a string'); + return micromatch.braces(pattern, { ...options, expand: true }); +}; + +/** + * Expose micromatch + */ + +// exposed for tests +micromatch.hasBraces = hasBraces; +module.exports = micromatch; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json new file mode 100644 index 0000000..d5558bb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json @@ -0,0 +1,119 @@ +{ + "name": "micromatch", + "description": "Glob matching for javascript/node.js. 
A replacement and faster alternative to minimatch and multimatch.", + "version": "4.0.8", + "homepage": "https://github.com/micromatch/micromatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "(https://github.com/DianeLooney)", + "Amila Welihinda (amilajack.com)", + "Bogdan Chadkin (https://github.com/TrySound)", + "Brian Woodward (https://twitter.com/doowb)", + "Devon Govett (http://badassjs.com)", + "Elan Shanker (https://github.com/es128)", + "Fabrício Matté (https://ultcombo.js.org)", + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Martin Kolárik (https://kolarik.sk)", + "Olsten Larck (https://i.am.charlike.online)", + "Paul Miller (paulmillr.com)", + "Tom Byrer (https://github.com/tomByrer)", + "Tyler Akins (http://rumkin.com)", + "Peter Bright (https://github.com/drpizza)", + "Kuba Juszczyk (https://github.com/ku8ar)" + ], + "repository": "micromatch/micromatch", + "bugs": { + "url": "https://github.com/micromatch/micromatch/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8.6" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "devDependencies": { + "fill-range": "^7.0.1", + "gulp-format-md": "^2.0.0", + "minimatch": "^5.0.1", + "mocha": "^9.2.2", + "time-require": "github:jonschlinkert/time-require" + }, + "keywords": [ + "bash", + "bracket", + "character-class", + "expand", + "expansion", + "expression", + "extglob", + "extglobs", + "file", + "files", + "filter", + "find", + "glob", + "globbing", + "globs", + "globstar", + "lookahead", + "lookaround", + "lookbehind", + "match", + "matcher", + "matches", + "matching", + "micromatch", + "minimatch", + "multimatch", + "negate", + "negation", + "path", + "pattern", + "patterns", + "posix", + "regex", + "regexp", + "regular", + "shell", + "star", + "wildcard" + ], + "verb": { + "toc": "collapsible", + "layout": "default", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "nanomatch" + ] + }, + "reflinks": [ + "extglob", + "fill-range", + "glob-object", + "minimatch", + "multimatch" + ] + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts new file mode 100644 index 0000000..b4047d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts @@ -0,0 +1,54 @@ +declare const mimicFn: { + /** + Make a function mimic another one. It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. + + @param to - Mimicking function. + @param from - Function to mimic. + @returns The modified `to` function. 
+ + @example + ``` + import mimicFn = require('mimic-fn'); + + function foo() {} + foo.unicorn = '🦄'; + + function wrapper() { + return foo(); + } + + console.log(wrapper.name); + //=> 'wrapper' + + mimicFn(wrapper, foo); + + console.log(wrapper.name); + //=> 'foo' + + console.log(wrapper.unicorn); + //=> '🦄' + ``` + */ + < + ArgumentsType extends unknown[], + ReturnType, + FunctionType extends (...arguments: ArgumentsType) => ReturnType + >( + to: (...arguments: ArgumentsType) => ReturnType, + from: FunctionType + ): FunctionType; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function mimicFn< + // ArgumentsType extends unknown[], + // ReturnType, + // FunctionType extends (...arguments: ArgumentsType) => ReturnType + // >( + // to: (...arguments: ArgumentsType) => ReturnType, + // from: FunctionType + // ): FunctionType; + // export = mimicFn; + default: typeof mimicFn; +}; + +export = mimicFn; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js new file mode 100644 index 0000000..1a59705 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js @@ -0,0 +1,13 @@ +'use strict'; + +const mimicFn = (to, from) => { + for (const prop of Reflect.ownKeys(from)) { + Object.defineProperty(to, prop, Object.getOwnPropertyDescriptor(from, prop)); + } + + return to; +}; + +module.exports = mimicFn; +// TODO: Remove this for the next major release +module.exports.default = mimicFn; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json new file mode 100644 index 0000000..199d2c7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json @@ -0,0 +1,42 @@ +{ + "name": "mimic-fn", + "version": "2.1.0", + "description": "Make a function mimic another one", + "license": "MIT", + "repository": "sindresorhus/mimic-fn", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "function", + "mimic", + "imitate", + "rename", + "copy", + "inherit", + "properties", + "name", + "func", + "fn", + "set", + "infer", + "change" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md new file mode 100644 index 0000000..0ef8a13 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md @@ -0,0 +1,69 @@ +# mimic-fn [![Build Status](https://travis-ci.org/sindresorhus/mimic-fn.svg?branch=master)](https://travis-ci.org/sindresorhus/mimic-fn) + +> Make a function mimic another one + +Useful when you wrap a function in another function and like to preserve the original name and other properties. + + +## Install + +``` +$ npm install mimic-fn +``` + + +## Usage + +```js +const mimicFn = require('mimic-fn'); + +function foo() {} +foo.unicorn = '🦄'; + +function wrapper() { + return foo(); +} + +console.log(wrapper.name); +//=> 'wrapper' + +mimicFn(wrapper, foo); + +console.log(wrapper.name); +//=> 'foo' + +console.log(wrapper.unicorn); +//=> '🦄' +``` + + +## API + +It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. + +### mimicFn(to, from) + +Modifies the `to` function and returns it. + +#### to + +Type: `Function` + +Mimicking function. + +#### from + +Type: `Function` + +Function to mimic. 
+ + +## Related + +- [rename-fn](https://github.com/sindresorhus/rename-fn) - Rename a function +- [keep-func-props](https://github.com/ehmicky/keep-func-props) - Wrap a function without changing its name, length and other properties + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml new file mode 100644 index 0000000..5c04817 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml @@ -0,0 +1,15 @@ +language: c +services: docker +os: linux +env: + - PACKAGE=python TARGET=linux64 MODE=Debug + - PACKAGE=dune-freetype TARGET=linux64 MODE=Debug + - PACKAGE=haxx-libcurl TARGET=linux64 MODE=Debug + - PACKAGE=fmod TARGET=linux64 MODE=Debug + - PACKAGE=intel-tbb TARGET=linux64 MODE=Debug + - PACKAGE=cryptopp TARGET=linux64 MODE=Debug + - PACKAGE=ois TARGET=linux64 MODE=Debug + - PACKAGE=bullet2 TARGET=linux64 MODE=Debug +script: + - PACKAGE=$PACKAGE make $TARGET + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE new file mode 100644 index 0000000..9e0fb24 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile new file mode 100644 index 0000000..bfa5464 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile @@ -0,0 +1,21 @@ +PACKAGE ?= . 
+MODE ?= Debug
+
+all: clean build
+
+build:
+	(cd cmaki_identifier && npm install --unsafe-perm)
+	(cd cmaki_generator && ./build ${PACKAGE} -d)
+
+clean:
+	(cd cmaki_identifier && rm -Rf bin artifacts)
+
+linux64:
+	docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} linux64 make
+
+windows64:
+	docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} windows64 make
+
+android64:
+	docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} android64 make
+
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README
new file mode 100644
index 0000000..75a7863
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README
@@ -0,0 +1,57 @@
+# fusion projects in one product
+
+- cmaki
+- cmaki_scripts
+- cmaki_identifier
+- cmaki_docker
+- cmaki_generator
+- servfactor
+
+# environment variables
+- Artifact server:
+- NPP_SERVER = http://....
+
+- Build mode:
+- NPP_MODE = Debug, Release ....
+
+- Install directory:
+- NPP_INSTALL
+
+- Use cached artifacts or always compile:
+- NPP_CACHE=TRUE/FALSE
+
+
+
+
+
+
+refactor cmake
+-----------------------------------
+
+cmaki_library ---------> npp_shared
+cmaki_static_library --> npp_static
+cmaki_executable ------> npp_executable
+cmaki_test ------------> npp_test
+cmaki_google_test -----> npp_google_test
+cmaki_python_test -----> npp_python_test
+
+
+
+Usage commands
+------------
+npm install
+npm test
+
+npm run create # create package
+npm run upload # upload package
+
+
+windows environment
+------------------
+visual studio 2019
+mini conda
+npm
+cmake
+pip install conan
+chocolatey
+choco install tortoisegit
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml
new file mode 100644
index 0000000..44de95c
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml
@@ -0,0 +1,5 @@
+language: c
+services: docker
+os: linux
+script:
+  - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh)
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake
new file mode 100644
index 0000000..4bfc61e
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake
@@ -0,0 +1,157 @@
+cmake_minimum_required(VERSION 2.8.7)
+
+include("${CMAKE_CURRENT_LIST_DIR}/Utils.cmake")
+include(CMakeParseArguments)
+
+find_package(Git)
+if(NOT GIT_FOUND)
+    message(FATAL_ERROR "git not found!")
+endif()
+
+
+# clone a git repo into a directory at configure time
+# this can be useful for including cmake-library projects that contain *.cmake files
+# the function will automatically init git submodules too
+#
+# ATTENTION: CMakeLists-files in the cloned repo will NOT be built automatically
+#
+# why not use ExternalProject_Add you ask? because we need to run this at configure time
+#
+# USAGE:
+# git_clone(
+# PROJECT_NAME
+# GIT_URL
+# [GIT_TAG|GIT_BRANCH|GIT_COMMIT ]
+# [DIRECTORY ]
+# [QUIET]
+# )
+#
+#
+# ARGUMENTS:
+# PROJECT_NAME
+# name of the project that will be used in output variables.
+#       must be the same as the git directory/repo name
+#
+#   GIT_URL
+#       url to the git repo
+#
+#   GIT_TAG|GIT_BRANCH|GIT_COMMIT
+#       optional
+#       the tag/branch/commit to checkout
+#       default is master
+#
+#   DIRECTORY
+#       optional
+#       the directory the project will be cloned into
+#       default is the build directory, similar to ExternalProject (${CMAKE_BINARY_DIR})
+#
+#   QUIET
+#       optional
+#       don't print status messages
+#
+#
+# OUTPUT VARIABLES:
+#   <project name>_SOURCE_DIR
+#       top level source directory of the cloned project
+#
+#
+# EXAMPLE:
+#   cmaki_git_clone(
+#       PROJECT_NAME    testProj
+#       GIT_URL         https://github.com/test/test.git
+#       GIT_COMMIT      a1b2c3
+#       DIRECTORY       ${CMAKE_BINARY_DIR}
+#       QUIET
+#   )
+#
+#   include(${testProj_SOURCE_DIR}/cmake/myFancyLib.cmake)
+
+function(cmaki_git_clone)
+
+	cmake_parse_arguments(
+		PARGS  # prefix of output variables
+		"QUIET"  # list of names of the boolean arguments (only defined ones will be true)
+		"PROJECT_NAME;GIT_URL;GIT_TAG;GIT_BRANCH;GIT_COMMIT;DIRECTORY"  # list of names of mono-valued arguments
+		""  # list of names of multi-valued arguments (output variables are lists)
+		${ARGN}  # arguments of the function to parse, here we take all the original ones
+	)  # remaining unparsed arguments can be found in PARGS_UNPARSED_ARGUMENTS
+
+	if(NOT PARGS_PROJECT_NAME)
+		message(FATAL_ERROR "You must provide a project name")
+	endif()
+
+	if(NOT PARGS_GIT_URL)
+		message(FATAL_ERROR "You must provide a git url")
+	endif()
+
+	if(NOT PARGS_DIRECTORY)
+		set(PARGS_DIRECTORY ${CMAKE_BINARY_DIR})
+	endif()
+
+	set(${PARGS_PROJECT_NAME}_SOURCE_DIR
+		${PARGS_DIRECTORY}/${PARGS_PROJECT_NAME}
+		CACHE INTERNAL "" FORCE)  # makes var visible everywhere because PARENT_SCOPE wouldn't include this scope
+
+	set(SOURCE_DIR ${PARGS_PROJECT_NAME}_SOURCE_DIR)
+
+	# check that at most one of GIT_TAG, GIT_BRANCH and GIT_COMMIT was passed
+	at_most_one(at_most_one_tag ${PARGS_GIT_TAG} ${PARGS_GIT_BRANCH} ${PARGS_GIT_COMMIT})
+
+	if(NOT at_most_one_tag)
+		message(FATAL_ERROR "you can only provide one of GIT_TAG, GIT_BRANCH or GIT_COMMIT")
+	endif()
+
+	if(NOT PARGS_QUIET)
+		message(STATUS "downloading/updating ${PARGS_PROJECT_NAME}")
+	endif()
+
+	# first clone the repo
+	if(EXISTS ${${SOURCE_DIR}})
+		if(NOT PARGS_QUIET)
+			message(STATUS "${PARGS_PROJECT_NAME} directory found, pulling...")
+		endif()
+
+		execute_process(
+			COMMAND ${GIT_EXECUTABLE} pull origin master
+			COMMAND ${GIT_EXECUTABLE} submodule update --remote
+			WORKING_DIRECTORY ${${SOURCE_DIR}}
+			OUTPUT_VARIABLE git_output)
+	else()
+		if(NOT PARGS_QUIET)
+			message(STATUS "${PARGS_PROJECT_NAME} directory not found, cloning...")
+		endif()
+
+		execute_process(
+			COMMAND ${GIT_EXECUTABLE} clone ${PARGS_GIT_URL} --recursive
+			WORKING_DIRECTORY ${PARGS_DIRECTORY}
+			OUTPUT_VARIABLE git_output)
+	endif()
+
+	if(NOT PARGS_QUIET)
+		message("${git_output}")
+	endif()
+
+	# now checkout the right commit
+	if(PARGS_GIT_TAG)
+		execute_process(
+			COMMAND ${GIT_EXECUTABLE} fetch --all --tags --prune
+			COMMAND ${GIT_EXECUTABLE} checkout tags/${PARGS_GIT_TAG} -b tag_${PARGS_GIT_TAG}
+			WORKING_DIRECTORY ${${SOURCE_DIR}}
+			OUTPUT_VARIABLE git_output)
+	elseif(PARGS_GIT_BRANCH OR PARGS_GIT_COMMIT)
+		# GIT_BRANCH and GIT_COMMIT are mutually exclusive (checked above), so exactly one of them expands here
+		execute_process(
+			COMMAND ${GIT_EXECUTABLE} checkout ${PARGS_GIT_BRANCH}${PARGS_GIT_COMMIT}
+			WORKING_DIRECTORY ${${SOURCE_DIR}}
+			OUTPUT_VARIABLE git_output)
+	else()
+		message(STATUS "no tag specified, defaulting to master")
+		execute_process(
+			COMMAND ${GIT_EXECUTABLE} checkout master
+			WORKING_DIRECTORY ${${SOURCE_DIR}}
+			OUTPUT_VARIABLE git_output)
+	endif()
+
+	if(NOT PARGS_QUIET)
+		
message("${git_output}") + endif() +endfunction() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE new file mode 100644 index 0000000..7e79e4d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md new file mode 100644 index 0000000..9d7b1b0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md @@ -0,0 +1,4 @@ +# :construction: I am under construction [![npm version](https://badge.fury.io/js/cmaki.svg)](https://badge.fury.io/js/cmaki) +Don't use it [![Build Status](https://travis-ci.org/makiolo/cmaki.svg?branch=master)](https://travis-ci.org/makiolo/cmaki) +# quick +bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/bootstrap.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake new file mode 100644 index 0000000..a76708c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake @@ -0,0 +1,32 @@ +# returns true if only a single one of its arguments is true +function(xor result) + set(true_args_count 0) + + foreach(foo ${ARGN}) + if(foo) + math(EXPR true_args_count "${true_args_count}+1") + endif() + endforeach() + + if(NOT (${true_args_count} EQUAL 1)) + set(${result} FALSE PARENT_SCOPE) + else() + set(${result} TRUE PARENT_SCOPE) + endif() +endfunction() + +function(at_most_one result) + set(true_args_count 0) + + foreach(foo ${ARGN}) + if(foo) + math(EXPR true_args_count "${true_args_count}+1") + endif() + endforeach() + + if(${true_args_count} GREATER 1) + set(${result} FALSE PARENT_SCOPE) + else() + set(${result} TRUE PARENT_SCOPE) + endif() +endfunction() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh new file mode 100755 index 0000000..faeadbd --- /dev/null +++ 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" + +if [[ "$WINEARCH" = "win32" ]]; then + wine $CMAKI_INSTALL/cmaki_identifier.exe +else + $CMAKI_EMULATOR $CMAKI_INSTALL/cmaki_identifier +fi + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake new file mode 100644 index 0000000..74b034f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake @@ -0,0 +1,529 @@ +if(NOT DEFINED CMAKE_MODULE_PATH) + set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}) +endif() + +IF(NOT DEFINED CMAKI_PATH) + set(CMAKI_PATH ${CMAKE_CURRENT_LIST_DIR}) +ENDIF() + +include("${CMAKE_CURRENT_LIST_DIR}/facts/facts.cmake") +include("${CMAKE_CURRENT_LIST_DIR}/GitUtils.cmake") + +option(FIRST_ERROR "stop on first compilation error" FALSE) + +macro(cmaki_setup) + enable_modern_cpp() + enable_testing() + SET(CMAKE_BUILD_TYPE_INIT Release) + set(CMAKE_CXX_STANDARD 14) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS ON) + IF(WITH_CONAN) + # Conan + message("-- Using conan dir: ${CMAKE_BINARY_DIR}") + include("${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") + conan_basic_setup() + ENDIF() +endmacro() + +macro (mark_as_internal _var) + set(${_var} ${${_var}} CACHE INTERNAL "hide this!" FORCE) +endmacro(mark_as_internal _var) + +macro (option_combobox _var options default_option comment) + set(${_var} "${default_option}" CACHE STRING "${comment}") + set(${_var}Values "${options}" CACHE INTERNAL "hide this!" 
FORCE) + set_property(CACHE ${_var} PROPERTY STRINGS ${${_var}Values}) +endmacro() + +function(cmaki_install_file FROM) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_install_file_into FROM TO) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_install_file_and_rename FROM NEWNAME) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) + endforeach() +endfunction() + +function(cmaki_install_file_into_and_rename FROM TO NEWNAME) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) + endforeach() +endfunction() + +function(cmaki_install_files FROM) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + FILE(GLOB files ${FROM}) + INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_install_files_into FROM TO) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + FILE(GLOB files ${FROM}) + INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +macro(cmaki_install_inside_dir _DESTINE) + file(GLOB DEPLOY_FILES_AND_DIRS "${_DESTINE}/*") + foreach(ITEM ${DEPLOY_FILES_AND_DIRS}) + IF( IS_DIRECTORY "${ITEM}" ) + LIST( APPEND DIRS_TO_DEPLOY "${ITEM}" ) + ELSE() + IF(ITEM STREQUAL "${_DESTINE}/CMakeLists.txt") + MESSAGE("skipped file: ${_DESTINE}/CMakeLists.txt") + ELSE() + LIST(APPEND FILES_TO_DEPLOY "${ITEM}") + ENDIF() + ENDIF() + endforeach() + INSTALL(FILES ${FILES_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) + INSTALL(DIRECTORY ${DIRS_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) +endmacro() + +macro(cmaki_install_dir _DESTINE) + INSTALL(DIRECTORY ${_DESTINE} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) +endmacro() + +macro(cmaki_parse_parameters) + set(PARAMETERS ${ARGV}) + list(GET PARAMETERS 0 _MAIN_NAME) + list(REMOVE_AT PARAMETERS 0) + SET(HAVE_TESTS FALSE) + SET(HAVE_PCH FALSE) + SET(HAVE_PTHREADS FALSE) + set(_DEPENDS) + set(_SOURCES) + set(_TESTS) + set(_PCH) + set(_INCLUDES) + set(_SUFFIX_DESTINATION) + set(NOW_IN SOURCES) + while(PARAMETERS) + list(GET PARAMETERS 0 PARM) + if(PARM STREQUAL DEPENDS) + set(NOW_IN DEPENDS) + elseif(PARM STREQUAL SOURCES) + set(NOW_IN SOURCES) + elseif(PARM STREQUAL TESTS) + set(NOW_IN TESTS) + elseif(PARM STREQUAL PCH) + set(NOW_IN PCH) + elseif(PARM STREQUAL PTHREADS) + if(NOT WIN32) + # no enabled in windows + set(HAVE_PTHREADS TRUE) + endif() + elseif(PARM STREQUAL INCLUDES) + set(NOW_IN INCLUDES) + elseif(PARM STREQUAL DESTINATION) + set(NOW_IN DESTINATION) + else() + if(NOW_IN STREQUAL DEPENDS) + set(_DEPENDS ${_DEPENDS} ${PARM}) + elseif(NOW_IN STREQUAL SOURCES) + set(_SOURCES ${_SOURCES} ${PARM}) + elseif(NOW_IN STREQUAL TESTS) + set(_TESTS ${_TESTS} ${PARM}) + SET(HAVE_TESTS TRUE) + elseif(NOW_IN STREQUAL PCH) + set(_PCH ${PARM}) + SET(HAVE_PCH TRUE) + elseif(NOW_IN STREQUAL INCLUDES) + set(_INCLUDES ${_INCLUDES} ${PARM}) + elseif(NOW_IN STREQUAL DESTINATION) + 
set(_SUFFIX_DESTINATION ${PARM}) + else() + message(FATAL_ERROR "Unknown argument ${PARM}.") + endif() + endif() + list(REMOVE_AT PARAMETERS 0) + endwhile() +endmacro() + +function(cmaki_simple_executable) + cmaki_parse_parameters(${ARGV}) + set(_EXECUTABLE_NAME ${_MAIN_NAME}) + MESSAGE("++ executable ${_EXECUTABLE_NAME}") + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_EXECUTABLE_NAME}) + foreach(INCLUDE_DIR ${_INCLUDES}) + target_include_directories(${_EXECUTABLE_NAME} ${INCLUDE_DIR}) + endforeach() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + if(WIN32) + ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) + else() + ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) + endif() + target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_EXECUTABLE_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_EXECUTABLE_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() + generate_clang() + +endfunction() + +function(cmaki_simple_library) + cmaki_parse_parameters(${ARGV}) + set(_LIBRARY_NAME ${_MAIN_NAME}) + MESSAGE("++ library ${_LIBRARY_NAME}") + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_LIBRARY_NAME}) + foreach(INCLUDE_DIR ${_INCLUDES}) + target_include_directories(${_LIBRARY_NAME} ${INCLUDE_DIR}) + endforeach() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) + target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_LIBRARY_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_LIBRARY_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() + generate_clang() + +endfunction() + +function(cmaki_simple_test) + cmaki_parse_parameters(${ARGV}) + set(_TEST_NAME ${_MAIN_NAME}) + common_flags() + common_linking(${_TEST_NAME}) + MESSAGE("++ test ${_TEST_NAME}") + foreach(INCLUDE_DIR ${_INCLUDES}) + target_include_directories(${_TEST_NAME} ${INCLUDE_DIR}) + endforeach() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_executable(${_TEST_NAME} ${_SOURCES}) + target_link_libraries(${_TEST_NAME} ${_DEPENDS}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_TEST_NAME} -lpthread) + endif() + endif() + common_linking(${_TEST_NAME}) + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_TEST_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + if(WIN32) + add_test( + NAME ${_TEST_NAME}__ + COMMAND ${_TEST_NAME} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + else() + 
+ if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) + find_program(VALGRIND "valgrind") + if(VALGRIND) + add_test( + NAME ${_TEST_NAME}_memcheck + COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_cachegrind + COMMAND "${VALGRIND}" --tool=cachegrind $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_helgrind + COMMAND "${VALGRIND}" --tool=helgrind $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_callgrind + COMMAND "${VALGRIND}" --tool=callgrind $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_drd + COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + else() + message(FATAL_ERROR "no valgrind detected") + endif() + else() + add_test( + NAME ${_TEST_NAME}_test + COMMAND bash cmaki_emulator.sh $ + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + CONFIGURATIONS ${BUILD_TYPE}) + endif() + endif() + endforeach() + generate_vcxproj_user(${_TEST_NAME}) + generate_clang() + +endfunction() + +macro(common_linking) + + set(PARAMETERS ${ARGV}) + list(GET PARAMETERS 0 TARGET) + # if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) + # target_link_libraries(${TARGET} -lubsan) + # endif() + +endmacro() + +macro(common_flags) + + if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) + add_definitions(/wd4251) + add_definitions(/wd4275) + add_definitions(/wd4239) + add_definitions(/wd4316) + add_definitions(/wd4127) + add_definitions(/wd4245) + add_definitions(/wd4458) + add_definitions(/wd4146) + add_definitions(/wd4244) + add_definitions(/wd4189) + add_definitions(/wd4100) + add_definitions(/wd4706) + add_definitions(/WX /W4) + add_definitions(-Zm200) + endif() + + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") + endif() + +endmacro() + +macro(enable_modern_cpp) + + if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) + add_definitions(/EHsc) + add_definitions(/D_SCL_SECURE_NO_WARNINGS) + else() + # add_definitions(-fno-rtti -fno-exceptions ) + # activate all warnings and convert in errors + # add_definitions(-Weffc++) + # add_definitions(-pedantic -pedantic-errors) + + # Python: need disabling: initialization discards ‘const’ qualifier from pointer target type + # add_definitions(-Werror) + + add_definitions(-Wall -Wextra -Waggregate-return -Wcast-align -Wcast-qual -Wconversion) + add_definitions(-Wdisabled-optimization -Wformat=2 -Wformat-nonliteral -Wformat-security -Wformat-y2k) + add_definitions(-Wimport -Winit-self -Winline -Winvalid-pch -Wlong-long -Wmissing-field-initializers -Wmissing-format-attribute) + add_definitions(-Wpointer-arith -Wredundant-decls -Wshadow) + add_definitions(-Wstack-protector -Wunreachable-code -Wunused) + add_definitions(-Wunused-parameter -Wvariadic-macros -Wwrite-strings) + add_definitions(-Wswitch-default -Wswitch-enum) + # only gcc + # convert error in warnings + add_definitions(-Wno-error=shadow) + add_definitions(-Wno-error=long-long) + add_definitions(-Wno-error=aggregate-return) + 
add_definitions(-Wno-error=unused-variable) + add_definitions(-Wno-error=unused-parameter) + add_definitions(-Wno-error=deprecated-declarations) + add_definitions(-Wno-error=missing-include-dirs) + add_definitions(-Wno-error=packed) + add_definitions(-Wno-error=switch-default) + add_definitions(-Wno-error=float-equal) + add_definitions(-Wno-error=invalid-pch) + add_definitions(-Wno-error=cast-qual) + add_definitions(-Wno-error=conversion) + add_definitions(-Wno-error=switch-enum) + add_definitions(-Wno-error=redundant-decls) + add_definitions(-Wno-error=stack-protector) + add_definitions(-Wno-error=extra) + add_definitions(-Wno-error=unused-result) + add_definitions(-Wno-error=sign-compare) + + # raknet + add_definitions(-Wno-error=address) + add_definitions(-Wno-error=cast-qual) + add_definitions(-Wno-error=missing-field-initializers) + add_definitions(-Wno-error=write-strings) + add_definitions(-Wno-error=format-nonliteral) + + # sdl2 + add_definitions(-Wno-error=sign-conversion) + + # TODO: remove + add_definitions(-Wno-error=reorder) + + # if not have openmp + add_definitions(-Wno-error=unknown-pragmas) + + if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + add_definitions(-Wno-error=suggest-attribute=format) + add_definitions(-Wno-error=suggest-attribute=noreturn) + add_definitions(-Wno-aggregate-return) + add_definitions(-Wno-long-long) + add_definitions(-Wno-shadow) + add_definitions(-Wno-strict-aliasing) + add_definitions(-Wno-error=inline) + add_definitions(-Wno-error=maybe-uninitialized) + add_definitions(-Wno-error=unused-but-set-variable) + add_definitions(-Wno-error=unused-local-typedefs) + # add_definitions(-Wno-error=float-conversion) + else() + add_definitions(-Wstrict-aliasing=2) + add_definitions(-Wno-error=format-nonliteral) + add_definitions(-Wno-error=cast-align) + add_definitions(-Wno-error=deprecated-register) + add_definitions(-Wno-error=mismatched-tags) + add_definitions(-Wno-error=overloaded-virtual) + add_definitions(-Wno-error=unused-private-field) + add_definitions(-Wno-error=unreachable-code) + # add_definitions(-Wno-error=discarded-qualifiers) + endif() + + # In Linux default now is not export symbols + # add_definitions(-fvisibility=hidden) + + # stop in first error + if(FIRST_ERROR) + add_definitions(-Wfatal-errors) + endif() + + endif() + + if (NOT DEFINED EXTRA_DEF) + if(NOT WIN32 OR MINGW OR MSYS) + include(CheckCXXCompilerFlag) + CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) + CHECK_CXX_COMPILER_FLAG("-std=c++1y" COMPILER_SUPPORTS_CXX1Y) + CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) + CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) + + if(COMPILER_SUPPORTS_CXX14) + set(CMAKE_CXX_STANDARD 14) + message("-- C++14 Enabled") + elseif(COMPILER_SUPPORTS_CXX11) + set(CMAKE_CXX_STANDARD 11) + message("-- C++11 Enabled") + elseif(COMPILER_SUPPORTS_CXX0X) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") + message("-- C++0x Enabled") + else() + message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") + endif() + endif() + else() + add_definitions(${EXTRA_DEF}) + endif() + + # TODO: need different combinations of artifacts (coverage=off / coverage=on, etc ...) 
+ # if ((DEFINED COVERAGE) AND (COVERAGE STREQUAL "TRUE")) + # https://github.com/google/sanitizers/wiki/AddressSanitizerAsDso + # flags + if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 --coverage") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-elide-constructors") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-inline") + endif() + + # linker flags + if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) + SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage") + endif() + # endif() + +endmacro() + +macro(generate_vcxproj_user _EXECUTABLE_NAME) + IF(MSVC) + set(project_vcxproj_user "${CMAKE_CURRENT_BINARY_DIR}/${_EXECUTABLE_NAME}.vcxproj.user") + if (NOT EXISTS ${project_vcxproj_user}) + FILE(WRITE "${project_vcxproj_user}" + "\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n" + "$(TargetDir)\n" + "WindowsLocalDebugger\n" + "\n" + "\n") + endif() + ENDIF() +endmacro() + +macro(generate_clang) + # Generate .clang_complete for full completation in vim + clang_complete + set(extra_parameters "") + get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES) + foreach(dir ${dirs}) + set(extra_parameters ${extra_parameters} -I${dir}) + endforeach() + get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY COMPILE_DEFINITIONS) + foreach(dir ${dirs}) + set(extra_parameters ${extra_parameters} -D${dir}) + endforeach() + STRING(REGEX REPLACE ";" "\n" extra_parameters "${extra_parameters}") + FILE(WRITE "${CMAKE_CURRENT_SOURCE_DIR}/.clang_complete" "${extra_parameters}\n") +endmacro() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake new file mode 100644 index 0000000..b5409fd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake @@ -0,0 +1,735 @@ +cmake_minimum_required(VERSION 2.8) +cmake_policy(SET CMP0011 NEW) +cmake_policy(SET CMP0045 OLD) + +find_program(PYTHON_EXECUTABLE NAMES python3.6 python3.5 python3 python) + +IF(NOT DEFINED CMAKI_PWD) + set(CMAKI_PWD $ENV{CMAKI_PWD}) +ENDIF() + +IF(NOT DEFINED CMAKI_INSTALL) + set(CMAKI_INSTALL $ENV{CMAKI_INSTALL}) +ENDIF() + +IF(NOT DEFINED NPP_ARTIFACTS_PATH) + set(NPP_ARTIFACTS_PATH ${CMAKI_PWD}/artifacts) +ENDIF() + +IF(NOT DEFINED CMAKE_PREFIX_PATH) + set(CMAKE_PREFIX_PATH ${NPP_ARTIFACTS_PATH}/cmaki_find_package) +ENDIF() + +IF(NOT DEFINED NPP_GENERATOR_PATH) + set(NPP_GENERATOR_PATH ${CMAKI_PATH}/../cmaki_generator) +ENDIF() + +IF(NOT DEFINED NPP_PACKAGE_JSON_FILE) + set(NPP_PACKAGE_JSON_FILE ${CMAKI_PATH}/../../artifacts.json) +ENDIF() + +if(NOT DEFINED CMAKI_IDENTIFIER OR NOT DEFINED CMAKI_PLATFORM) + set(ENV{CMAKI_INFO} ALL) + include(${CMAKI_PWD}/bin/cmaki_identifier.cmake) + set(CMAKI_IDENTIFIER "${PLATFORM}") + set(CMAKI_PLATFORM "${PLATFORM}") +endif() + +MESSAGE("CMAKI_PWD = ${CMAKI_PWD}") +MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") +MESSAGE("CMAKI_PATH = ${CMAKI_PATH}") +MESSAGE("NPP_ARTIFACTS_PATH = ${NPP_ARTIFACTS_PATH}") +MESSAGE("NPP_GENERATOR_PATH = ${NPP_GENERATOR_PATH}") +MESSAGE("NPP_PACKAGE_JSON_FILE = ${NPP_PACKAGE_JSON_FILE}") +MESSAGE("CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") +MESSAGE("CMAKE_MODULE_PATH = 
${CMAKE_MODULE_PATH}") +MESSAGE("CMAKI_IDENTIFIER = ${CMAKI_IDENTIFIER}") +MESSAGE("CMAKI_PLATFORM = ${CMAKI_PLATFORM}") + +function(cmaki_find_package) + + message("-- begin cmaki_find_package") + + set(PARAMETERS ${ARGV}) + list(LENGTH PARAMETERS ARGV_LENGTH) + list(GET PARAMETERS 0 PACKAGE) + set(VERSION_REQUEST "") + set(CALL_RECURSIVE "TRUE") + set(PARM1 "") + if(ARGV_LENGTH GREATER 1) + list(GET PARAMETERS 1 PARM1) + message("-- extra parm1: ${PARM1}") + if(PARM1 STREQUAL "NONRECURSIVE") + message("${PACKAGE} is not recursive") + set(CALL_RECURSIVE "FALSE") + else() + message("${PACKAGE} is recursive") + set(VERSION_REQUEST "${PARM1}") + endif() + endif() + + IF(NOT DEFINED CMAKI_REPOSITORY) + set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") + ENDIF() + + # 2.5. define flags + set(FORCE_GENERATION NOT "$ENV{NPP_CACHE}") + + if(VERSION_REQUEST STREQUAL "") + ## + message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE}") + ## + # 1. obtener la version actual (o ninguno en caso de no tener el artefacto) + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + set(VERSION_REQUEST "${RESULT_VERSION}") + set(EXTRA_VERSION "--version=${VERSION_REQUEST}") + else() + set(VERSION_REQUEST "") + set(EXTRA_VERSION "") + endif() + + else() + # explicit version required from parameters + set(EXTRA_VERSION "--version=${VERSION_REQUEST}") + endif() + + message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION}") + ####################################################### + # 2. obtener la mejor version buscando en la cache local y remota + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + list(GET RESULT_VERSION 0 PACKAGE_MODE) + list(GET RESULT_VERSION 1 PACKAGE_NAME) + list(GET RESULT_VERSION 2 VERSION) + message("now PACKAGE_MODE = ${PACKAGE_MODE}") + message("now PACKAGE_NAME = ${PACKAGE_NAME}") + message("now VERSION = ${VERSION}") + if(PACKAGE_MODE STREQUAL "UNSUITABLE") + set(PACKAGE_MODE "EXACT") + set(VERSION ${VERSION_REQUEST}) + message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... (error 1)") + # avoid remote cache, need build + set(FORCE_GENERATION "TRUE") + endif() + else() + set(PACKAGE_MODE "EXACT") + set(VERSION ${VERSION_REQUEST}) + message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... 
(error 2)") + # avoid remote cache, need build + set(FORCE_GENERATION "TRUE") + endif() + ####################################################### + + # cmaki_find_package of depends + message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE DEPENDS_PACKAGES + OUTPUT_STRIP_TRAILING_WHITESPACE) + + if("${CALL_RECURSIVE}") + foreach(DEP ${DEPENDS_PACKAGES}) + if(PACKAGE STREQUAL "${DEP}") + message("-- skip: ${DEP}") + else() + message("-- cmaki_find_package: ${DEP}") + cmaki_find_package("${DEP}" NONRECURSIVE) + endif() + endforeach() + endif() + + get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) + get_filename_component(package_name_version "${package_dir}" NAME) + + # 3. si no tengo los ficheros de cmake, los intento descargar + set(artifacts_dir "${NPP_ARTIFACTS_PATH}") + set(depends_bin_package "${artifacts_dir}/${PACKAGE}-${VERSION}") + set(depends_package "${artifacts_dir}/${PACKAGE}-${VERSION}") + # pido un paquete, en funcion de: + # - paquete + # - version + # - plataforma + # - modo (COMPATIBLE / EXACT) + # Recibo el que mejor se adapta a mis especificaciones + # Otra opcion es enviar todos los ficheros de cmake de todas las versiones + + set(package_cmake_filename "${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz") + set(package_marker "${CMAKE_PREFIX_PATH}/${package_name_version}/${CMAKI_IDENTIFIER}.cmake") + set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") + set(package_generated_file ${artifacts_dir}/${package_filename}) + + set(COPY_SUCCESFUL FALSE) + IF(EXISTS "${package_cmake_abspath}") + message("-- reusing cmake file ${package_cmake_abspath}") + set(COPY_SUCCESFUL TRUE) + else() + if(NOT "${FORCE_GENERATION}") + set(http_package_cmake_filename "${CMAKI_REPOSITORY}/download.php?file=${package_cmake_filename}") + message("-- download file: ${http_package_cmake_filename} in ${package_cmake_abspath}") + cmaki_download_file("${http_package_cmake_filename}" "${package_cmake_abspath}") + if(NOT "${COPY_SUCCESFUL}") + file(REMOVE "${package_binary_filename}") + message("Error downloading ${http_package_cmake_filename}") + endif() + else() + message("WARN: no using cache remote for: ${PACKAGE}") + endif() + endif() + + if(NOT "${COPY_SUCCESFUL}") + message("fail download") + else() + message("reused or downloaded") + endif() + + # si la descarga no ha ido bien O no quieres utilizar cache + if(NOT "${COPY_SUCCESFUL}" OR FORCE_GENERATION STREQUAL "TRUE") + + # 5. 
compilo y genera el paquete en local + message("Generating artifact ${PACKAGE} ...") + + ### + message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o") + ### + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + RESULT_VARIABLE artifacts_result + ) + if(artifacts_result) + message(FATAL_ERROR "can't create artifact ${PACKAGE}: error ${artifacts_result}") + endif() + + ####################################################### + # 6: obtengo la version del paquete creado + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + list(GET RESULT_VERSION 0 PACKAGE_MODE) + list(GET RESULT_VERSION 1 PACKAGE_NAME) + list(GET RESULT_VERSION 2 VERSION) + message("NEW! PACKAGE_MODE = ${PACKAGE_MODE}") + message("NEW! PACKAGE_NAME = ${PACKAGE_NAME}") + message("NEW! VERSION = ${VERSION}") + else() + message(FATAL_ERROR "-- not found ${PACKAGE}.") + endif() + ####################################################### + + set(package_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz) + set(package_cmake_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz) + # refresh name (NEW $VERSION is generated) + set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") + + # 7. descomprimo el artefacto + execute_process( + COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" + WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}" + RESULT_VARIABLE uncompress_result + ) + if(uncompress_result) + message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") + endif() + + # y tambien descomprimo el propio tar gz + # execute_process( + # COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_generated_file}" + # WORKING_DIRECTORY "${artifacts_dir}/" + # RESULT_VARIABLE uncompress_result2 + # ) + # if(uncompress_result2) + # message(FATAL_ERROR "Extracting ${package_generated_file} failed! Error ${uncompress_result2}") + # endif() + + # tengo el cmake pero no esta descomprimido + elseif(EXISTS "${package_cmake_abspath}" AND NOT EXISTS "${package_marker}") + + message("-- only uncompress") + ################ + message("${CMAKE_COMMAND} -E tar zxf ${package_cmake_abspath}") + ################ + + # 10. lo descomprimo + execute_process( + COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" + WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}/" + RESULT_VARIABLE uncompress_result) + if(uncompress_result) + message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") + endif() + + else() + + # tengo cmake, y esta descomprmido + message("-- nothing to do") + message("-- ${package_cmake_abspath}") + message("-- ${package_marker}") + + endif() + + + # 12. 
hacer find_package tradicional, ahora que tenemos los ficheros de cmake + if(${PACKAGE_MODE} STREQUAL "EXACT") + message("-- using ${PACKAGE} ${VERSION} in EXACT") + find_package(${PACKAGE} ${VERSION} EXACT REQUIRED) + else() + message("-- using ${PACKAGE} ${VERSION} in COMPATIBLE") + find_package(${PACKAGE} ${VERSION} REQUIRED) + endif() + + # generate json + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/save_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} --version=${VERSION} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + if(RESULT_VERSION) + message("error saving ${PACKAGE}:${VERSION} in ${artifacts_dir}") + endif() + + # 13 add includes + string(TOUPPER "${PACKAGE}" PACKAGE_UPPER) + foreach(INCLUDE_DIR ${${PACKAGE_UPPER}_INCLUDE_DIRS}) + list(APPEND CMAKI_INCLUDE_DIRS "${INCLUDE_DIR}") + endforeach() + + # 14. add libdirs + foreach(LIB_DIR ${${PACKAGE_UPPER}_LIBRARIES}) + list(APPEND CMAKI_LIBRARIES "${LIB_DIR}") + endforeach() + + # 15. add vers specific + set(${PACKAGE_UPPER}_INCLUDE_DIRS "${${PACKAGE_UPPER}_INCLUDE_DIRS}" PARENT_SCOPE) + set(${PACKAGE_UPPER}_LIBRARIES "${${PACKAGE_UPPER}_LIBRARIES}" PARENT_SCOPE) + + # 16. add vars globals + set(CMAKI_INCLUDE_DIRS "${CMAKI_INCLUDE_DIRS}" PARENT_SCOPE) + set(CMAKI_LIBRARIES "${CMAKI_LIBRARIES}" PARENT_SCOPE) + + message("-- end cmaki_find_package") + +endfunction() + +macro(cmaki_package_version_check) + # llamar a check_remote_version + # dando el nombre recibo la version + execute_process( + COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE_FIND_NAME} --version=${PACKAGE_FIND_VERSION} + WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" + OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) + list(GET RESULT_VERSION 0 RESULT) + list(GET RESULT_VERSION 1 NAME) + list(GET RESULT_VERSION 2 VERSION) + ################################### + set(PACKAGE_VERSION_${RESULT} 1) + set(${NAME}_VERSION ${VERSION}) +endmacro() + +function(cmaki_install_3rdparty) + foreach(CMAKI_3RDPARTY_TARGET ${ARGV}) + foreach(CMAKI_BUILD_TYPE ${CMAKE_CONFIGURATION_TYPES} ${CMAKE_BUILD_TYPE}) + string(TOUPPER "${CMAKI_BUILD_TYPE}" CMAKI_BUILD_TYPE_UPPER) + get_target_property(CMAKI_3RDPARTY_TARGET_TYPE ${CMAKI_3RDPARTY_TARGET} TYPE) + if(${CMAKI_3RDPARTY_TARGET_TYPE} STREQUAL "SHARED_LIBRARY") + get_target_property(CMAKI_3RDPARTY_TARGET_LOCATION ${CMAKI_3RDPARTY_TARGET} IMPORTED_LOCATION_${CMAKI_BUILD_TYPE_UPPER}) + get_target_property(CMAKI_3RDPARTY_TARGET_SONAME ${CMAKI_3RDPARTY_TARGET} IMPORTED_SONAME_${CMAKI_BUILD_TYPE_UPPER}) + get_target_property(CMAKI_3RDPARTY_TARGET_PDB ${CMAKI_3RDPARTY_TARGET} IMPORTED_PDB_${CMAKI_BUILD_TYPE_UPPER}) + if(CMAKI_3RDPARTY_TARGET_SONAME) + get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION_PATH "${CMAKI_3RDPARTY_TARGET_LOCATION}" PATH) + set(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION_PATH}/${CMAKI_3RDPARTY_TARGET_SONAME}") + endif() + get_filename_component(CMAKI_3RDPARTY_TARGET_INSTALLED_NAME "${CMAKI_3RDPARTY_TARGET_LOCATION}" NAME) + get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION}" REALPATH) + install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_LOCATION} + DESTINATION ${CMAKI_BUILD_TYPE} + CONFIGURATIONS ${CMAKI_BUILD_TYPE} + RENAME ${CMAKI_3RDPARTY_TARGET_INSTALLED_NAME}) + if((NOT UNIX) AND EXISTS ${CMAKI_3RDPARTY_TARGET_PDB}) + 
get_filename_component(CMAKI_3RDPARTY_TARGET_PDB_NAME "${CMAKI_3RDPARTY_TARGET_PDB}" NAME) + install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_PDB} + DESTINATION ${CMAKI_BUILD_TYPE} + CONFIGURATIONS ${CMAKI_BUILD_TYPE} + RENAME ${CMAKI_3RDPARTY_TARGET_PDB_NAME}) + endif() + endif() + endforeach() + endforeach() +endfunction() + +function(cmaki_download_file THE_URL INTO_FILE) + set(COPY_SUCCESFUL FALSE PARENT_SCOPE) + file(DOWNLOAD ${THE_URL} ${INTO_FILE} STATUS RET) + list(GET RET 0 RET_CODE) + if(RET_CODE EQUAL 0) + set(COPY_SUCCESFUL TRUE PARENT_SCOPE) + else() + set(COPY_SUCCESFUL FALSE PARENT_SCOPE) + endif() +endfunction() + +macro(cmaki_download_package) + + message("-- begin cmaki_download_package") + if(NOT DEFINED CMAKI_REPOSITORY) + set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") + endif() + get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) + get_filename_component(package_name_version "${package_dir}" NAME) + set(package_filename "${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz") + set(http_package_filename ${CMAKI_REPOSITORY}/download.php?file=${package_filename}) + set(artifacts_dir "${NPP_ARTIFACTS_PATH}") + get_filename_component(artifacts_dir "${artifacts_dir}" ABSOLUTE) + set(package_binary_filename "${artifacts_dir}/${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz") + set(package_uncompressed_dir "${artifacts_dir}/${package_name_version}-binary.tmp") + set(package_marker "${artifacts_dir}/${package_name_version}/${CMAKI_IDENTIFIER}") + set(package_compressed_md5 "${package_dir}/${package_name_version}-${CMAKI_IDENTIFIER}.md5") + set(_MY_DIR "${package_dir}") + set(_DIR "${artifacts_dir}/${package_name_version}") + + if(NOT EXISTS "${package_binary_filename}") + message("download ${package_binary_filename} ...") + if(EXISTS "${package_compressed_md5}") + file(READ "${package_compressed_md5}" md5sum ) + string(REGEX MATCH "[0-9a-fA-F]*" md5sum "${md5sum}") + # TODO: use md5sum (use python for download) + # cmaki_download_file("${http_package_filename}" "${package_binary_filename}" "${md5sum}" ) + message("downloading ${http_package_filename}") + cmaki_download_file("${http_package_filename}" "${package_binary_filename}") + if(NOT "${COPY_SUCCESFUL}") + file(REMOVE "${package_binary_filename}") + message(FATAL_ERROR "Error downloading ${http_package_filename}") + endif() + else() + file(REMOVE_RECURSE "${package_dir}") + file(REMOVE_RECURSE "${_DIR}") + MESSAGE(FATAL_ERROR "Checksum for ${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz not found. Rejecting to download an untrustworthy file.") + endif() + endif() + + if(NOT EXISTS "${package_marker}") + message("Extracting ${package_binary_filename} into ${package_uncompressed_dir}...") + file(MAKE_DIRECTORY "${package_uncompressed_dir}") + execute_process( + COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_binary_filename}" + WORKING_DIRECTORY "${package_uncompressed_dir}" + RESULT_VARIABLE uncompress_result) + if(uncompress_result) + message(FATAL_ERROR "Extracting ${package_binary_filename} failed! 
Error ${uncompress_result}") + endif() + file(COPY "${package_uncompressed_dir}/${package_name_version}" DESTINATION "${artifacts_dir}") + file(REMOVE_RECURSE "${package_uncompressed_dir}") + endif() + message("-- end cmaki_download_package") + +endmacro() + +function(cmaki_executable) + cmaki_parse_parameters(${ARGV}) + set(_EXECUTABLE_NAME ${_MAIN_NAME}) + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_EXECUTABLE_NAME}) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + if(WIN32) + ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) + else() + ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) + endif() + # set_target_properties(${_EXECUTABLE_NAME} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_EXECUTABLE_NAME} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_EXECUTABLE_NAME} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_EXECUTABLE_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_EXECUTABLE_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() + generate_vcxproj_user(${_EXECUTABLE_NAME}) + +endfunction() + +function(cmaki_library) + cmaki_parse_parameters(${ARGV}) + set(_LIBRARY_NAME ${_MAIN_NAME}) + source_group( "Source Files" FILES ${_SOURCES} ) + common_flags() + common_linking(${_LIBRARY_NAME}) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) + # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_LIBRARY_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_LIBRARY_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_static_library) + cmaki_parse_parameters(${ARGV}) + set(_LIBRARY_NAME ${_MAIN_NAME}) + source_group( "Source Files" 
FILES ${_SOURCES} ) + common_flags() + common_linking(${_LIBRARY_NAME}) + add_definitions(-D${_LIBRARY_NAME}_STATIC) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_library(${_LIBRARY_NAME} STATIC ${_SOURCES}) + # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_LIBRARY_NAME} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_LIBRARY_NAME} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endfunction() + +function(cmaki_test) + cmaki_parse_parameters(${ARGV}) + set(_TEST_NAME ${_MAIN_NAME}) + set(_TEST_SUFFIX "_unittest") + common_flags() + common_linking(${_TEST_NAME}${_TEST_SUFFIX}) + include_directories(node_modules) + foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) + endforeach() + IF(WITH_CONAN) + include_directories(${CONAN_INCLUDE_DIRS}) + ENDIF() + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + add_compile_options(-pthread) + endif() + endif() + add_executable(${_TEST_NAME}${_TEST_SUFFIX} ${_SOURCES}) + # set_target_properties(${_TEST_NAME}${_TEST_SUFFIX} PROPERTIES DEBUG_POSTFIX _d) + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${_DEPENDS}) + foreach(LIB_DIR ${CMAKI_LIBRARIES}) + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${LIB_DIR}) + cmaki_install_3rdparty(${LIB_DIR}) + endforeach() + IF(WITH_CONAN) + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${CONAN_LIBS}) + cmaki_install_3rdparty(${CONAN_LIBS}) + ENDIF() + install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) + if(HAVE_PTHREADS) + if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + message("-- android no need extra linkage for pthreads") + else() + target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} -lpthread) + endif() + endif() + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_TEST_NAME}${_TEST_SUFFIX} + DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} + CONFIGURATIONS ${BUILD_TYPE}) + if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) + find_program(VALGRIND "valgrind") + if(VALGRIND) + add_test( + NAME ${_TEST_NAME}_valgrind_memcheck + COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_cachegrind + COMMAND "${VALGRIND}" --tool=cachegrind $ --gmock_verbose=error + 
WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_helgrind + COMMAND "${VALGRIND}" --tool=helgrind $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_callgrind + COMMAND "${VALGRIND}" --tool=callgrind $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + add_test( + NAME ${_TEST_NAME}_valgrind_drd + COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ --gmock_verbose=error + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE} + ) + else() + message(FATAL_ERROR "no valgrind detected") + endif() + endif() + if(WIN32) + add_test( + NAME ${_TEST_NAME}${_TEST_SUFFIX} + COMMAND $ + WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE}) + else() + add_test( + NAME ${_TEST_NAME}${_TEST_SUFFIX} + COMMAND bash ../cmaki_emulator.sh $ + WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} + CONFIGURATIONS ${BUILD_TYPE}) + endif() + endforeach() + generate_vcxproj_user(${_TEST_NAME}) + +endfunction() + +macro(cmaki_google_test) + find_package(GTest REQUIRED) + find_package(GMock REQUIRED) + add_definitions(-DWITH_MAIN) + add_definitions(-DWITH_GMOCK) + set(PARAMETERS ${ARGV}) + list(GET PARAMETERS 0 _MAIN_NAME) + cmaki_test(${ARGV}) +endmacro() + +macro(cmaki_python_library) + # cmaki_find_package(python) + # cmaki_find_package(boost-python) + cmaki_library(${ARGV} PTHREADS) + cmaki_parse_parameters(${ARGV}) + set_target_properties(${_MAIN_NAME} PROPERTIES PREFIX "") + foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) + INSTALL( TARGETS ${_MAIN_NAME} + DESTINATION ${BUILD_TYPE}/lib/python3.5/lib-dynload + CONFIGURATIONS ${BUILD_TYPE}) + endforeach() +endmacro() + +macro(cmaki_boost_python_test) + # cmaki_find_package(python) + # cmaki_find_package(boost-python) + cmaki_google_test(${ARGV} PTHREADS) + cmaki_parse_parameters(${ARGV}) + set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") +endmacro() + +macro(cmaki_python_test) + # cmaki_find_package(python) + cmaki_parse_parameters(${ARGV}) + add_test( NAME ${_MAIN_NAME}_test + COMMAND ./bin/python3 ${_SOURCES} + WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) + set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "LD_LIBRARY_PATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") +endmacro() + +macro(cmaki_python_install) + # cmaki_find_package(python) + # cmaki_find_package(boost-python) + get_filename_component(PYTHON_DIR ${PYTHON_EXECUTABLE} DIRECTORY) + get_filename_component(PYTHON_PARENT_DIR ${PYTHON_DIR} DIRECTORY) + cmaki_install_inside_dir(${PYTHON_PARENT_DIR}) +endmacro() + +macro(cmaki_find_package_boost) + if(CMAKE_BUILD_TYPE MATCHES Debug) + set(Boost_DEBUG 1) + else() + set(Boost_DEBUG 0) + endif() + find_package(Boost REQUIRED) + include_directories(${Boost_INCLUDE_DIRS}) +endmacro() + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format new file mode 100644 index 0000000..008e6b0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format @@ -0,0 +1,66 @@ +--- +Language: Cpp +# BasedOnStyle: WebKit +# indent public: +AccessModifierOffset: -4 +AlignAfterOpenBracket: false 
+AlignEscapedNewlinesLeft: false +AlignOperands: false +AlignTrailingComments: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortBlocksOnASingleLine: false +AllowShortCaseLabelsOnASingleLine: false +AllowShortIfStatementsOnASingleLine: false +AllowShortLoopsOnASingleLine: false +AllowShortFunctionsOnASingleLine: All +AlwaysBreakAfterDefinitionReturnType: false +AlwaysBreakTemplateDeclarations: true +AlwaysBreakBeforeMultilineStrings: false +BreakBeforeBinaryOperators: All +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: true +BinPackParameters: true +BinPackArguments: true +ColumnLimit: 100 +ConstructorInitializerAllOnOneLineOrOnePerLine: false +ConstructorInitializerIndentWidth: 4 +DerivePointerAlignment: false +ExperimentalAutoDetectBinPacking: false +IndentCaseLabels: true +IndentWrappedFunctionNames: false +IndentFunctionDeclarationAfterType: false +MaxEmptyLinesToKeep: 2 +KeepEmptyLinesAtTheStartOfBlocks: true +NamespaceIndentation: Inner +ObjCBlockIndentWidth: 4 +ObjCSpaceAfterProperty: true +ObjCSpaceBeforeProtocolList: true +PenaltyBreakBeforeFirstCallParameter: 19 +PenaltyBreakComment: 300 +PenaltyBreakString: 1000 +PenaltyBreakFirstLessLess: 120 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 60 +PointerAlignment: Left +SpacesBeforeTrailingComments: 2 +Cpp11BracedListStyle: true +Standard: Cpp11 +IndentWidth: 4 +TabWidth: 4 +UseTab: Always +BreakBeforeBraces: Allman +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpacesInAngles: false +SpaceInEmptyParentheses: false +SpacesInCStyleCastParentheses: false +SpaceAfterCStyleCast: false +SpacesInContainerLiterals: true +SpaceBeforeAssignmentOperators: true +ContinuationIndentWidth: 4 +CommentPragmas: '^ IWYU pragma:' +ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ] +SpaceBeforeParens: ControlStatements +DisableFormat: false +... 
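Taken together, cmaki.cmake and facts.cmake above expose a small user-facing API (cmaki_setup, cmaki_find_package, cmaki_executable, cmaki_library, cmaki_google_test, ...). As a rough orientation only, a consuming project's CMakeLists.txt might call it as sketched below; the project name, source files and the `spdlog` package are placeholders, the module path assumes the scripts are vendored under node_modules as in this diff, cmaki_find_package expects NPP_SERVER and friends to be set, and cmaki_google_test additionally expects GTest/GMock to be findable.

```
cmake_minimum_required(VERSION 3.0)
project(myapp CXX)

# make cmaki.cmake / facts.cmake visible to include(); this path is an assumption
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/node_modules/npm-mas-mas/cmaki")
include(cmaki)

cmaki_setup()               # enables testing, C++14 flags and conan when WITH_CONAN is set

cmaki_find_package(spdlog)  # hypothetical artifact resolved through the NPP_SERVER cache

# first argument is the target name; the keyword sections are parsed by cmaki_parse_parameters()
cmaki_executable(myapp
	SOURCES src/main.cpp
	INCLUDES ${CMAKE_CURRENT_SOURCE_DIR}/include
	PTHREADS)

cmaki_google_test(myapp_test
	SOURCES tests/test_main.cpp)
```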
+ diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl new file mode 100644 index 0000000..3ea29e5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl @@ -0,0 +1,120 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BuildName: + BuildStamp: + Name: + Generator: + CompilerName: + OSName: + Hostname: + OSRelease: + OSVersion: + OSPlatform: + Is64Bits: + VendorString: + VendorID: + FamilyID: + ModelID: + ProcessorCacheSize: + NumberOfLogicalCPU: + NumberOfPhysicalCPU: + TotalVirtualMemory: + TotalPhysicalMemory: + LogicalProcessorsPerPhysical: + ProcessorClockFrequency: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md new file mode 100644 index 0000000..4f989c6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md @@ -0,0 +1,3 @@ +# Source +https://bitbucket.org/shackra/ctest-jenkins/ + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml new file mode 100644 index 0000000..020ec9d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml @@ -0,0 +1,4 @@ +services: docker +os: linux +script: + - ./build.sh diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE new file mode 100644 index 0000000..53546c1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
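For context on the junit/ folder above: CTest2JUnit.xsl (taken from the ctest-jenkins project) rewrites the Test.xml that `ctest -T Test` leaves under Testing/<tag>/ into JUnit-style XML that Jenkins can read. One minimal, hypothetical way to drive it is a script-mode CMake file run with `cmake -P` from the build directory; the script below is only an illustrative sketch (the output file name and the assumption that it sits next to the stylesheet are mine, not part of cmaki), and it relies on the external xsltproc tool.

```
# junit_report.cmake -- hypothetical helper, run from the build directory after `ctest -T Test`:
#   cmake -P /path/to/junit/junit_report.cmake
find_program(XSLTPROC_EXECUTABLE xsltproc)
if(NOT XSLTPROC_EXECUTABLE)
	message(FATAL_ERROR "xsltproc not found")
endif()

# CTest records the current tag (the Testing/<tag>/ subdirectory) in the first line of Testing/TAG
file(STRINGS "Testing/TAG" _tag_lines LIMIT_COUNT 1)
list(GET _tag_lines 0 _tag)

# transform CTest's Test.xml into a JUnit-style report in the build directory
execute_process(
	COMMAND "${XSLTPROC_EXECUTABLE}" "${CMAKE_CURRENT_LIST_DIR}/CTest2JUnit.xsl" "Testing/${_tag}/Test.xml"
	OUTPUT_FILE "junit-results.xml"
	RESULT_VARIABLE _result)
if(_result)
	message(FATAL_ERROR "xsltproc failed with: ${_result}")
endif()
```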
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md
new file mode 100644
index 0000000..594568c
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md
@@ -0,0 +1,11 @@
+# cmaki_docker
+
+[![Build Status](https://travis-ci.org/makiolo/cmaki_docker.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_docker)
+
+Builds and pushes multiple Docker images.
+
+```
+for image in (windows-x86, windows-x64, linux-x86, linux-x64, ...)
+    makiolo/$image = dockcross/$image + github:makiolo/cmaki_scripts/cmaki_depends.sh
+done
+```
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh
new file mode 100755
index 0000000..26e71f1
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+#/bin/bash
+prefix=$(pwd)/bin
+mkdir -p $prefix
+
+# iterate in known images
+curl https://raw.githubusercontent.com/dockcross/dockcross/master/Makefile -o dockcross-Makefile
+for image in $(make -f dockcross-Makefile display_images); do
+    if [[ $(docker images -q dockcross/$image) != "" ]]; then
+        docker rmi -f dockcross/$image
+        echo dockcross/$image removed.
+    fi
+done
+
+for image in $(make -f dockcross-Makefile display_images); do
+
+    if [[ "$image" == "manylinux-x86" ]]; then
+        continue
+    fi
+
+    if [[ "$image" == "manylinux-x64" ]]; then
+        continue
+    fi
+
+    echo "copy dockcross/$image to makiolo/$image (with script change)"
+    cat <<EOF >Dockerfile
+FROM dockcross/$image:latest
+ENV DEBIAN_FRONTEND noninteractive
+RUN curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/cmaki_depends.sh | bash
+EOF
+
+    docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
+    docker build . 
-t makiolo/$image + docker push makiolo/$image + + # clean + docker rmi -f dockcross/$image + docker rmi -f makiolo/$image +done + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt new file mode 100644 index 0000000..91cc3ac --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt @@ -0,0 +1,95 @@ +project(cmaki_generator) +cmake_minimum_required(VERSION 3.0) + +MESSAGE("-- compiler ${CMAKI_COMPILER}, platform ${CMAKI_PLATFORM}") + +include(cmaki) + +IF(CMAKE_BUILD_TYPE STREQUAL "Debug") + MESSAGE("-- Debug Mode") + SET(GLOBAL_BUILD_MODE "Debug") +ELSEIF(CMAKE_BUILD_TYPE STREQUAL "Release") + MESSAGE("-- Release Mode") + SET(GLOBAL_BUILD_MODE "Release") +ELSEIF(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") + MESSAGE("-- RelWithDebInfo Mode") + SET(GLOBAL_BUILD_MODE "RelWithDebInfo") +ELSE() + MESSAGE("-- Build mode default to Release") + MESSAGE("-- Release Mode") + SET(GLOBAL_BUILD_MODE "Release") +ENDIF() + +IF(NOT PACKAGE) + SET(PACKAGE "packagename_invalid") + MESSAGE(FATAL_ERROR "Invalid package name") +ENDIF() + +IF(NOT PACKAGE_VERSION) + SET(PACKAGE_VERSION "verson_invalid") + MESSAGE(FATAL_ERROR "Invalid version in package") +ENDIF() + +IF(NOT LIBRARY_TYPE) + SET(LIBRARY_TYPE "STATIC") +ENDIF() + +SET(PACKAGE "${PACKAGE}" CACHE STRING "Package to compile") +SET(PACKAGE_VERSION "${PACKAGE_VERSION}" CACHE STRING "Version to compile") + +SET(CMAKE_VERBOSE_MAKEFILE ON) +# Use relative paths on Windows, to reduce path size for command-line limits +if (WIN32) + set(CMAKE_USE_RELATIVE_PATHS true) + set(CMAKE_SUPPRESS_REGENERATION true) +endif() + +IF(NOT DEFINED GTC_INSTALL_PREFIX) + SET(GTC_INSTALL_PREFIX "${NPP_ARTIFACTS_PATH}/${PACKAGE}-${PACKAGE_VERSION}-${CMAKI_PLATFORM}/${PACKAGE}-${PACKAGE_VERSION}") + SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}") + SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder executables") + SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder libs") +ELSE() + SET(GTC_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") + SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") + SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/bin" CACHE PATH "Folder executables") + SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/lib" CACHE PATH "Folder libs") +ENDIF() + +MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") +MESSAGE("GTC_INSTALL_PREFIX = ${GTC_INSTALL_PREFIX}") +MESSAGE("CMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}") +MESSAGE("EXECUTABLE_OUTPUT_PATH = ${EXECUTABLE_OUTPUT_PATH}") +MESSAGE("LIBRARY_OUTPUT_PATH = ${LIBRARY_OUTPUT_PATH}") + +# gnu variables can prepend CMAKE_INSTALL_PREFIX +set(CMAKE_INSTALL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") +set(CMAKE_INSTALL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") +set(CMAKE_INSTALL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") +set(CMAKE_INSTALL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") +set(CMAKE_INSTALL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") +set(CMAKE_INSTALL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") +set(CMAKE_INSTALL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") +set(CMAKE_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") +set(CMAKE_INSTALL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") 
+set(CMAKE_INSTALL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") +set(CMAKE_INSTALL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") +set(CMAKE_INSTALL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") +set(CMAKE_INSTALL_FULL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") +set(CMAKE_INSTALL_FULL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") +set(CMAKE_INSTALL_FULL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") +set(CMAKE_INSTALL_FULL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") +set(CMAKE_INSTALL_FULL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") +set(CMAKE_INSTALL_FULL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") +set(CMAKE_INSTALL_FULL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") +set(CMAKE_INSTALL_FULL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") +set(CMAKE_INSTALL_FULL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_FULL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") +set(CMAKE_INSTALL_FULL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") +set(CMAKE_INSTALL_FULL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") +set(CMAKE_INSTALL_FULL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") +set(CMAKE_INSTALL_FULL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") +LINK_DIRECTORIES(${LIBRARY_OUTPUT_PATH}) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE new file mode 100644 index 0000000..7e79e4d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
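When GTC_INSTALL_PREFIX is not supplied, the CMakeLists.txt above derives the whole artifact layout from the package name, version and platform, and then hangs the GNU-style install directories off CMAKE_INSTALL_PREFIX. The sketch below reproduces that path arithmetic in Python purely for illustration; the function name and the example values are hypothetical.

```
# Illustrative sketch of the layout computed by the CMakeLists.txt above when
# GTC_INSTALL_PREFIX is not predefined. Function name and sample values are
# hypothetical; the path composition mirrors the set() calls in the diff.
import os

def default_install_layout(artifacts_path, package, version, platform):
    gtc_install_prefix = os.path.join(
        artifacts_path,
        '{}-{}-{}'.format(package, version, platform),
        '{}-{}'.format(package, version))
    install_prefix = os.path.join(gtc_install_prefix, platform)
    return {
        'GTC_INSTALL_PREFIX': gtc_install_prefix,
        'CMAKE_INSTALL_PREFIX': install_prefix,
        # executables and libraries both land in the per-platform folder
        'EXECUTABLE_OUTPUT_PATH': install_prefix,
        'LIBRARY_OUTPUT_PATH': install_prefix,
        # GNU-style dirs are prefixed with CMAKE_INSTALL_PREFIX, e.g. <prefix>/lib
        'CMAKE_INSTALL_LIBDIR': os.path.join(install_prefix, 'lib'),
        'CMAKE_INSTALL_INCLUDEDIR': os.path.join(install_prefix, 'include'),
    }

if __name__ == '__main__':
    # placeholder values, for illustration only
    layout = default_install_layout('artifacts', 'mypackage', '1.0.0', 'linux_64')
    for key, value in sorted(layout.items()):
        print('{} = {}'.format(key, value))
```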
+ diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md new file mode 100644 index 0000000..6b5b746 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md @@ -0,0 +1,22 @@ +# cmaki_generator [![npm version](https://badge.fury.io/js/cmaki_generator.svg)](https://badge.fury.io/js/cmaki_generator) + +gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_generator.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_generator) + +# artifacts responsability +- boost-headers +- boost-system +- boost-random +- boost-atomic +- boost-thread +- boost-chrono +- boost-context +- boost-coroutine2 +- boost-signals +- boost-test +- boost-regex +- boost-filesystem +- boost-program-options +- python +- boost-python +- boost-python-debug +- boost-serialization diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build new file mode 100755 index 0000000..c98e1d8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build @@ -0,0 +1,10 @@ +#!/bin/bash + +directory=$(dirname $0) +if hash cygpath 2>/dev/null; then + directory=$(cygpath -w ${directory}) +fi + +python "${directory}/build.py" "$@" +out=$? +exit ${out} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd new file mode 100644 index 0000000..e0ea6bd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd @@ -0,0 +1,11 @@ +@ECHO OFF +SET DIRWORK=%~dp0 + +IF EXIST "%PYTHON%" ( + rem ok +) ELSE ( + set PYTHON=python +) + +SET PATH=%~dp0\bin;%PATH% +"%PYTHON%" %DIRWORK%\build.py %* diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py new file mode 100644 index 0000000..5d86829 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py @@ -0,0 +1,757 @@ +import os +import os.path +import sys +import fnmatch +import logging +import utils +import argparse +import pipeline +import traceback +import copy +import datetime +# object package +from third_party import ThirdParty +from collections import OrderedDict +from third_party import exceptions_fail_group +from third_party import exceptions_fail_program +from third_party import alias_priority_name +from third_party import alias_priority_name_inverse +from third_party import CMAKELIB_URL +from third_party import is_valid +from third_party import is_blacklisted +from third_party import prepare_cmakefiles +# gtc stages +from purge import purge +from prepare import prepare +from compilation import compilation +from packing import packing +from run_tests import run_tests +from upload import upload +from get_return_code import get_return_code +from third_party import FailThirdParty + +# GLOBAL NO MUTABLES +image_pattern = "image.%Y.%m.%d.%H%M" + +try: + import yaml +except ImportError: + logging.error('[Warning] Not yaml library present') + logging.error('[Warning] PyYAML (python extension) is mandatory') + if 
utils.is_windows(): + logging.error('You can use pip for install:') + logging.error(' pip intall pyyaml') + sys.exit(1) + +# Global mutable +compiler_replace_maps = {} + +# Global const +yaml_common_references = 'common.yml' +yaml_collapsed_third_parties = '.3p.yml' +yaml_collapsed_final = '.data.yml' + +class Loader(yaml.Loader): + def __init__(self, stream): + self._root = os.path.split(stream.name)[0] + super(Loader, self).__init__(stream) + + def include(self, node): + filename = os.path.join(self._root, self.construct_scalar(node)) + with open(filename, 'r') as f: + return yaml.load(f, Loader) + +def amalgamation_yaml(rootdir, yamlfile=None): + Loader.add_constructor('!include', Loader.include) + + # autogeneration .data.yml + yaml_collapsed_final_abspath = os.path.join(rootdir, yaml_collapsed_final) + yaml_common_references_abspath = os.path.join(rootdir, yaml_common_references) + with open(yaml_collapsed_final_abspath, 'wt') as f: + f.write('# autogenerated file, dont edit it !!!---\n') + f.write('---\n') + # inject common.yml + f.write('%sreferences:\n' % (' '*4)) + with open(yaml_common_references_abspath, 'r') as fr: + for line in fr.readlines(): + f.write('%s%s' % (' '*8, line)) + collapse_third_parties(rootdir, yaml_collapsed_third_parties, yamlfile=yamlfile) + if yamlfile is None and not parameters.no_back_yaml: + node_modules_dir = os.path.abspath(os.path.join(rootdir, '..', '..')) + for path in os.listdir(node_modules_dir): + fullpath = os.path.join(os.path.abspath(node_modules_dir), path) + if os.path.isdir(fullpath): + cmaki_file = os.path.join(fullpath, 'cmaki.yml') + if os.path.isfile(cmaki_file): + with open(cmaki_file, 'r') as fr: + with open(yaml_collapsed_third_parties, 'a') as tp_append: + for line in fr.readlines(): + tp_append.write(line) + # inject third_parties.yml + f.write('%sthird_parties:\n' % (' '*4)) + with open(yaml_collapsed_third_parties) as ft: + for line in ft.readlines(): + # sys.stdout.write("searching {}".format(line)) + f.write('%s%s' % (' '*8, line)) + +def search_nodes_by_key(list_nodes, found_key): + nodes = [] + for key, node in list_nodes: + if key == found_key: + nodes.append(node) + return nodes + +def collapse_third_parties(rootdir, filename, yamlfile=None): + p = pipeline.make_pipe() + # begin + if yamlfile is None: + p = pipeline.find(rootdir, 3)(p) + else: + p = pipeline.echo(yamlfile)(p) + # exclusions + p = pipeline.endswith('.yml')(p) + p = pipeline.grep_v('.travis.yml')(p) + p = pipeline.grep_v('shippable.yml')(p) + p = pipeline.grep_v('appveyor.yml')(p) + p = pipeline.grep_v('codecov.yml')(p) + p = pipeline.grep_v('.github')(p) + p = pipeline.grep_v('docker-compose.yml')(p) + p = pipeline.grep_v('circle.yml')(p) + p = pipeline.grep_v('_config.yml')(p) + p = pipeline.grep_v('.circleci-matrix.yml')(p) + p = pipeline.grep_v('.build_')(p) + p = pipeline.grep_v(yaml_collapsed_final)(p) + p = pipeline.grep_v(yaml_common_references)(p) + p = pipeline.grep_v(yaml_collapsed_third_parties)(p) + p = pipeline.grep_v(' - Copy.yml')(p) + p = pipeline.info('---> (yaml found.) 
')(p) + # cat + p = pipeline.cat()(p) + # p = pipeline.info('amalgamated: ')(p) + # write + p = pipeline.write_file(filename)(p) + # end + pipeline.end_pipe()(p) + +def run_purge(solutions): + + # create pipeline + with pipeline.create() as (p, finisher): + + # feed all packages + p = pipeline.feed(packages)(p) + + # clean intermediate folders + p = pipeline.do(purge, True, parameters)(p) + + # close pipe + finisher.send(p) + +def convert_priority_to_integer(priority): + if priority is not None: + error = False + if priority in alias_priority_name_inverse: + priority = alias_priority_name_inverse[priority] + else: + try: + priority_integer = int(priority) + if priority_integer in alias_priority_name: + priority = priority_integer + else: + error = True + except ValueError: + error = True + if error: + logging.error('Invalid priority name: %s' % priority) + sys.exit(1) + return priority + +def show_results(parameters, groups_ordered, rets, unittests): + # show final report + anyFail = 0 + if len(rets) > 0: + logging.info('-' * 80) + logging.info('') + for name in rets: + state = rets[name] + if state != "OK": + anyFail = 1 + + # package with unittests? + if name in unittests: + try: + result_test = unittests[name] + except KeyError: + result_test = 'No unittest found' + + if state != "OK": + logging.info("Compiled %30s - STATUS: %15s" % (name, state)) + else: + # only want know test result if is OK + logging.info("Compiled %30s - STATUS: %15s - TESTS: %s" % (name, state, result_test)) + else: + logging.info("Compiled %30s - STATUS: %15s" % (name, state)) + + logging.info('') + logging.info( '-'* 80) + else: + anyFail = 1 + logging.error('No results generated.') + + # any have exceptions ? + have_exceptions = False + for _, packages in groups_ordered: + for node in packages: + if len(node.exceptions) > 0: + have_exceptions = True + + if have_exceptions: + logging.error("---------- begin summary of exceptions ------------------------") + # show postponed exceptions + for _, packages in groups_ordered: + for node in packages: + if len(node.exceptions) > 0: + # something was wrong + anyFail = 1 + # show exceptions of this package + package = node.get_package_name() + version = node.get_version() + logging.error("package %s (%s) with exceptions" % (package, version)) + i = 0 + for exc_type, exc_value, exc_traceback in node.exceptions: + logging.error("---- Exception #%d / %d ----------" % (i+1, len(node.exceptions))) + traceback.print_exception(exc_type, exc_value, exc_traceback) + logging.error("----------------------------------") + i += 1 + logging.error("---------- end summary of exceptions ------------------------") + return anyFail + +def clean_subset(solutions): + groups = copy.deepcopy(solutions) + # 2/4: remove solutions are subset of other solution + for solution1 in solutions: + for solution2 in solutions: + if solution1 != solution2: + match = True + for node in solution1: + if node not in solution2: + match = False + break + if match and (solution1 in groups): + groups.remove(solution1) + return groups + +def init_parameter_path(value, default): + if value is None: + value = default + else: + # expand variables in no-windows + if not utils.is_windows(): + value = value.replace('~', utils.get_real_home()) + value = os.path.abspath(value) + return value + + +def parse_arguments(): + + parser = argparse.ArgumentParser(prog=""" + +cmaki_generator: + + Can build artifacts in a easy way. Each third-party need a block definition in yaml. 
This block contain all need information necessary for download, build, testing and packing. + +usage:""") + group_main = parser.add_argument_group('basic usage') + group_main.add_argument('packages', metavar='packages', type=str, nargs='*', + help='name (or list names) third party') + group_main.add_argument('--plan', '--dry-run', dest='plan', action='store_true', + help='Show packages plan (like a dry-run)', default=False) + group_main.add_argument('--server', dest='server', help='artifact server', default=None) + group_main.add_argument('--no-back-yaml', dest='no_back_yaml', action='store_true', help='no search back yaml', + default=False) + group_layer = group_main.add_mutually_exclusive_group() + group_layer.add_argument('--layer', dest='priority', + help='filter by layername. Valid values: (minimal|tools|third_party)', default=None) + group_layer.add_argument('--no-layer', dest='no_priority', + help='negation filter by layername. Valid values: (minimal|tools|third_party)', + default=None) + # group_main.add_argument('-t', '--tag', action='append', metavar='tag', type=str, help='NOT IMPLEMMENTED YET: filter tag third party') + group_padawan = parser.add_argument_group('padawan') + group_purge = group_padawan.add_mutually_exclusive_group() + group_purge.add_argument('--no-purge', dest='no_purge', action='store_true', help='remove purge from pipeline', + default=False) + group_purge.add_argument('--only-purge', dest='only_purge', action='store_true', + help='execute only purge in pipeline', default=False) + group_prepare = group_padawan.add_mutually_exclusive_group() + group_prepare.add_argument('--no-prepare', dest='no_prepare', action='store_true', + help='remove prepare from pipeline', default=False) + group_prepare.add_argument('--only-prepare', dest='only_prepare', action='store_true', + help='execute only prepare in pipeline', default=False) + group_compilation = group_padawan.add_mutually_exclusive_group() + group_compilation.add_argument('--no-compilation', dest='no_compilation', action='store_true', + help='remove compilation from pipeline', default=False) + group_compilation.add_argument('--only-compilation', dest='only_compilation', action='store_true', + help='execute only compilation in pipeline', default=False) + group_packing = group_padawan.add_mutually_exclusive_group() + group_packing.add_argument('--no-packing', dest='no_packing', action='store_true', + help='remove packing from pipeline', default=False) + group_packing.add_argument('--only-packing', dest='only_packing', action='store_true', + help='execute only packing in pipeline', default=False) + group_run_tests = group_padawan.add_mutually_exclusive_group() + group_run_tests.add_argument('--no-run-tests', dest='no_run_tests', action='store_true', + help='remove run_tests from pipeline', default=False) + group_run_tests.add_argument('--only-run-tests', dest='only_run_tests', action='store_true', + help='execute only run_tests in pipeline', default=False) + group_upload = group_padawan.add_mutually_exclusive_group() + group_upload.add_argument('--no-upload', dest='no_upload', action='store_true', help='remove upload from pipeline', + default=False) + group_upload.add_argument('--only-upload', dest='only_upload', action='store_true', + help='execute only upload in pipeline', default=False) + # creador de third parties + group_jedi = parser.add_argument_group('jedi') + group_jedi.add_argument('-o', '--only', dest='build_only', action='store_true', + help='build only explicit packages and not your depends') + 
group_jedi.add_argument('-v', '--verbose', action='count', help='verbose mode', default=0) + group_jedi.add_argument('-q', '--quiet', dest='quiet', action='store_true', help='quiet mode', default=False) + group_jedi.add_argument('-d', '--debug', action='store_true', help='Ridiculous debugging (probably not useful)') + group_jedi.add_argument('--purge-if-fail', dest='purge_if_fail', action='store_true', + help='purge even if a package finish with fail', default=False) + group_jedi.add_argument('--with-svn', dest='with_svn', help='svn executable', default=None) + group_jedi.add_argument('--fast', dest='fast', action='store_true', default=False, help=argparse.SUPPRESS) + group_jedi.add_argument('--log', dest='log', help='specified full path log (default is "gtc.log")', + default='gtc.log') + group_jedi.add_argument('--no-packing-cmakefiles', action='store_true', dest='no_packing_cmakefiles', + help='no packing cmakefiles', default=False) + group_jedi.add_argument('--blacklist', dest='blacklist', + help='third party in quarantine (default is $ROOTDIR + "blacklist.txt")', default=None) + group_jedi.add_argument('--no-blacklist', action='append', dest='no_blacklist', + help='list packages (separated with comma), for annular blacklist effect.', default=[]) + group_master_jedi = parser.add_argument_group('master jedi') + group_master_jedi.add_argument('--rootdir', dest='rootdir', + help='input folder with yamls, is recursive (default is current directory)', + default=None) + group_master_jedi.add_argument('--prefix', dest='prefix', + help='output folder where packages will be generated (default is $ROOTDIR + "artifacts")', + default=None) + group_master_jedi.add_argument('--cmakefiles', dest='cmakefiles', + help='input folder with cmake scripts (default is $PREFIX + "cmakelib")', + default=None) + group_master_jedi.add_argument('--third-party-dir', dest='third_party_dir', + help='output folder for cmakefiles (default is $CMAKEFILES + "3rdparty")', + default=None) + group_master_jedi.add_argument('--depends', dest='depends', help='json for save versions', default=None) + group_master_jedi.add_argument('--yaml', dest='yaml', help='unique file with third party to compile', default=None) + parameters = parser.parse_args() + ''' + TODO: + refactor: + prefix = DEPENDS_PATH (cmake3p) (artifacts) + cmakefiles = CMAKI_PATH, CMAKE_MODULE_PATH (cmaki, cmaki_find_package) + third-party-dir = CMAKE_PREFIX_PATH (directorio artifacts/cmaki_find_package) (3rdparty) + rootdir = ARTIFACTS_PATH, es la base de donde esta build.py (cmaki_generator) (scripts de generacion) tambien podria ser CMAKI_PWD + CMAKI_INSTALL: donde se espera tener instalado el cmaki_identifier + ''' + + cmaki_pwd = os.environ.get('CMAKI_PWD', os.getcwd()) + cmaki_install = os.environ.get('CMAKI_INSTALL', os.path.join(cmaki_pwd, 'bin')) + + ''' + axiomas: + - cmaki_pwd + - cmaki_install + - cmaki + + reglas: + - rootdir = cmaki/../cmaki_generator + - prefix = cmaki_pwd/artifacts + - third-party-dir = prefix/cmaki_find_package + - depends = cmaki_pwd/depends.json + - blacklist = rootdir/blacklist.txt + ''' + + + parameters.rootdir = init_parameter_path(parameters.rootdir, os.getcwd()) + parameters.prefix = init_parameter_path(parameters.prefix, os.path.join(cmaki_pwd, 'artifacts')) + parameters.third_party_dir = init_parameter_path(parameters.third_party_dir, os.path.join(parameters.prefix, 'cmaki_find_package')) + parameters.cmakefiles = init_parameter_path(parameters.cmakefiles, os.path.join(parameters.rootdir, '..', 'cmaki')) + 
parameters.blacklist = init_parameter_path(parameters.blacklist, os.path.join(parameters.rootdir, 'blacklist.txt')) + parameters.depends = init_parameter_path(parameters.depends, os.path.join(cmaki_pwd, 'depends.json')) + + # convert priority to int + parameters.priority = convert_priority_to_integer(parameters.priority) + parameters.no_priority = convert_priority_to_integer(parameters.no_priority) + if parameters.only_purge: + parameters.no_purge = False + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_prepare: + parameters.no_purge = True + parameters.no_prepare = False + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_compilation: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = False + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_packing: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = False + parameters.no_run_tests = True + parameters.no_upload = True + elif parameters.only_run_tests: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = False + parameters.no_upload = True + elif parameters.only_upload: + parameters.no_purge = True + parameters.no_prepare = True + parameters.no_compilation = True + parameters.no_packing = True + parameters.no_run_tests = True + parameters.no_upload = False + + if parameters.server is None: + if 'NPP_SERVER' not in os.environ: + logging.warning('Using artifacts server by default. 
If you need, can explicit define environment var NPP_SERVER') + os.environ['NPP_SERVER'] = 'http://artifacts.myftp.biz' + parameters.server = os.environ['NPP_SERVER'] + + + if 'NPP_CACHE' not in os.environ: + logging.warning('Using enablibing npm++ cache by default.') + os.environ['NPP_CACHE'] = 'TRUE' + + return parameters + + +if __name__ == '__main__': + + parameters = parse_arguments() + + # prepare logging + if parameters.debug: + utils.setup_logging(logging.DEBUG, parameters.log) + else: + utils.setup_logging(logging.INFO, parameters.log) + + if parameters.verbose: + logging.info('parameters = {}'.format(parameters)) + + if not parameters.quiet: + logging.info('---- MODE: {}'.format( os.environ['MODE'] )) + logging.info('---- CMAKI_PWD: {}'.format( os.environ['CMAKI_PWD'] )) + logging.info('---- CMAKI_INSTALL: {}'.format( os.environ['CMAKI_INSTALL'] )) + logging.info('---- rootdir: {}'.format(parameters.rootdir)) + logging.info('---- prefix: {}'.format(parameters.prefix)) + logging.info('---- cmakefiles: {}'.format(parameters.cmakefiles)) + logging.info('---- third_party_dir: {}'.format(parameters.third_party_dir)) + logging.info('---- blacklist: {}'.format(parameters.blacklist)) + logging.info('---- depends: {}'.format(parameters.depends)) + + + + # fetch remotes yaml + # i = 0 + # for package in parameters.packages: + # if package.startswith('github://'): + # repo = package[len('github://'):] + # utils.trymkdir('github') + # yml_file = os.path.join('github', '{}.yml'.format(repo.replace('/', '_'))) + # if os.path.isfile(yml_file): + # utils.tryremove(yml_file) + # try: + # download_from_url('https://raw.githubusercontent.com/{}/master/cmaki.yml'.format(repo), yml_file) + # except urllib2.HTTPError: + # logging.error('not found cmaki.yml in {}'.format(package)) + # sys.exit(1) + # parameters.packages[i] = repo.split('/')[1] + # i += 1 + + prepare_cmakefiles(parameters.cmakefiles) + + # generate amalgaimation yaml + amalgamation_yaml(parameters.rootdir, parameters.yaml) + + # load yaml to python + with open(yaml_collapsed_final, 'rt') as fy: + third_parties_data_yaml = yaml.load(fy, Loader) + + # generate list of tuples (key, parameters) + count = 0 + third_parties_data = [] + for third in third_parties_data_yaml['third_parties']: + for key in third: + parms = third[key] + third_parties_data.append( (key, parms) ) + count += 1 + + logging.info('Found {} packages.'.format(count)) + logging.info('Package requested: {}'.format(parameters.packages)) + + if count == 1 and (len(parameters.packages) == 0): + parameters.packages = [ third_parties_data[0][0] ] + + # create nodes and choose selected by filter and mask + nodes = [] + selected = [] + for key, parms in third_parties_data: + node = ThirdParty(parameters, key, parms) + # define variables for unused projects + package = node.get_package_name() + + # fill compiler_replace_maps + node.apply_replace_maps(compiler_replace_maps) + + if (node.is_valid() + and (parameters.priority is None or (parameters.priority == node.get_priority())) + and (parameters.no_priority is None or (parameters.no_priority != node.get_priority()))): + nodes.append( (key, node) ) + if (parameters.packages == ['.'] or parameters.packages == ['*']): + selected.append( (key, node) ) + elif ((parameters.packages == ['all']) and (not node.get_exclude_from_all())): + selected.append( (key, node) ) + else: + for exp in parameters.packages: + if fnmatch.fnmatch(key.lower(), exp.lower()): + selected.append( (key, node) ) + + logging.info('Selected {} 
packages.'.format(len(selected))) + + # create relations + for key, parms in third_parties_data: + try: + depends = parms['depends'] + mask = parms['mask'] + # depends valid + valid = is_valid(key, mask) + # depends blacklisted + blacklisted = is_blacklisted(parameters.blacklist, parameters.no_blacklist, key) + if (depends is not None) and valid and (not blacklisted): + for depend in depends: + nodes_key = search_nodes_by_key(nodes, key) + nodes_depend = search_nodes_by_key(nodes, depend) + for nk in nodes_key: + for nd in nodes_depend: + nk.needs(nd) + except KeyError: + # no need create relations + pass + + + # 1/7: Generate solutions in each node + solutions = [] + for key, select_node in selected: + resolved = [] + if not parameters.build_only: + select_node.resolver(resolved, []) + solutions.append( resolved ) + else: + solutions.append( [select_node] ) + + + # 2/7: clean subset + groups = clean_subset(solutions) + + + # 3/7: merge solutions with same root + sols3 = {} + for packages in groups: + first = packages[0] + if first not in sols3: + sols3[first] = [] + chunk = sols3[first] + for node in packages: + if node != first: + if node not in chunk: + chunk.append(node) + + + # 4/7: write final plan + groups = [] + for key, value in sols3.items(): + newsolution = [key] + for node in value: + newsolution.append(node) + groups.append(newsolution) + + + # 5/7: clean subset + groups = clean_subset(groups) + + # 6/7: sort groups + groups_ordered = [] + for packages in groups: + priority_total = 0 + for node in packages: + priority_total += node.get_priority() + priority_group = (priority_total / len(packages)) + groups_ordered.append( (priority_group, packages) ) + groups_ordered.sort(key=lambda tup: tup[0], reverse=False) + + # 7/7: validate groups + for priority_total, packages in groups_ordered: + if len(packages) > 0: + priority_initial = packages[0].get_priority() + for node in packages: + if priority_initial != node.get_priority(): + logging.error('[ERROR] You are mixing packages of different layers.') + logging.error('Invalid priority (%d) in package %s, expected %d:' % (node.get_priority(), node.get_package_name(), priority_initial)) + logging.error('Any in group have bad depends:') + for node in packages: + sys.stdout.write('%s, ' % node.get_package_name()) + sys.stdout.write('\n') + sys.exit(1) + + # show groups in --plan + if len(groups_ordered) > 0: + priority_prev = groups_ordered[0][0] + i = 0 + for priority_total, packages in groups_ordered: + if parameters.quiet: + j = 0 + for node in packages: + sys.stdout.write("%s" % node.get_package_name()) + if ((len(packages)-1) != j): + sys.stdout.write(";") + j += 1 + sys.stdout.write('\n') + else: + if (priority_total > priority_prev) or (i == 0): + if priority_total in alias_priority_name: + layer_name = alias_priority_name[priority_total] + else: + layer_name = '%d' % priority_total + sys.stdout.write('\nLayer: %s\n\n' % layer_name) + sys.stdout.write("\t[") + j = 0 + for node in packages: + sys.stdout.write("%s" % node.get_package_name()) + if ((len(packages)-1) != j): + sys.stdout.write(", ") + j += 1 + sys.stdout.write("]") + sys.stdout.write('\n') + + priority_prev = priority_total + i += 1 + sys.stdout.write('\n') + sys.stdout.flush() + else: + logging.warning('No results.') + # with --plan flag is like use --dry-run + if parameters.plan: + sys.exit(0) + + try: + rets = OrderedDict() + unittests = OrderedDict() + skipping_if_priority_gt = 999 + announce_once = False + # + # pipeline: prepare, compile, packing, run_tests + # 
+ for priority_group, packages in groups_ordered: + + if priority_group > skipping_if_priority_gt: + if not announce_once: + logging.error("ignoring group because some previous group are failing:") + logging.warning('\tgroup is formed by:') + announce_once = True + else: + logging.warning('') + for node in packages: + logging.warning(' -- %s' % node.get_package_name()) + continue + + if len(packages) > 1: + logging.info('--- Start group ---') + for node in packages: + logging.info('- %s' % node.get_package_name()) + # prepare include scripts + node.generate_scripts_headers(compiler_replace_maps) + + try: + if not parameters.no_purge: + run_purge(packages) + + # create pipeline + p = pipeline.make_pipe() + + # feed third parties + p = pipeline.feed(packages)(p) + + if not parameters.no_prepare: + # download sources + p = pipeline.do(prepare, False, parameters, compiler_replace_maps)(p) + + if not parameters.no_compilation: + # ./configure && make (configuration and compilation) + p = pipeline.do(compilation, False, parameters, compiler_replace_maps)(p) + + if not parameters.no_packing: + # packing (generate .tar.gz) + p = pipeline.do(packing, False, parameters, compiler_replace_maps)(p) + + if not parameters.no_run_tests: + # execute unittests and save results in "unittests" + p = pipeline.do(run_tests, False, parameters, compiler_replace_maps, unittests)(p) + + if not parameters.no_upload: + # upload artifacts + p = pipeline.do(upload, False, parameters, compiler_replace_maps)(p) + + # save results in "rets" + p = get_return_code(parameters, rets)(p) + + # close pipe + pipeline.end_pipe()(p) + + except FailThirdParty as e: + skipping_if_priority_gt = priority_group + logging.error("stopping full group.") + + except exceptions_fail_group: + logging.warning('Fatal exception in group:') + for node in packages: + logging.warning('-- %s' % node.get_package_name()) + + finally: + # only purge when you are executing a full group + if (not parameters.build_only) and (not parameters.no_purge): + if parameters.purge_if_fail: + run_purge(packages) + else: + # purge only if all packages are ok + ret = 0 + for node in packages: + ret += node.ret + + if ret == 0: + run_purge(packages) + else: + if len(packages) > 1: + logging.warning('Any in group is failing. No purge next group:') + for node in packages: + logging.warning(' %s' % node.get_package_name()) + else: + logging.warning('No purge %s because finished with fail' % node.get_package_name()) + + except exceptions_fail_program: + logging.warning('Force explicit exit ...') + finally: + ret = show_results(parameters, groups_ordered, rets, unittests) + sys.exit(ret) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py new file mode 100644 index 0000000..4ab073a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py @@ -0,0 +1,233 @@ +import os +import sys +import logging +import argparse +from requests import get # to make GET request +from io import StringIO +import csv +import utils +import functools + +version_separator = '.' 
+version_count_max = 4 + + +# def read_remote_csv(url): +# fp = urllib.request.urlopen(url) +# mybytes = fp.read() +# content = mybytes.decode("utf8") +# fp.close() +# return content + + +def read_remote_csv(url): + response = get(url) + response = response.content.decode("utf8") + return response + + +def version_to_tuple(version_str): + try: + if (version_str is not None) and (len(version_str) > 0): + count = len(version_str.split(version_separator)) + list_data = [int(x) for x in version_str.split(version_separator)] + zeros = [0 for x in range(version_count_max - count)] + list_data.extend(zeros) + return tuple(list_data) + else: + return None + except ValueError: + return None + + +class package(object): + def __init__(self, name, version, local): + self._name = name + self._version = version_to_tuple(version) + self._local = local + + def __repr__(self): + if self._version is not None: + list_version = list(self._version) + list_version = [str(x) for x in list_version] + join_version = version_separator.join(list_version) + else: + join_version = "last" + return "%s;%s" % (self._name, join_version) + + def __eq__(self, other): + return (self._name == other._name) or (self._name == '.') or (other._name == '.') + + def __ne__(self, other): + return not self.__eq__(other) + + def is_same_version(self, other): + return self._version == other._version + + def get_name(self): + return self._name + + def get_version(self): + return self._version + + def is_local(self): + return self._local + + +def sort_versions(local_swap): + if not local_swap: + one = 1 + else: + one = -1 + + def cmp(a, b): + if a.get_version() < b.get_version(): + return 1 + elif a.get_version() > b.get_version(): + return -1 + else: + if a.is_local() and not b.is_local(): + return -one + elif a.is_local() and b.is_local(): + return one + elif not a.is_local() and b.is_local(): + return one + else: + return one + return cmp + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--artifacts', dest='artifacts', help='3rdparty path with cmakefiles', default=None) + parser.add_argument('--server', dest='server', help='artifact server', default=None) + """ + Existe un valor especial de name ".". 
Sirve para hacer un listado de todos los artefactos + """ + parser.add_argument('--name', required=True, dest='name', help='name package', default=None) + """ + La version fijada tiene la siguiente prioridad: + - Version fijada mediante parametros + - Version fijada mediante fichero de dependencias + - Version ultima + """ + parser.add_argument('--version', dest='version', help='version package fixed', default=None) + # TODO: packagename-1.0.0.0-windows_32-msvc_2015-debug + # --platform deberia filtrar artefactos compatibles con "MI PLATAFORMA" + parser.add_argument('--platform', dest='platform', help='platform specified', default=None) + # --compiler deberia filtrar artefactos compatibles con "MI COMPILADOR" + parameters = parser.parse_args() + + package_request = package(parameters.name, parameters.version, True) + packages_found = [] + + if parameters.artifacts is not None: + # local + utils.trymkdir(parameters.artifacts) + for path in os.listdir(parameters.artifacts): + full_path = os.path.join(parameters.artifacts, path) + # directorios que contengan "-" + if os.path.isdir(full_path) and (full_path.find('-') != -1): + basename = os.path.basename(full_path) + try: + separator = basename.rindex('-') + package_name = basename[:separator] + package_version = basename[separator+1:] + new_package = package(package_name, package_version, True) + if new_package == package_request: + packages_found.append(new_package) + except ValueError: + pass # happen with 3rdpartyversions + + """ + Buscar paquetes recien generados + """ + if parameters.artifacts is not None: + # local + basename = None + for path in os.listdir(parameters.artifacts): + full_path = os.path.join(parameters.artifacts, path) + terminator = '-cmake.tar.gz' + if os.path.isfile(full_path) and (full_path.endswith(terminator)): + if parameters.platform is None: + logging.error('Platform is needed!') + sys.exit(1) + terminator = '-%s-cmake.tar.gz' % parameters.platform + basename = os.path.basename(full_path) + try: + if basename is not None: + separator = basename.rindex(terminator) + basename = basename[:separator] + separator = basename.rindex('-') + package_name = basename[:separator] + package_version = basename[separator+1:] + new_package = package(package_name, package_version, True) + if new_package == package_request: + packages_found.append(new_package) + except ValueError: + # not found platform in file + pass + + + if parameters.server is not None: + try: + if not parameters.server.endswith('?quiet'): + parameters.server = parameters.server + '/' + '?quiet' + csv_content = read_remote_csv(parameters.server) + reader = csv.reader(StringIO(csv_content), delimiter=';') + i = 0 + for row in reader: + if len(row) >= 2: + if i > 0: + package_name = row[0] + package_version = row[1] + package_platform = row[2] + new_package = package(package_name, package_version, False) + if (parameters.platform is None) or (parameters.platform == package_platform): + if new_package == package_request: + packages_found.append(new_package) + i += 1 + except IOError: + logging.debug('error in cache artifacts: %s' % parameters.server) + + + if len(packages_found) > 0: + + if parameters.version is None: + """ + Cuando no hay version, ordeno de mayor a menor. + Al pasar False al comparador aparece primero local y luego remote en caso de ser la misma version. + Selecciona el primero y sale. 
+ """ + for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(False))): + if package_request.is_same_version(package): + print("EXACT;%s;%s" % (package, package.get_version())) + else: + print("COMPATIBLE;%s;%s" % (package, package.get_version())) + if parameters.name != '.': + sys.exit(0) + else: + """ + Cuando se especifica una version minima + Se ordena a la inversa, es decir de menor a mayor. + Se coge el primer paquete que cumple la restriccion de version. + Al pasar True al comparador hace que en caso de empate se mantenga a pesar del reverse que + aparece primero versiones locales y luego las remotas. + """ + for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(True)), reverse=True): + if package.get_version() >= package_request.get_version(): + if package_request.is_same_version(package): + print("EXACT;%s;%s" % (package, package.get_version())) + else: + print("COMPATIBLE;%s;%s" % (package, package.get_version())) + if parameters.name != '.': + sys.exit(0) + else: + print("UNSUITABLE;;") + sys.exit(1) + +# if __name__ == '__main__': +# csv_content = read_remote_csv('http://localhost:8080') +# reader = csv.reader(StringIO(csv_content), delimiter=';') +# print(list(reader)) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml new file mode 100644 index 0000000..11a2c76 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml @@ -0,0 +1,498 @@ +compilation_environments: &compilation_environments + windows_32-msvc_msc_ver_*-*: + generator: "Visual Studio 15 2017" + ext_dyn: dll + ext_sta: lib + windows_64-msvc_msc_ver_*-*: + generator: "Visual Studio 15 2017 Win64" + ext_dyn: dll + ext_sta: lib + windows_32-msvc_2015-*: + generator: "Visual Studio 14 2015" + ext_dyn: dll + ext_sta: lib + windows_64-msvc_2015-*: + generator: "Visual Studio 14 2015 Win64" + ext_dyn: dll + ext_sta: lib + windows_32-msvc_2017-*: + generator: "Visual Studio 15 2017" + ext_dyn: dll + ext_sta: lib + windows_64-msvc_2017-*: + generator: "Visual Studio 15 2017 Win64" + ext_dyn: dll + ext_sta: lib + windows_32-gcc_4-*: + generator: "Unix Makefiles" + ext_dyn: dll.a + ext_sta: a + windows_64-gcc_4-*: + generator: "Unix Makefiles" + ext_dyn: dll.a + ext_sta: a + linux_*_glibc_2.*-*_*-*: + generator: "Unix Makefiles" + ext_dyn: so + ext_sta: a + macos_64-clang_*-*: + generator: "Unix Makefiles" + ext_dyn: dylib + ext_sta: a + android_arm_api_*-gcc_*-*: + generator: "Unix Makefiles" + ext_dyn: so + ext_sta: a + +thirdparty_defaults: &thirdparty_defaults + platforms: + <<: *compilation_environments + version: null + version_manager: git + mask: wlmea + mode: dri + depends: null + source: skip + packing: true + build_windows: + | + @echo off + set CMAKI_INSTALL=%SELFHOME% + npm install + unittest: + | + int main() { return 0; } + priority: 30 + +library_dynamic: &library_dynamic + common: &library_dynamic_common + include: + - $PLATFORM/include + - include + windows: &library_dynamic_windows + <<: *library_dynamic_common + dynamic: + debug: + dll: + /*$TARGET*.dll/ + lib: + /*$TARGET*.lib/ + pdb: + /*$TARGET*.pdb/ + relwithdebinfo: + dll: + /*$TARGET*.dll/ + lib: + /*$TARGET*.lib/ + pdb: + /*$TARGET*.pdb/ + release: + dll: + /*$TARGET*.dll/ + lib: + /*$TARGET*.lib/ + pdb: + /*$TARGET*.pdb/ + + unix: &library_dynamic_unix + <<: *library_dynamic_common + dynamic: + 
debug: + so: + - /lib*$TARGET*.$EXT_DYN/ + - /*$TARGET*.$EXT_DYN/ + relwithdebinfo: + so: + - /lib*$TARGET*.$EXT_DYN/ + - /*$TARGET*.$EXT_DYN/ + release: + so: + - /lib*$TARGET*.$EXT_DYN/ + - /*$TARGET*.$EXT_DYN/ + windows_*-msvc_*-*: + <<: *library_dynamic_windows + default: + <<: *library_dynamic_unix + +executable: &executable + windows: &executable_windows_common + executable: + release: + bin: + /*${TARGET}.exe/ + unix: &executable_unix_common + executable: + release: + bin: + /*${TARGET}/ + windows_*-msvc_*-*: + <<: *executable_windows_common + default: + <<: *executable_unix_common + +executable_exact: &executable_exact + windows: &executable_exact_windows_common + executable: + release: + bin: + - ${TARGET}.exe + - bin/${TARGET}.exe + - dll/${TARGET}.exe + debug: + bin: + - ${TARGET}.exe + - bin/${TARGET}.exe + - dll/${TARGET}.exe + unix: &executable_exact_unix_common + executable: + release: + bin: + - $TARGET + - bin/$TARGET + - dll/$TARGET + debug: + bin: + - $TARGET + - bin/$TARGET + - dll/$TARGET + windows_*-msvc_*-*: + <<: *executable_exact_windows_common + default: + <<: *executable_exact_unix_common + +library_dynamic_exact: &library_dynamic_exact + common: &library_dynamic_exact_common + include: + - $PLATFORM/include + - include + windows: &library_dynamic_exact_windows + <<: *library_dynamic_exact_common + dynamic: + debug: + dll: + - ${TARGET}d.dll + - bin/${TARGET}d.dll + - Debug/${TARGET}d.dll + - dll/${TARGET}d.dll + - ${TARGET}_D.dll + - bin/${TARGET}_D.dll + - Debug/${TARGET}_D.dll + - dll/${TARGET}_D.dll + - $TARGET.dll + - bin/$TARGET.dll + - Debug/$TARGET.dll + - dll/$TARGET.dll + lib: + - ${TARGET}d.lib + - lib/${TARGET}d.lib + - bin/${TARGET}d.lib + - Debug/${TARGET}d.lib + - dll/${TARGET}d.lib + - ${TARGET}_D.lib + - lib/${TARGET}_D.lib + - bin/${TARGET}_D.lib + - Debug/${TARGET}_D.lib + - dll/${TARGET}_D.lib + - $TARGET.lib + - lib/$TARGET.lib + - bin/$TARGET.lib + - Debug/$TARGET.lib + - dll/$TARGET.lib + pdb: + - ${TARGET}d.pdb + - pdb/${TARGET}d.pdb + - bin/${TARGET}d.pdb + - Debug/${TARGET}d.pdb + - dll/${TARGET}d.pdb + - ${TARGET}_D.pdb + - pdb/${TARGET}_D.pdb + - bin/${TARGET}_D.pdb + - Debug/${TARGET}_D.pdb + - dll/${TARGET}_D.pdb + - $TARGET.pdb + - pdb/$TARGET.pdb + - bin/$TARGET.pdb + - Debug/$TARGET.pdb + - dll/$TARGET.pdb + relwithdebinfo: + dll: + - $TARGET.dll + - bin/$TARGET.dll + - RelWithDebInfo/$TARGET.dll + - dll/$TARGET.dll + lib: + - $TARGET.lib + - lib/$TARGET.lib + - bin/$TARGET.lib + - RelWithDebInfo/$TARGET.lib + - dll/$TARGET.lib + pdb: + - $TARGET.pdb + - pdb/$TARGET.pdb + - bin/$TARGET.pdb + - RelWithDebInfo/$TARGET.pdb + - dll/$TARGET.pdb + release: + dll: + - $TARGET.dll + - bin/$TARGET.dll + - Release/$TARGET.dll + - dll/$TARGET.dll + lib: + - $TARGET.lib + - lib/$TARGET.lib + - bin/$TARGET.lib + - Release/$TARGET.lib + - dll/$TARGET.lib + pdb: + - $TARGET.pdb + - pdb/$TARGET.pdb + - bin/$TARGET.pdb + - Release/$TARGET.pdb + - dll/$TARGET.pdb + + unix: &library_dynamic_exact_unix + <<: *library_dynamic_exact_common + dynamic: + debug: + so: + - Debug/lib${TARGET}d.$EXT_DYN + - Debug/lib${TARGET}_D.$EXT_DYN + - Debug/lib${TARGET}_debug.$EXT_DYN + - Debug/lib${TARGET}-d.$EXT_DYN + - Debug/lib${TARGET}.$EXT_DYN + ##################### + - bin/lib${TARGET}d.$EXT_DYN + - bin/lib${TARGET}_D.$EXT_DYN + - bin/lib${TARGET}_debug.$EXT_DYN + - bin/lib${TARGET}-d.$EXT_DYN + - bin/lib${TARGET}.$EXT_DYN + ##################### + - lib/lib${TARGET}d.$EXT_DYN + - lib/lib${TARGET}_D.$EXT_DYN + - lib/lib${TARGET}_debug.$EXT_DYN + - 
lib/lib${TARGET}-d.$EXT_DYN + - lib/lib${TARGET}.$EXT_DYN + ##################### + - lib${ARCH}/lib${TARGET}d.$EXT_DYN + - lib${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib${ARCH}/lib${TARGET}.$EXT_DYN + ##################### + - lib${TARGET}d.$EXT_DYN + - lib${TARGET}_D.$EXT_DYN + - lib${TARGET}_debug.$EXT_DYN + - lib${TARGET}-d.$EXT_DYN + - lib${TARGET}.$EXT_DYN + ###################### + - lib/${ARCH}/lib${TARGET}d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}.$EXT_DYN + relwithdebinfo: + so: + - RelWithDebInfo/lib${TARGET}d.$EXT_DYN + - RelWithDebInfo/lib${TARGET}_D.$EXT_DYN + - RelWithDebInfo/lib${TARGET}_debug.$EXT_DYN + - RelWithDebInfo/lib${TARGET}-d.$EXT_DYN + - RelWithDebInfo/lib${TARGET}.$EXT_DYN + ##################### + - bin/lib${TARGET}d.$EXT_DYN + - bin/lib${TARGET}_D.$EXT_DYN + - bin/lib${TARGET}_debug.$EXT_DYN + - bin/lib${TARGET}-d.$EXT_DYN + - bin/lib${TARGET}.$EXT_DYN + ##################### + - lib/lib${TARGET}d.$EXT_DYN + - lib/lib${TARGET}_D.$EXT_DYN + - lib/lib${TARGET}_debug.$EXT_DYN + - lib/lib${TARGET}-d.$EXT_DYN + - lib/lib${TARGET}.$EXT_DYN + ##################### + - lib${ARCH}/lib${TARGET}d.$EXT_DYN + - lib${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib${ARCH}/lib${TARGET}.$EXT_DYN + ##################### + - lib${TARGET}d.$EXT_DYN + - lib${TARGET}_D.$EXT_DYN + - lib${TARGET}_debug.$EXT_DYN + - lib${TARGET}-d.$EXT_DYN + - lib${TARGET}.$EXT_DYN + ###################### + - lib/${ARCH}/lib${TARGET}d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN + - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN + - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN + - lib/${ARCH}/lib${TARGET}.$EXT_DYN + release: + so: + - Release/lib$TARGET.$EXT_DYN + - bin/lib$TARGET.$EXT_DYN + - lib/lib$TARGET.$EXT_DYN + - lib${ARCH}/lib$TARGET.$EXT_DYN + - lib$TARGET.$EXT_DYN + - lib/${ARCH}/lib$TARGET.$EXT_DYN + windows_*-msvc_*-*: + <<: *library_dynamic_exact_windows + default: + <<: *library_dynamic_exact_unix + +library_static: &library_static + common: &library_static_common + include: + - $PLATFORM/include + - include + static: + debug: + lib: + /*$TARGET*.$EXT_STA/ + relwithdebinfo: + lib: + /*$TARGET*.$EXT_STA/ + release: + lib: + /*$TARGET*.$EXT_STA/ + windows_*-msvc_*-*: + <<: *library_static_common + default: + <<: *library_static_common + +library_static_exact: &library_static_exact + common: &library_static_exact_common + include: + - $PLATFORM/include + - include + static: + debug: + lib: + - Debug/lib${TARGET}d.$EXT_STA + - Debug/lib${TARGET}-d.$EXT_STA + - Debug/lib${TARGET}.$EXT_STA + - Debug/${TARGET}d.$EXT_STA + - Debug/${TARGET}-d.$EXT_STA + - Debug/${TARGET}.$EXT_STA + ################ + - lib${TARGET}d.$EXT_STA + - lib${TARGET}-d.$EXT_STA + - lib${TARGET}.$EXT_STA + - ${TARGET}d.$EXT_STA + - ${TARGET}-d.$EXT_STA + - ${TARGET}.$EXT_STA + ################ + - lib/lib${TARGET}d.$EXT_STA + - lib/lib${TARGET}-d.$EXT_STA + - lib/lib${TARGET}.$EXT_STA + - lib/${TARGET}d.$EXT_STA + - lib/${TARGET}-d.$EXT_STA + - lib/${TARGET}.$EXT_STA + relwithdebinfo: + lib: + - RelWithDebInfo/lib${TARGET}d.$EXT_STA + - RelWithDebInfo/lib${TARGET}-d.$EXT_STA + - RelWithDebInfo/lib${TARGET}.$EXT_STA + - RelWithDebInfo/${TARGET}d.$EXT_STA + - RelWithDebInfo/${TARGET}-d.$EXT_STA + - RelWithDebInfo/${TARGET}.$EXT_STA + ################ + - 
lib${TARGET}d.$EXT_STA + - lib${TARGET}-d.$EXT_STA + - lib${TARGET}.$EXT_STA + - ${TARGET}d.$EXT_STA + - ${TARGET}-d.$EXT_STA + - ${TARGET}.$EXT_STA + ################ + - lib/lib${TARGET}d.$EXT_STA + - lib/lib${TARGET}-d.$EXT_STA + - lib/lib${TARGET}.$EXT_STA + - lib/${TARGET}d.$EXT_STA + - lib/${TARGET}-d.$EXT_STA + - lib/${TARGET}.$EXT_STA + release: + lib: + - Release/lib${TARGET}.$EXT_STA + - Release/${TARGET}.$EXT_STA + ################ + - lib${TARGET}.$EXT_STA + - ${TARGET}.$EXT_STA + ################ + - lib/lib${TARGET}.$EXT_STA + - lib/${TARGET}.$EXT_STA + windows_*-msvc_*-*: + <<: *library_static_exact_common + default: + <<: *library_static_exact_common + +# when need distribute dll (only windows) but dont need linking +library_only_dll: &library_only_dll + windows: &library_only_dll_windows + add_3rdparty_dependencies: false + lib_provided: false + dynamic: + debug: + dll: + /*$TARGET*.dll/ + pdb: + /*$TARGET*.pdb/ + relwithdebinfo: + dll: + /*$TARGET*.dll/ + pdb: + /*$TARGET*.pdb/ + release: + dll: + /*$TARGET*.dll/ + pdb: + /*$TARGET*.pdb/ + +library_dynamic_boost: &library_dynamic_boost + common: &common_boost + include: + - $PLATFORM/include + - include + definitions: + - -D${PACKAGE_UPPER}_DYN_LINK + windows: &windows_dynamic_boost + <<: *common_boost + dynamic: + debug: + dll: + /$TARGET-*-mt-*d-*_*.dll/ + lib: + /$TARGET-*-mt-*d-*_*.lib/ + pdb: + null + relwithdebinfo: + dll: + /$TARGET-*-mt-*_*.dll/ + lib: + /$TARGET-*-mt-*_*.dll/ + pdb: + null + release: + dll: + /$TARGET-*-mt-*_*.dll/ + lib: + /$TARGET-*-mt-*_*.lib/ + pdb: + null + unix: &unix_dynamic_boost + <<: *common_boost + dynamic: + debug: + so: + /lib$TARGET-*-mt-*d-*_*.$EXT_DYN/ + relwithdebinfo: + so: + /lib$TARGET-*-mt-*_*.$EXT_DYN/ + release: + so: + /lib$TARGET-*-mt-*_*.$EXT_DYN/ + windows_*-msvc_*-*: + <<: *windows_dynamic_boost + default: + <<: *unix_dynamic_boost + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py new file mode 100644 index 0000000..b80af0f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py @@ -0,0 +1,238 @@ +import os +import utils +import logging +import pipeline +from itertools import product +from third_party import platforms +from third_party import CMAKELIB_URL +from third_party import get_identifier + + +def search_cmakelib(): + # compilando desde cmaki_generator + cmakelib_dir = os.path.join('output', '3rdparties', 'cmaki') + if not os.path.isdir(cmakelib_dir): + # compilando una dependencia + cmakelib_dir = os.path.join('..', 'cmaki') + if not os.path.isdir(cmakelib_dir): + # compilando proeycto raiz + cmakelib_dir = os.path.join('node_modules', 'cmaki') + if not os.path.isdir(cmakelib_dir): + raise Exception("not found cmaki: {}".format(os.path.abspath(cmakelib_dir))) + return os.path.abspath(cmakelib_dir) + + +def compilation(node, parameters, compiler_replace_maps): + + package = node.get_package_name() + package_norm = node.get_package_name_norm() + version = node.get_version() + + cmake3p_dir = parameters.prefix + cmake3p_dir = utils.get_norm_path(cmake3p_dir) + cmake3p_dir = cmake3p_dir.replace('\\', '/') + + cmakefiles_dir = search_cmakelib() + + package_upper = node.get_package_name_norm_upper() + parms = node.parameters + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + install_directory = 
node.get_install_directory(plat) + utils.trymkdir(install_directory) + build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) + utils.trymkdir(build_directory) + with utils.working_directory(build_directory): + # get generator and platform info + for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): + + logging.info('-- compilation mode: %s plat: %s' % (build_mode, plat)) + + ############# 1. prepare vars + + if build_mode.lower() == 'debug': + try: + env_modified['CFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CFLAGS'] + except KeyError: + env_modified['CFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG' + try: + env_modified['CPPFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CPPFLAGS'] + except KeyError: + env_modified['CPPFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG' + elif build_mode.lower() == 'relwithdebinfo': + try: + env_modified['CFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CFLAGS'] + except KeyError: + env_modified['CFLAGS'] = '-g -O2 -DNDEBUG' + try: + env_modified['CPPFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CPPFLAGS'] + except KeyError: + env_modified['CPPFLAGS'] = '-g -O2 -DNDEBUG' + elif build_mode.lower() == 'release': + # packages are assumed to be built in release mode by default + try: + env_modified['CFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CFLAGS'] + except KeyError: + env_modified['CFLAGS'] = '-O3 -DNDEBUG' + try: + env_modified['CPPFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CPPFLAGS'] + except KeyError: + env_modified['CPPFLAGS'] = '-O3 -DNDEBUG' + + cores = utils.detect_ncpus() + half_cores = cores // 2 + env_modified['CORES'] = str(cores) + env_modified['HALF_CORES'] = str(half_cores) + env_modified['GTC_PREFIX'] = parameters.prefix + env_modified['CMAKELIB_URL'] = CMAKELIB_URL + env_modified['BUILD_MODE'] = str(build_mode) + # env_modified['NPP_SERVER'] = ...
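+                # Descriptive note: the variables exported below (SOURCES, CMAKI_DIR, SELFHOME,
+                # CMAKI_PWD, CMAKI_INSTALL) travel in env_modified, which is the environment
+                # passed (env=env_modified) to the per-package build scripts (.build.sh /
+                # .build.cmd) and to the cmake configure/build commands executed further down.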
+ env_modified['SOURCES'] = os.path.abspath(os.path.join('..', node.get_download_directory())) + env_modified['CMAKI_DIR'] = cmakefiles_dir + env_modified['SELFHOME'] = install_directory + env_modified['CMAKI_PWD'] = build_directory + env_modified['CMAKI_INSTALL'] = install_directory + + ################# + # remove cmake3p of node + node.remove_cmake3p(cmake3p_dir) + + # show env vars + node.show_environment_vars(env_modified) + + # remove CMakeCache.txt to avoid problems when + # the generator changes + utils.tryremove('CMakeCache.txt') + utils.tryremove('cmake_install.cmake') + utils.tryremove('install_manifest.txt') + utils.tryremove_dir('CMakeFiles') + ################# + + generator_extra = '' + if generator is not None: + generator_extra = '-G"%s"' % generator + + cmakefiles_dir = parameters.cmakefiles + cmakefiles_dir = cmakefiles_dir.replace('\\', '/') + + cmake_prefix_path = parameters.third_party_dir + cmake_prefix_path = cmake_prefix_path.replace('\\', '/') + + build_directory = build_directory.replace('\\', '/') + + # resolve replace maps + compiler_replace_resolved = {} + for var, value in compiler_replace_maps.items(): + newvalue = value + newvalue = newvalue.replace('$PLATFORM', plat) + compiler_replace_resolved[var] = newvalue + + # begin definitions cmake + try: + cmake_definitions_list_original = parms['cmake_definitions'] + cmake_definitions_list = [] + for define in cmake_definitions_list_original: + # TODO: resolve direct and indirect variables (from dependencies) + define = define.replace('$%s_HOME' % package_norm, install_directory) + # apply replaces + cmake_definitions_list.append( utils.apply_replaces(define, compiler_replace_resolved) ) + except KeyError: + cmake_definitions_list = [] + + # add cflags and cppflags to cmake_definitions + try: + cmake_definitions_list.append( 'CMAKE_C_FLAGS="%s"' % env_modified['CFLAGS'] ) + except KeyError: + pass + try: + cmake_definitions_list.append( 'CMAKE_CXX_FLAGS="%s"' % env_modified['CPPFLAGS'] ) + except KeyError: + pass + + definitions_extra = '' + for definition in cmake_definitions_list: + definitions_extra += ' -D%s' % definition + # end definitions cmake + + if ('CMAKE_TOOLCHAIN_FILE' not in env_modified) or (not env_modified['CMAKE_TOOLCHAIN_FILE']) or (env_modified['CMAKE_TOOLCHAIN_FILE'] == "no cross compile"): + cmake_toolchain_file_filepath='' + else: + cmake_toolchain_file_filepath=' -DCMAKE_TOOLCHAIN_FILE="{}"'.format(env_modified['CMAKE_TOOLCHAIN_FILE']) + + cmake_prefix = node.get_cmake_prefix() + cmake_configure = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKE_MODULE_PATH=%s -DCMAKI_PATH=%s -DCMAKE_BUILD_TYPE=%s -DCMAKE_PREFIX_PATH=%s -DPACKAGE=%s -DPACKAGE_UPPER=%s -DPACKAGE_VERSION=%s -DPACKAGE_BUILD_DIRECTORY=%s -DCMAKI_COMPILER=%s -DCMAKI_IDENTIFIER=%s -DCMAKI_PLATFORM=%s %s %s' % (generator_extra, cmake_prefix, cmake3p_dir, cmakefiles_dir, cmakefiles_dir, build_mode, cmake_prefix_path, package, package_upper, version, build_directory, get_identifier('COMPILER'), get_identifier('ALL'), get_identifier('ALL'), definitions_extra, cmake_toolchain_file_filepath) + + target = node.get_cmake_target() + if target is not None: + cmake_build = 'cmake --build . --target %s --config %s' % (target, build_mode) + else: + cmake_build = 'cmake --build . --config %s' % build_mode + + env_modified['CMAKE_CONFIGURE'] = cmake_configure.replace(r'"', r"'") + env_modified['CMAKE_BUILD'] = cmake_build.replace(r'"', r"'") + + ########## 2.
execute + + executed_build_script = False + if utils.is_windows(): + for build_script in ['.build.cmd', 'build.cmd']: + if os.path.exists(build_script): + # execute manual build script + node.ret += abs(utils.safe_system('%s %s %s %s %s %s' % (build_script, install_directory, package, version, plat, build_mode), env=env_modified)) + executed_build_script = True + else: + for build_script in ['.build.sh', 'build.sh']: + if os.path.exists(build_script): + # show vars + node.show_environment_vars(env_modified) + + node.ret += abs(utils.safe_system('chmod +x %s && ./%s %s %s %s %s %s' % (build_script, build_script, install_directory, package, version, plat, build_mode), env=env_modified)) + executed_build_script = True + + if not executed_build_script: + logging.debug('configure command: %s' % cmake_configure) + + ret = utils.safe_system(cmake_configure, env=env_modified) + if ret == 0: + logging.debug('build command: %s' % cmake_build) + node.ret += abs(utils.safe_system(cmake_build, env=env_modified)) + else: + logging.warning('Configuration failed. See log: %s' % parameters.log) + node.ret += abs(ret) + + ######## 3. manual install + + # post-install + logging.debug('begin post-install') + for bc in node.get_post_install(): + chunks = [x.strip() for x in bc.split(' ') if x] + if (len(chunks) != 2) and (len(chunks) != 3): + raise Exception('Invalid value in post_install: %s. Expected [source pattern destination]' % bc) + + source_folder = os.path.join(build_directory, os.path.dirname(chunks[0])) + install_directory_chunk = os.path.join(install_directory, chunks[1]) + pattern = os.path.basename(chunks[0]) + logging.debug('copy %s/%s to %s' % (source_folder, pattern, install_directory_chunk)) + + # create the directory if it does not exist + utils.trymkdir(install_directory_chunk) + + p = pipeline.make_pipe() + # begin + if len(chunks) == 3: + p = pipeline.find(source_folder, 99)(p) + else: + p = pipeline.find(source_folder, 0)(p) + p = pipeline.grep_basename(pattern)(p) + p = pipeline.copy(source_folder, install_directory_chunk)(p) + p = pipeline.debug('copied ')(p) + # end + pipeline.end_pipe()(p) + logging.debug('end post-install') + + if parameters.fast: + logging.debug('skipping the rest of "compilation" because fast mode is enabled') + break + + # finished successfully + return True diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py new file mode 100644 index 0000000..23fc656 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py @@ -0,0 +1,11 @@ +from requests import get # to make GET request + +def download_from_url(url, file_name): + with open(file_name, "wb") as file: + response = get(url) + file.write(response.content) + +url = 'http://localhost:8080/cpp/download.php?file=json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz' + +download_from_url(url, "json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz") + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py new file mode 100755 index 0000000..e450ee0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py @@ -0,0 +1,26 @@ +import os +import sys +import logging +import argparse
+import urllib +import csv +import utils + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--name', required=True, dest='name', help='name package', default=None) + parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) + parameters = parser.parse_args() + + depends_file = parameters.depends + if os.path.exists(depends_file): + data = utils.deserialize(depends_file) + # data = utils.deserialize_json(depends_file) + else: + data = {} + if parameters.name in data: + print (data[parameters.name]) + sys.exit(0) + else: + sys.exit(1) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py new file mode 100644 index 0000000..c407dd7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py @@ -0,0 +1,35 @@ +import logging + + +def set_state(rets, key, value): + if not key in rets: + rets[key] = value + else: + logging.warning('Received in pipeline multiples packages with same name and version: %s' % key) + set_state(rets, key + '_', value) + + +def get_return_code(parameters, rets): + def process(packages): + for node in packages: + try: + # process package + name = node.get_package_name() + version = node.get_version() + + if len(node.exceptions) > 0: + state = "EXCEPTION in %s" % node.fail_stage + elif node.interrupted: + state = "INTERRUPTED in %s" % node.fail_stage + elif (node.ret != 0): + state = "FAILED in %s" % node.fail_stage + else: + state = "OK" + + key = '%s - %s' % (name, version) + set_state(rets, key, state) + finally: + # send to next step + yield node + return process + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt new file mode 100644 index 0000000..2d06137 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt @@ -0,0 +1,47 @@ +cmake_minimum_required(VERSION 2.8) +cmake_policy(SET CMP0011 NEW) + +include_directories(gwen/include) + +#ADD_DEFINITIONS(-DGWEN_COMPILE_STATIC -D_HAS_EXCEPTIONS=0 -D_STATIC_CPPLIB) +ADD_DEFINITIONS(-DGWEN_COMPILE_DLL) + +IF(WIN32) + +ELSE() + add_definitions(-std=c++11) +ENDIF() + +file(GLOB SOURCE_CODE1 gwen/src/*.cpp) +file(GLOB SOURCE_CODE2 gwen/src/Controls/*.cpp) +file(GLOB SOURCE_CODE3 gwen/src/Controls/Dialog/*.cpp) +file(GLOB SOURCE_CODE4 gwen/src/Platforms/*.cpp) + +add_library(${PACKAGE} SHARED ${SOURCE_CODE1} ${SOURCE_CODE2} ${SOURCE_CODE3} ${SOURCE_CODE4}) + +file(GLOB HEADER_CODE1 gwen/include/Gwen/*.h) +INSTALL( FILES ${HEADER_CODE1} + DESTINATION "include/${PACKAGE}") + +file(GLOB HEADER_CODE2 gwen/include/Gwen/Controls/*.h) +INSTALL( FILES ${HEADER_CODE2} + DESTINATION "include/${PACKAGE}/Controls") + +file(GLOB HEADER_CODE3 gwen/include/Gwen/Controls/Dialog/*.h) +INSTALL( FILES ${HEADER_CODE3} + DESTINATION "include/${PACKAGE}/Controls/Dialog") + +file(GLOB HEADER_CODE4 gwen/include/Gwen/Input/*.h) +INSTALL( FILES ${HEADER_CODE4} + DESTINATION "include/${PACKAGE}/Input") + +file(GLOB HEADER_CODE5 gwen/include/Gwen/Renderers/*.h) +INSTALL( FILES ${HEADER_CODE5} + DESTINATION "include/${PACKAGE}/Renderers") + +file(GLOB HEADER_CODE6 gwen/include/Gwen/Skins/*.h) +INSTALL( FILES 
${HEADER_CODE6} + DESTINATION "include/${PACKAGE}/Skins") + +INSTALL( FILES gwen/bin/DefaultSkin.png + DESTINATION "bin") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py new file mode 100644 index 0000000..f5e56cb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py @@ -0,0 +1,172 @@ +import os +import contextlib +import utils +import time +from datetime import datetime +from utils import get_stdout +from email.utils import parsedate + + +def get_revision_svn(repo): + ''' + This command need svn in PATH + ''' + cmd = "svn info %s" % repo + for line in get_stdout(cmd): + if line.startswith('Last') or (line.startswith('Revisi') and (line.find('cambio') != -1)): + pos = line.rindex(':') + return int(line[pos+2:]) + return -1 + + +def get_timestamp_from_changeset(repo, changeset_searched): + ''' + generator of commits + ''' + with utils.working_directory(repo): + lines = [] + for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): + lines.append(line) + for line in reversed(lines): + chunks = line.split(";") + assert(len(chunks) == 2) + changeset = chunks[0] + timestamp = int(time.mktime(parsedate(chunks[1]))) + if changeset_searched == changeset: + return timestamp + raise Exception('Error in get timestamp from changeset {}'.format(changeset_searched)) + + +def git_log_gen(repo, number=1, extra=''): + ''' + generator of commits + ''' + with utils.working_directory(repo): + for line in get_stdout('git log -%d %s' % (number, extra)): + if line.startswith('commit'): + parts = line.split(' ') + assert(len(parts) == 2) + commit_name = parts[1] + yield commit_name + + +def get_changeset_git_from_position(repo, position = 0): + with utils.working_directory(repo): + i = 1 + lines = [] + for line in get_stdout('git log'): + lines.append(line) + for line in reversed(lines): + if line.startswith('commit'): + parts = line.split(' ') + assert(len(parts) == 2) + commit_name = parts[1] + if i == position: + return commit_name + else: + i += 1 + raise Exception('Error in get git hash from position {}'.format(position)) + + +def get_changeset_from_timestamp(repo, timestamp_searched): + with utils.working_directory(repo): + lines = [] + for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): + lines.append(line) + for line in reversed(lines): + chunks = line.split(";") + assert(len(chunks) == 2) + changeset = chunks[0] + timestamp = int(time.mktime(parsedate(chunks[1]))) + if timestamp_searched == timestamp: + return changeset + raise Exception('Error in get git hash from timestamp {}'.format(timestamp_searched)) + + +def get_position_git_from_changeset(repo, changeset): + with working_directory(repo): + i = 1 + lines = [] + for line in get_stdout('git log'): + lines.append(line) + for line in reversed(lines): + if line.startswith('commit'): + parts = line.split(' ') + if len(parts) == 2: + commit_name = parts[1] + if commit_name == changeset: + return i + else: + i += 1 + return -1 + + +def get_last_changeset(repo, short=False): + for changeset in git_log_gen(repo, number=1): + if short: + return changeset[:7] + else: + return changeset + return "" + + +def get_last_version(repo): + return to_cmaki_version(repo, get_last_changeset(repo)) + + +def rehash_simple(commit_name, position): + separator = '000' + return int(separator.join(list(str(ord(character)) for 
character in commit_name))) % position + + +@contextlib.contextmanager +def working_directory(path): + prev_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +def to_cmaki_version(repo, changeset): + ''' + git hash ----> 0.0.x.x + ''' + position = get_timestamp_from_changeset(repo, changeset) + hash_simple = rehash_simple(changeset, position) + versions = [] + versions.append('0') + versions.append('0') + versions.append(str(position)) + versions.append(str(hash_simple)) + return '.'.join(versions) + + +def to_git_version(repo, version): + ''' + 0.0.x.x ----> git hash + ''' + version = version.split('.') + assert(len(version) == 4) + position = int(version[2]) + pseudohash = int(version[3]) + changeset = get_changeset_from_timestamp(repo, position) + hash_simple = rehash_simple(changeset, position) + assert( get_timestamp_from_changeset(repo, changeset) == position ) + assert( hash_simple == pseudohash ) + return changeset + + +if __name__ == '__main__': + + local_path = r'/home/ricardo/dev/fast-event-system' + + for commit_name in git_log_gen(local_path, 10): + cmaki_version = to_cmaki_version(local_path, commit_name) + print ("%s -> %s" % (commit_name, cmaki_version)) + commit_name2 = to_git_version(local_path, cmaki_version) + print ("%s -> %s" % (cmaki_version, commit_name2)) + print () + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl new file mode 100644 index 0000000..8ba21f4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl @@ -0,0 +1,120 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + BuildName: + BuildStamp: + Name: + Generator: + CompilerName: + OSName: + Hostname: + OSRelease: + OSVersion: + OSPlatform: + Is64Bits: + VendorString: + VendorID: + FamilyID: + ModelID: + ProcessorCacheSize: + NumberOfLogicalCPU: + NumberOfPhysicalCPU: + TotalVirtualMemory: + TotalPhysicalMemory: + LogicalProcessorsPerPhysical: + ProcessorClockFrequency: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md new file mode 100644 index 0000000..4f989c6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md @@ -0,0 +1,3 @@ +# Source +https://bitbucket.org/shackra/ctest-jenkins/ + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt new file mode 100644 index 0000000..bc1e512 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt @@ -0,0 +1,687 @@ +#=================================== +# Build script for libRocket ======= +#=================================== + +if(APPLE) + if(IOS_PLATFORM) + set(CMAKE_TOOLCHAIN_FILE cmake/Platform/iOS.cmake) + endif(IOS_PLATFORM) +endif(APPLE) + +# We use the new OSX_ARCHITECTURES property +# and GNUInstallDirs module +cmake_minimum_required(VERSION 
2.8.5) + +if(COMMAND cmake_policy) + cmake_policy(SET CMP0015 NEW) +endif(COMMAND cmake_policy) + +project(libRocket C CXX) + +# paths +include(GNUInstallDirs) + +set(LIBROCKET_VERSION_MAJOR 1) +set(LIBROCKET_VERSION_MINOR 3) +set(LIBROCKET_VERSION_PATCH 0) +set(LIBROCKET_VERSION_TWEAK 0) +set(PROJECT_VERSION ${LIBROCKET_VERSION_MAJOR}.${LIBROCKET_VERSION_MINOR}.${LIBROCKET_VERSION_PATCH}.${LIBROCKET_VERSION_TWEAK}) + +# Search in the 'cmake' directory for additional CMake modules. +list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) + +# Old versions of CMake need some updated Modules, but we don't want +# to override newer versions of CMake which have working versions +if(CMAKE_MAJOR_VERSION LESS 3) + list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/v2fixes) +endif() + +#=================================== +# Environment tests ================ +#=================================== + +include(TestForANSIForScope) +include(TestForANSIStreamHeaders) +include(TestForSTDNamespace) + +#=================================== +# Provide hints as to where depends= +# might be found = +#=================================== + +if(NOT DEFINED ENV{FREETYPE_DIR}) + set(ENV{FREETYPE_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{Boost_DIR}) + set(ENV{Boost_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{LUA_DIR}) + set(ENV{LUA_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{SDLDIR}) + set(ENV{SDLDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{SDLIMAGEDIR}) + set(ENV{SDLIMAGEDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +if(NOT DEFINED ENV{SFML_ROOT}) + set(ENV{SFML_ROOT} "${PROJECT_SOURCE_DIR}/../Dependencies") +endif() + +#=================================== +# Plaform specific global hacks ==== +#=================================== + +if(APPLE) + # Disables naked builtins from AssertMacros.h which + # This prevents naming collisions such as those from the check() + # function macro with LuaType::check + add_definitions(-D__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=0) +endif(APPLE) + +#=================================== +# Build options ==================== +#=================================== + +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE Release CACHE STRING + "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel." + FORCE) +endif() + +if(NOT IOS) + option(BUILD_SHARED_LIBS "Build shared libraries" ON) +endif(NOT IOS) + +option(BUILD_PYTHON_BINDINGS "Build python bindings" OFF) +option(BUILD_LUA_BINDINGS "Build Lua bindings" OFF) +option(BUILD_SAMPLES "Build samples" OFF) +if(WIN32) + option(SKIP_DIRECTX_SAMPLES "Skip build of all DirectX related samples. Only applies if BUILD_SAMPLES is ON" OFF) + option(SKIP_DIRECTX9_SAMPLE "Skip build of DirectX 9 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) + option(SKIP_DIRECTX10_SAMPLE "Skip build of DirectX 10 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) +endif() + +if(IOS) + if(BUILD_SHARED_LIBS) + message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. iOS does not support shared libraries.") + endif(BUILD_SHARED_LIBS) +endif(IOS) + +if(IOS) + if(BUILD_SHARED_LIBS) + message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. 
iOS does not support shared libraries.") + endif(BUILD_SHARED_LIBS) +endif(IOS) + +if(NOT BUILD_SHARED_LIBS) + add_definitions(-DSTATIC_LIB) +endif() + +#on windows, check for VC10 and fix the multiple compile target issue. +IF(WIN32) + if(MSVC) + if(${MSVC_VERSION} STREQUAL 1600 OR ${MSVC_VERSION} STRGREATER 1600) + message("Visual Studio 2010 (${MSVC_VERSION}) build fix at play (/FORCE:MULTIPLE)") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /FORCE:MULTIPLE") + endif() + endif() +ENDIF(WIN32) + + +#=================================== +# Find dependencies ================ +#=================================== + +cmaki_find_package(dune-freetype) +include_directories(${DUNE-FREETYPE_INCLUDE_DIRS}) +list(APPEND CORE_LINK_LIBS ${DUNE-FREETYPE_LIBRARIES}) + +# # FreeType +# if(CMAKE_MAJOR_VERSION LESS 3) +# # Freetype changed the layout of its header files, we need to use +# # the FindFreetype module from cmake v3 at least, included here +# find_package(Freetype-v2fix REQUIRED) +# else() +# find_package(Freetype REQUIRED) +# endif() +# +# if(FREETYPE_FOUND) +# include_directories(${FREETYPE_INCLUDE_DIRS}) +# link_directories(${FREETYPE_LINK_DIRS}) +# list(APPEND CORE_LINK_LIBS ${FREETYPE_LIBRARY}) +# endif() +# mark_as_advanced(FREETYPE_INCLUDE_DIRS FREETYPE_LIBRARY FREETYPE_LINK_DIRECTORIES) + +# Boost and Python +if(BUILD_PYTHON_BINDINGS) + find_package(PythonInterp 2 REQUIRED) + find_package(PythonLibs 2 REQUIRED) + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -c "from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,prefix=''))" + OUTPUT_VARIABLE PYTHON_INSTDIR + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + if(PYTHONLIBS_FOUND) + include_directories(${PYTHON_INCLUDE_DIR}) + endif() + + #set(Boost_USE_STATIC_LIBS OFF) + #set(Boost_USE_MULTITHREADED ON) + find_package(Boost 1.40.0 COMPONENTS python REQUIRED) + if(Boost_FOUND) + include_directories(${Boost_INCLUDE_DIR}) + list(APPEND PY_BINDINGS_LINK_LIBS ${PYTHON_LIBRARY} ${Boost_LIBRARIES}) + endif() + +endif() + +#Lua +if(BUILD_LUA_BINDINGS) + if(CMAKE_MAJOR_VERSION LESS 3) + find_package(Lua-v2fix) + else() + find_package(Lua) + endif() + if(LUA_FOUND) + include_directories(${LUA_INCLUDE_DIR}) + list(APPEND LUA_BINDINGS_LINK_LIBS ${LUA_LIBRARIES}) + endif() +endif() + + +#=================================== +# Setup paths ====================== +#=================================== + +set(PROJECT_SOURCE_DIR ${PROJECT_SOURCE_DIR}/..) 
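+# Note: PROJECT_SOURCE_DIR is redefined to the parent of Build/ (the libRocket root), so the
+# Include/ directory and the FileList module referenced below resolve against the repository
+# root rather than this Build/ subdirectory.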
+ +include_directories( + ${PROJECT_SOURCE_DIR}/Include +) + +# Include list of source files +include(FileList) + +#=================================== +# Build libraries ================== +#=================================== + +set(LIBRARIES Core Controls Debugger) + +foreach(library ${LIBRARIES}) + set(NAME Rocket${library}) + + add_library(${NAME} ${${library}_SRC_FILES} + ${${library}_HDR_FILES} + ${${library}_PUB_HDR_FILES} + ${MASTER_${library}_PUB_HDR_FILES} + ) + + set_target_properties(${NAME} PROPERTIES + VERSION ${PROJECT_VERSION} + SOVERSION ${LIBROCKET_VERSION_MAJOR} + ) + + if(APPLE) + if(NOT IOS) + set_target_properties(${NAME} PROPERTIES + OSX_ARCHITECTURES "i386;x86_64;" + ) + endif(NOT IOS) + endif(APPLE) + + install(TARGETS ${NAME} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + ) +endforeach(library) + +# Build python bindings +if(BUILD_PYTHON_BINDINGS) + set(LIBRARIES core controls) + + foreach(library ${LIBRARIES}) + set(NAME _rocket${library}) + + add_library(${NAME} MODULE ${Py${library}_SRC_FILES} + ${Py${library}_HDR_FILES} + ${Py${library}_PUB_HDR_FILES} + ) + + if(APPLE) + if(NOT IOS) + set_target_properties(${NAME} PROPERTIES + OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" + ) + endif(NOT IOS) + endif(APPLE) + + set_target_properties(${NAME} PROPERTIES PREFIX "") + + install(TARGETS ${NAME} + LIBRARY DESTINATION ${PYTHON_INSTDIR} + ) + endforeach(library) +endif() + +# Build Lua bindings +if(BUILD_LUA_BINDINGS) + set(LIBRARIES Core Controls) + + foreach(library ${LIBRARIES}) + set(NAME Rocket${library}Lua) + + add_library(${NAME} ${Lua${library}_SRC_FILES} + ${Lua${library}_HDR_FILES} + ${Lua${library}_PUB_HDR_FILES} + ) + + set_target_properties(${NAME} PROPERTIES + VERSION ${PROJECT_VERSION} + SOVERSION ${LIBROCKET_VERSION_MAJOR} + ) + + if(APPLE) + if(NOT IOS) + set_target_properties(${NAME} PROPERTIES + OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" + ) + endif(NOT IOS) + endif(APPLE) + + install(TARGETS ${NAME} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + ) + endforeach(library) +endif() + + +#=================================== +# Link libraries =================== +#=================================== + +target_link_libraries(RocketCore ${CORE_LINK_LIBS}) +target_link_libraries(RocketControls RocketCore) +target_link_libraries(RocketDebugger RocketCore) + +if(BUILD_PYTHON_BINDINGS) + target_link_libraries(_rocketcore RocketCore ${PY_BINDINGS_LINK_LIBS}) + target_link_libraries(_rocketcontrols RocketControls ${PY_BINDINGS_LINK_LIBS}) +endif() + +if(BUILD_LUA_BINDINGS) + target_link_libraries(RocketCoreLua RocketCore ${LUA_BINDINGS_LINK_LIBS}) + target_link_libraries(RocketControlsLua RocketControls RocketCoreLua ${LUA_BINDINGS_LINK_LIBS}) +endif() + + +#=================================== +# Build samples ==================== +#=================================== + +# Build and link the samples +macro(bl_sample NAME) + if (WIN32) + add_executable(${NAME} WIN32 ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) + elseif(APPLE) + add_executable(${NAME} MACOSX_BUNDLE ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) + else() + add_executable(${NAME} ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) + endif() + + if (APPLE) + # We only support i386 for the samples as it still uses Carbon + set_target_properties(${NAME} PROPERTIES OSX_ARCHITECTURES "i386;" ) + endif() + + 
target_link_libraries(${NAME} ${ARGN}) +endmacro() + +if(BUILD_SAMPLES) + include(SampleFileList) + + set(samples treeview customlog drag loaddocument) + set(tutorials template datagrid datagrid_tree tutorial_drag) + + set(sample_LIBRARIES + shell + RocketCore + RocketControls + RocketDebugger + ) + + # Find OpenGL + find_package(OpenGL REQUIRED) + + if(OPENGL_FOUND) + include_directories(${OPENGL_INCLUDE_DIR}) + list(APPEND sample_LIBRARIES ${OPENGL_LIBRARIES}) + endif() + + # Set up required system libraries + if(WIN32) + if(SKIP_DIRECTX_SAMPLES) + message("-- Skipping all DirectX samples") + set(SKIP_DIRECTX9_SAMPLE ON) + set(SKIP_DIRECTX10_SAMPLE ON) + else() + message("-- Determing if DirectX samples can be built") + include(FindDirectX) + find_package(DirectX) + if(DirectX_FOUND) + set(DIRECTX_SAMPLE_LIST) + set(DIRECTX_SKIPPED_SAMPLE_LIST) + + # We should be able to build DirectX 9 sample + message("-- Determing if DirectX samples can be built - Yes") + + if(SKIP_DIRECTX9_SAMPLE) + message("-- Skipping build of DirectX 9 sample: User disabled") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") + else() + if(DirectX_LIBRARY) + if(DirectX_D3DX9_LIBRARY) + list(APPEND DIRECTX_SAMPLE_LIST "DirectX9 ") + else() + set(SKIP_DIRECTX9_SAMPLE ON) + message("-- Skipping build of DirectX 9 sample: DirectX_D3DX9_LIBRARY not found") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") + endif() + else() + set(SKIP_DIRECTX9_SAMPLE ON) + message("-- Skipping build of DirectX 9 sample: DirectX_LIBRARY not found") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") + endif() + endif() + + if(SKIP_DIRECTX10_SAMPLE) + message("-- Skipping build of DirectX 10 sample: User disabled") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") + else() + if(DirectX_D3D10_FOUND) + list(APPEND DIRECTX_SAMPLE_LIST "DirectX10 ") + else() + set(SKIP_DIRECTX10_SAMPLE ON) + message("-- Skipping build of DirectX 10 sample: Missing DirectX_D3D10_INCLUDE_DIR, DirectX_D3D10_LIBRARY or DirectX_D3DX10_LIBRARY") + list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") + endif() + endif() + + + if(DIRECTX_SAMPLE_LIST) + message("-- Enabled DirectX samples: " ${DIRECTX_SAMPLE_LIST}) + endif() + if(DIRECTX_SKIPPED_SAMPLE_LIST) + message("-- Disabled DirectX samples: " ${DIRECTX_SKIPPED_SAMPLE_LIST}) + endif() + else() + message("-- Determing if DirectX samples can be built - No") + set(SKIP_DIRECTX9_SAMPLE ON) + set(SKIP_DIRECTX10_SAMPLE ON) + endif() + endif() + elseif(APPLE) + include(FindCarbon) + find_package(Carbon REQUIRED) + + if (Carbon_FOUND) + include_directories(${Carbon_INCLUDE_DIR}) + list(APPEND sample_LIBRARIES ${Carbon_LIBRARIES}) + endif() + else() + find_package(X11 REQUIRED) + if (X11_FOUND) + list(APPEND sample_LIBRARIES ${X11_LIBRARIES}) + # shell/src/x11/InputX11.cpp:InitialiseX11Keymap uses Xkb if + # possible instead of XGetKeyboardMapping for performance + if(X11_Xkb_FOUND) + FIND_PACKAGE_MESSAGE(X11 "Found X11 KBlib: ${X11_X11_LIB}" "[${X11_X11_LIB}][${X11_XkbINCLUDE_DIR}]") + add_definitions(-DHAS_X11XKBLIB) + endif() + endif() + endif() + + set(SAMPLES_DIR opt/Rocket/Samples CACHE PATH "path to samples dir") + + # The samples and tutorials use the shell library + include_directories(${PROJECT_SOURCE_DIR}/Samples/shell/include) + + # Build and install sample shell library + add_library(shell STATIC ${shell_SRC_FILES} ${shell_HDR_FILES}) + if (APPLE) + # We only support i386 for the samples as it still uses Carbon + set_target_properties(shell PROPERTIES OSX_ARCHITECTURES "i386;") + endif() 
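+  # Each sample below is produced by the bl_sample macro defined above and linked against the
+  # static shell helper library plus the libraries collected in sample_LIBRARIES
+  # (RocketCore, RocketControls, RocketDebugger and the platform-specific dependencies).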
+ + # Build and install the basic samples + foreach(sample ${samples}) + bl_sample(${sample} ${sample_LIBRARIES}) + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/${sample}) + install(TARGETS ${sample} + RUNTIME DESTINATION ${SAMPLES_DIR}/${sample} + BUNDLE DESTINATION ${SAMPLES_DIR}) + endforeach() + + if(WIN32) + if(NOT SKIP_DIRECTX9_SAMPLE) + include_directories(${DirectX_INCLUDE_DIR}) + + bl_sample(directx ${sample_LIBRARIES} ${DirectX_LIBRARY} ${DirectX_D3DX9_LIBRARY}) + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx) + install(TARGETS directx + RUNTIME DESTINATION ${SAMPLES_DIR}/directx + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + if(NOT SKIP_DIRECTX10_SAMPLE) + include_directories(${DirectX_INCLUDE_DIR} ${DirectX_D3D10_INCLUDE_DIRS}) + + bl_sample(directx10 ${sample_LIBRARIES} ${DirectX_D3D10_LIBRARIES}) + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx10) + install(TARGETS directx10 + RUNTIME DESTINATION ${SAMPLES_DIR}/directx10 + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + endif() + + message("-- Can SDL2 sample be built") + find_package(SDL) + if(SDL_FOUND) + find_package(SDL_image) + if(SDL_IMAGE_FOUND) + find_package(GLEW) + if(GLEW_FOUND) + message("-- Can SDL2 sample be built - yes") + include_directories(${SDL_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) + + bl_sample(sdl2 ${sample_LIBRARIES} ${SDL_LIBRARY} ${SDL_IMAGE_LIBRARY} ${GLEW_LIBRARY}) + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sdl2) + install(TARGETS sdl2 + RUNTIME DESTINATION ${SAMPLES_DIR}/sdl2 + BUNDLE DESTINATION ${SAMPLES_DIR}) + else() + message("-- Can SDL2 sample be built - GLEW not found") + endif() + else() + message("-- Can SDL2 sample be built - SDL2_image not found") + endif() + else() + message("-- Can SDL2 sample be built - SDL2 not found") + endif() + + + message("-- Can SFML 1.x sample be built") + find_package(SFML 1 COMPONENTS graphics window system) + if(NOT SFML_FOUND) + message("-- Can SFML 1.x sample be built - no") + elseif(SFML_VERSION_MAJOR GREATER 1) + message("-- Can SFML 1.x sample be built - no: Version 2 detected") + else() + message("-- Can SFML 1.x sample be built - yes") + + include_directories(${SFML_INCLUDE_DIR}) + + bl_sample(sfml ${sample_LIBRARIES} ${SFML_LIBRARIES}) + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml) + install(TARGETS sfml + RUNTIME DESTINATION ${SAMPLES_DIR}/sfml + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + message("-- Can SFML 2.x sample be built") + find_package(SFML 2 COMPONENTS graphics window system) + if(NOT SFML_FOUND) + message("-- Can SFML 2.x sample be built - no") + else() + find_package(GLEW) + if(GLEW_FOUND) + message("-- Can SFML 2.x sample be built - yes: with GLEW") + include_directories(${SFML_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) + add_definitions( -DENABLE_GLEW ) + bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES} ${GLEW_LIBRARY}) + else() + message("-- Can SFML 2.x sample be built - yes: without GLEW") + include_directories(${SFML_INCLUDE_DIR}) + bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES}) + endif() + + # The samples always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml2) + install(TARGETS 
sfml2 + RUNTIME DESTINATION ${SAMPLES_DIR}/sfml2 + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + # Build and install the tutorials + foreach(tutorial ${tutorials}) + bl_sample(${tutorial} ${sample_LIBRARIES}) + + # The tutorials always set this as their current working directory + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/tutorial/${tutorial}) + install(TARGETS ${tutorial} + RUNTIME DESTINATION ${SAMPLES_DIR}/${tutorial} + BUNDLE DESTINATION ${SAMPLES_DIR}) + endforeach() + + # Build and install invaders sample + bl_sample(invaders ${sample_LIBRARIES}) + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/invaders) + install(TARGETS invaders + RUNTIME DESTINATION ${SAMPLES_DIR}/invaders + BUNDLE DESTINATION ${SAMPLES_DIR}) + + if(BUILD_PYTHON_BINDINGS) + # Build and install pyinvaders sample + bl_sample(pyinvaders ${sample_LIBRARIES} ${PYTHON_LIBRARIES} ${PY_BINDINGS_LINK_LIBS}) + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/pyinvaders) + install(TARGETS pyinvaders + RUNTIME DESTINATION ${SAMPLES_DIR}/pyinvaders + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() + + if(BUILD_LUA_BINDINGS) + bl_sample(luainvaders RocketCoreLua RocketControlsLua ${sample_LIBRARIES} ${LUA_BINDINGS_LINK_LIBS}) + install(DIRECTORY DESTINATION ${SAMPLES_DIR}/luainvaders) + install(TARGETS luainvaders + RUNTIME DESTINATION ${SAMPLES_DIR}/luainvaders + BUNDLE DESTINATION ${SAMPLES_DIR}) + endif() +endif() + + +#=================================== +# Installation ===================== +#=================================== + +if(BUILD_LUA_BINDINGS AND BUILD_PYTHON_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + ) +else() + if(NOT BUILD_LUA_BINDINGS AND NOT BUILD_PYTHON_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + PATTERN "Python" EXCLUDE + PATTERN "Lua" EXCLUDE + ) + else() + if(BUILD_PYTHON_BINDINGS) + install(FILES ${PROJECT_SOURCE_DIR}/bin/rocket.py + DESTINATION ${PYTHON_INSTDIR} + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + PATTERN "Lua" EXCLUDE + ) + else() + if(BUILD_LUA_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket + DESTINATION include + PATTERN "Python" EXCLUDE + ) + else() + message(FATAL_ERROR "ASSERT: Unexpected option combination, this is a logical impossibility.") + endif() + endif() + endif() +endif() + +if(BUILD_SAMPLES) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/assets + DESTINATION ${SAMPLES_DIR} + ) + + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/template/data + DESTINATION ${SAMPLES_DIR}/tutorial/template + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid/data + DESTINATION ${SAMPLES_DIR}/tutorial/datagrid + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid_tree/data + DESTINATION ${SAMPLES_DIR}/tutorial/datagrid_tree + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/tutorial_drag/data + DESTINATION ${SAMPLES_DIR}/tutorial/tutorial_drag + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/treeview/data + DESTINATION ${SAMPLES_DIR}/basic/treeview + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/drag/data + DESTINATION ${SAMPLES_DIR}/basic/drag + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/invaders/data + DESTINATION ${SAMPLES_DIR}/invaders + ) + + if(BUILD_PYTHON_BINDINGS) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/pyinvaders/data + DESTINATION ${SAMPLES_DIR}/pyinvaders + ) + endif() + + if(BUILD_LUA_BINDINGS) + install(DIRECTORY 
${PROJECT_SOURCE_DIR}/Samples/luainvaders/data + DESTINATION ${SAMPLES_DIR}/luainvaders + ) + install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/luainvaders/lua + DESTINATION ${SAMPLES_DIR}/luainvaders + ) + endif() +endif() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt new file mode 100644 index 0000000..f4493c7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt @@ -0,0 +1,2 @@ +add_subdirectory(Build) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt new file mode 100644 index 0000000..4ccb85d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt @@ -0,0 +1,26 @@ +cmake_minimum_required(VERSION 2.8) +cmake_policy(SET CMP0011 NEW) +project(noise CXX) + +# http://sourceforge.net/projects/libnoise + +file(GLOB SOURCE_CODE src/*.cpp src/*.h src/model/*.cpp src/model/*.h src/module/*.cpp src/module/*.h) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src) +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/etc) +add_library(${PACKAGE} SHARED ${SOURCE_CODE}) + +#IF(MSVC) +# add_definitions(/nologo /c /D_CRT_SECURE_NO_DEPRECATE) +#ENDIF() + +file(GLOB HEADER_CODE src/*.h ) +INSTALL( FILES ${HEADER_CODE} + DESTINATION "include/${PACKAGE}") + +file(GLOB HEADER_CODE src/model/*.h ) +INSTALL( FILES ${HEADER_CODE} + DESTINATION "include/${PACKAGE}/model") + +file(GLOB HEADER_CODE src/module/*.h ) +INSTALL( FILES ${HEADER_CODE} + DESTINATION "include/${PACKAGE}/module") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp new file mode 100644 index 0000000..08c2a9f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp @@ -0,0 +1,1147 @@ +#include "OIS.h" + +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +////////////////////////////////////Needed Windows Headers//////////// +#if defined OIS_WIN32_PLATFORM +# define WIN32_LEAN_AND_MEAN +# include "windows.h" +# include "resource.h" + +////////////////////////////////////Needed Linux Headers////////////// +#elif defined OIS_LINUX_PLATFORM +# include +# include +#else +# error Sorry, not yet implemented on this platform. 
+#endif + + +using namespace OIS; + +#if defined OIS_WIN32_PLATFORM + +// The dialog proc we have to give to CreateDialog +LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) +{ + return FALSE; +} + +#endif + +//////////// Event handler class declaration //////////////////////////////////////////////// +class Application; +class JoystickManager; +class EffectManager; + +class EventHandler : public KeyListener, public JoyStickListener +{ + protected: + + Application* _pApplication; + JoystickManager* _pJoystickMgr; + EffectManager* _pEffectMgr; + + public: + + EventHandler(Application* pApp); + void initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr); + + bool keyPressed( const KeyEvent &arg ); + bool keyReleased( const KeyEvent &arg ); + + bool buttonPressed( const JoyStickEvent &arg, int button ); + bool buttonReleased( const JoyStickEvent &arg, int button ); + + bool axisMoved( const JoyStickEvent &arg, int axis ); + + bool povMoved( const JoyStickEvent &arg, int pov ); +}; + +//////////// Variable classes //////////////////////////////////////////////////////// + +class Variable +{ + protected: + + double _dInitValue; + double _dValue; + + public: + + Variable(double dInitValue) : _dInitValue(dInitValue) { reset(); } + + double getValue() const { return _dValue; } + + void reset() { _dValue = _dInitValue; } + + virtual void setValue(double dValue) { _dValue = dValue; } + + virtual string toString() const + { + ostringstream oss; + oss << _dValue; + return oss.str(); + } + + virtual void update() {}; +}; + +class Constant : public Variable +{ + public: + + Constant(double dInitValue) : Variable(dInitValue) {} + + virtual void setValue(double dValue) { } + +}; + +class LimitedVariable : public Variable +{ + protected: + + double _dMinValue; + double _dMaxValue; + + public: + + LimitedVariable(double dInitValue, double dMinValue, double dMaxValue) + : _dMinValue(dMinValue), _dMaxValue(dMaxValue), Variable(dInitValue) + {} + + virtual void setValue(double dValue) + { + _dValue = dValue; + if (_dValue > _dMaxValue) + _dValue = _dMaxValue; + else if (_dValue < _dMinValue) + _dValue = _dMinValue; + } + +/* virtual string toString() const + { + ostringstream oss; + oss << setiosflags(ios_base::right) << setw(4) + << (int)(200.0 * getValue()/(_dMaxValue - _dMinValue)); // [-100%, +100%] + return oss.str(); + }*/ +}; + +class TriangleVariable : public LimitedVariable +{ + protected: + + double _dDeltaValue; + + public: + + TriangleVariable(double dInitValue, double dDeltaValue, double dMinValue, double dMaxValue) + : LimitedVariable(dInitValue, dMinValue, dMaxValue), _dDeltaValue(dDeltaValue) {}; + + virtual void update() + { + double dValue = getValue() + _dDeltaValue; + if (dValue > _dMaxValue) + { + dValue = _dMaxValue; + _dDeltaValue = -_dDeltaValue; + //cout << "Decreasing variable towards " << _dMinValue << endl; + } + else if (dValue < _dMinValue) + { + dValue = _dMinValue; + _dDeltaValue = -_dDeltaValue; + //cout << "Increasing variable towards " << _dMaxValue << endl; + } + setValue(dValue); + //cout << "TriangleVariable::update : delta=" << _dDeltaValue << ", value=" << dValue << endl; + } +}; + +//////////// Variable effect class ////////////////////////////////////////////////////////// + +typedef map MapVariables; +typedef void (*EffectVariablesApplier)(MapVariables& mapVars, Effect* pEffect); + +class VariableEffect +{ + protected: + + // Effect description + const char* _pszDesc; + + // The associate OIS effect + Effect* _pEffect; + + // The 
effect variables. + MapVariables _mapVariables; + + // The effect variables applier function. + EffectVariablesApplier _pfApplyVariables; + + // True if the effect is currently being played. + bool _bActive; + + public: + + VariableEffect(const char* pszDesc, Effect* pEffect, + const MapVariables& mapVars, const EffectVariablesApplier pfApplyVars) + : _pszDesc(pszDesc), _pEffect(pEffect), + _mapVariables(mapVars), _pfApplyVariables(pfApplyVars), _bActive(false) + {} + + ~VariableEffect() + { + if (_pEffect) + delete _pEffect; + MapVariables::iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + if (iterVars->second) + delete iterVars->second; + + } + + void setActive(bool bActive = true) + { + reset(); + _bActive = bActive; + } + + bool isActive() + { + return _bActive; + } + + Effect* getFFEffect() + { + return _pEffect; + } + + const char* getDescription() const + { + return _pszDesc; + } + + void update() + { + if (isActive()) + { + // Update the variables. + MapVariables::iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + iterVars->second->update(); + + // Apply the updated variable values to the effect. + _pfApplyVariables(_mapVariables, _pEffect); + } + } + + void reset() + { + MapVariables::iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + iterVars->second->reset(); + _pfApplyVariables(_mapVariables, _pEffect); + } + + string toString() const + { + string str; + MapVariables::const_iterator iterVars; + for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) + str += iterVars->first + ":" + iterVars->second->toString() + " "; + return str; + } +}; + +//////////// Joystick manager class //////////////////////////////////////////////////////// + +class JoystickManager +{ + protected: + + // Input manager. + InputManager* _pInputMgr; + + // Vectors to hold joysticks and associated force feedback devices + vector _vecJoys; + vector _vecFFDev; + + // Selected joystick + int _nCurrJoyInd; + + // Force feedback detected ? + bool _bFFFound; + + // Selected joystick master gain. + float _dMasterGain; + + // Selected joystick auto-center mode. + bool _bAutoCenter; + + public: + + JoystickManager(InputManager* pInputMgr, EventHandler* pEventHdlr) + : _pInputMgr(pInputMgr), _nCurrJoyInd(-1), _dMasterGain(0.5), _bAutoCenter(true) + + { + _bFFFound = false; + for( int nJoyInd = 0; nJoyInd < pInputMgr->getNumberOfDevices(OISJoyStick); ++nJoyInd ) + { + //Create the stick + JoyStick* pJoy = (JoyStick*)pInputMgr->createInputObject( OISJoyStick, true ); + cout << endl << "Created buffered joystick #" << nJoyInd << " '" << pJoy->vendor() + << "' (Id=" << pJoy->getID() << ")"; + + // Check for FF, and if so, keep the joy and dump FF info + ForceFeedback* pFFDev = (ForceFeedback*)pJoy->queryInterface(Interface::ForceFeedback ); + if( pFFDev ) + { + _bFFFound = true; + + // Keep the joy to play with it. + pJoy->setEventCallback(pEventHdlr); + _vecJoys.push_back(pJoy); + + // Keep also the associated FF device + _vecFFDev.push_back(pFFDev); + + // Dump FF supported effects and other info. 
+ cout << endl << " * Number of force feedback axes : " + << pFFDev->getFFAxesNumber() << endl; + const ForceFeedback::SupportedEffectList &lstFFEffects = + pFFDev->getSupportedEffects(); + if (lstFFEffects.size() > 0) + { + cout << " * Supported effects :"; + ForceFeedback::SupportedEffectList::const_iterator itFFEff; + for(itFFEff = lstFFEffects.begin(); itFFEff != lstFFEffects.end(); ++itFFEff) + cout << " " << Effect::getEffectTypeName(itFFEff->second); + cout << endl << endl; + } + else + cout << "Warning: no supported effect found !" << endl; + } + else + { + cout << " (no force feedback support detected) => ignored." << endl << endl; + _pInputMgr->destroyInputObject(pJoy); + } + } + } + + ~JoystickManager() + { + for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) + _pInputMgr->destroyInputObject( _vecJoys[nJoyInd] ); + } + + size_t getNumberOfJoysticks() const + { + return _vecJoys.size(); + } + + bool wasFFDetected() const + { + return _bFFFound; + } + + enum EWhichJoystick { ePrevious=-1, eNext=+1 }; + + void selectJoystick(EWhichJoystick eWhich) + { + // Note: Reset the master gain to half the maximum and autocenter mode to Off, + // when really selecting a new joystick. + if (_nCurrJoyInd < 0) + { + _nCurrJoyInd = 0; + _dMasterGain = 0.5; // Half the maximum. + changeMasterGain(0.0); + } + else + { + _nCurrJoyInd += eWhich; + if (_nCurrJoyInd < -1 || _nCurrJoyInd >= (int)_vecJoys.size()) + _nCurrJoyInd = -1; + if (_vecJoys.size() > 1 && _nCurrJoyInd >= 0) + { + _dMasterGain = 0.5; // Half the maximum. + changeMasterGain(0.0); + } + } + } + + ForceFeedback* getCurrentFFDevice() + { + return (_nCurrJoyInd >= 0) ? _vecFFDev[_nCurrJoyInd] : 0; + } + + void changeMasterGain(float dDeltaPercent) + { + if (_nCurrJoyInd >= 0) + { + _dMasterGain += dDeltaPercent / 100; + if (_dMasterGain > 1.0) + _dMasterGain = 1.0; + else if (_dMasterGain < 0.0) + _dMasterGain = 0.0; + + _vecFFDev[_nCurrJoyInd]->setMasterGain(_dMasterGain); + } + } + + enum EAutoCenterHow { eOff, eOn, eToggle }; + + void changeAutoCenter(EAutoCenterHow eHow = eToggle) + { + if (_nCurrJoyInd >= 0) + { + if (eHow == eToggle) + _bAutoCenter = !_bAutoCenter; + else + _bAutoCenter = (eHow == eOn ? true : false); + _vecFFDev[_nCurrJoyInd]->setAutoCenterMode(_bAutoCenter); + } + } + + void captureEvents() + { + // This fires off buffered events for each joystick we have + for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) + if( _vecJoys[nJoyInd] ) + _vecJoys[nJoyInd]->capture(); + } + + string toString() const + { + // Warning: Wrong result if more than 10 joysticks ... + ostringstream oss; + oss << "Joy:" << (_nCurrJoyInd >= 0 ? (char)('0' + _nCurrJoyInd) : '-'); + oss << " Gain:" << setiosflags(ios_base::right) << setw(3) << (int)(_dMasterGain*100); + oss << "% Center:" << (_bAutoCenter ? 
" On " : "Off"); + return oss.str(); + } +}; + +//////////// Effect variables applier functions ///////////////////////////////////////////// +// These functions apply the given Variables to the given OIS::Effect + +// Variable force "Force" + optional "AttackFactor" constant, on a OIS::ConstantEffect +void forceVariableApplier(MapVariables& mapVars, Effect* pEffect) +{ + double dForce = mapVars["Force"]->getValue(); + double dAttackFactor = 1.0; + if (mapVars.find("AttackFactor") != mapVars.end()) + dAttackFactor = mapVars["AttackFactor"]->getValue(); + + ConstantEffect* pConstForce = dynamic_cast(pEffect->getForceEffect()); + pConstForce->level = (int)dForce; + pConstForce->envelope.attackLevel = (unsigned short)fabs(dForce*dAttackFactor); + pConstForce->envelope.fadeLevel = (unsigned short)fabs(dForce); // Fade never reached, in fact. +} + +// Variable "Period" on an OIS::PeriodicEffect +void periodVariableApplier(MapVariables& mapVars, Effect* pEffect) +{ + double dPeriod = mapVars["Period"]->getValue(); + + PeriodicEffect* pPeriodForce = dynamic_cast(pEffect->getForceEffect()); + pPeriodForce->period = (unsigned int)dPeriod; +} + + +//////////// Effect manager class ////////////////////////////////////////////////////////// + +class EffectManager +{ + protected: + + // The joystick manager + JoystickManager* _pJoystickMgr; + + // Vector to hold variable effects + vector _vecEffects; + + // Selected effect + int _nCurrEffectInd; + + // Update frequency (Hz) + unsigned int _nUpdateFreq; + + // Indexes (in _vecEffects) of the variable effects that are playable by the selected joystick. + vector _vecPlayableEffectInd; + + + public: + + EffectManager(JoystickManager* pJoystickMgr, unsigned int nUpdateFreq) + : _pJoystickMgr(pJoystickMgr), _nUpdateFreq(nUpdateFreq), _nCurrEffectInd(-1) + { + Effect* pEffect; + MapVariables mapVars; + ConstantEffect* pConstForce; + PeriodicEffect* pPeriodForce; + + // Please don't modify or remove effects (unless there is some bug ...) : + // add new ones to enhance the test repository. + // And feel free to add any tested device, even when the test failed ! + // Tested devices capabilities : + // - Logitech G25 Racing wheel : + // * Only 1 axis => no directional 2D effect (only left and right) + // * Full support for constant force under WinXPSP2DX9 and Linux 2.6.22.9 + // * Full support for periodic forces under WinXPSP2DX9 + // (but poor rendering under 20ms period), and no support under Linux 2.6.22.9 + // * Full support reported (not tested) for all other forces under WinXPSP2DX9, + // and no support under Linux 2.6.22.9 + // - Logitech Rumble pad 2 : + // * Only 1 axis => no directional 2D effect (only left and right) + // * Forces amplitude is rendered through the inertia motors rotation frequency + // (stronger force => quicker rotation) + // * 2 inertia motors : 1 with small inertia, 1 with "heavy" one. + // => poor force feedback rendering ... + // * Support (poor) for all OIS forces under WinXPSP2DX9, + // and only for Triangle, Square and Sine periodic forces under Linux 2.6.22.9 + // (reported by enumeration, but does not seem to work actually) + // Master gain setting tests: + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK, Linux2.6.22.9=OK. + // Auto-center mode setting tests: + // - Logitech G25 Racing wheel : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. 
+ // - Logitech Rumble pad 2 : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. + + // 1) Constant force on 1 axis with 20s-period triangle oscillations in [-10K, +10K]. + // Notes: Linux: replay_length: no way to get it to work if not 0 or Effect::OIS_INFINITE + // Tested devices : + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (but only light motor involved), + // Linux2.6.22.9=Not supported + pEffect = new Effect(Effect::ConstantForce, Effect::Constant); + pEffect->direction = Effect::North; + pEffect->trigger_button = 0; + pEffect->trigger_interval = 0; + pEffect->replay_length = Effect::OIS_INFINITE; // Linux/Win32: Same behaviour as 0. + pEffect->replay_delay = 0; + pEffect->setNumAxes(1); + pConstForce = dynamic_cast(pEffect->getForceEffect()); + pConstForce->level = 5000; //-10K to +10k + pConstForce->envelope.attackLength = 0; + pConstForce->envelope.attackLevel = (unsigned short)pConstForce->level; + pConstForce->envelope.fadeLength = 0; + pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; + + mapVars.clear(); + mapVars["Force"] = + new TriangleVariable(0.0, // F0 + 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle + -10000.0, // Fmin + 10000.0); // Fmax + mapVars["AttackFactor"] = new Constant(1.0); + + _vecEffects.push_back + (new VariableEffect + ("Constant force on 1 axis with 20s-period triangle oscillations " + "of its signed amplitude in [-10K, +10K]", + pEffect, mapVars, forceVariableApplier)); + + // 2) Constant force on 1 axis with noticeable attack + // with 20s-period triangle oscillations in [-10K, +10K]. + // Tested devices : + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (including attack, but only light motor involved), + // Linux2.6.22.9=Not supported. + pEffect = new Effect(Effect::ConstantForce, Effect::Constant); + pEffect->direction = Effect::North; + pEffect->trigger_button = 0; + pEffect->trigger_interval = 0; + pEffect->replay_length = Effect::OIS_INFINITE; //(unsigned int)(1000000.0/_nUpdateFreq); // Linux: Does not work. + pEffect->replay_delay = 0; + pEffect->setNumAxes(1); + pConstForce = dynamic_cast(pEffect->getForceEffect()); + pConstForce->level = 5000; //-10K to +10k + pConstForce->envelope.attackLength = (unsigned int)(1000000.0/_nUpdateFreq/2); + pConstForce->envelope.attackLevel = (unsigned short)(pConstForce->level*0.1); + pConstForce->envelope.fadeLength = 0; // Never reached, actually. + pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; // Idem + + mapVars.clear(); + mapVars["Force"] = + new TriangleVariable(0.0, // F0 + 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle + -10000.0, // Fmin + 10000.0); // Fmax + mapVars["AttackFactor"] = new Constant(0.1); + + _vecEffects.push_back + (new VariableEffect + ("Constant force on 1 axis with noticeable attack (app update period / 2)" + "and 20s-period triangle oscillations of its signed amplitude in [-10K, +10K]", + pEffect, mapVars, forceVariableApplier)); + + // 3) Triangle periodic force on 1 axis with 40s-period triangle oscillations + // of its period in [10, 400] ms, and constant amplitude + // Tested devices : + // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. + // - Logitech Rumble pad 2 : WinXPSP2DX9=OK but only light motor involved, + // Linux2.6.22.9=Failed. 
+ pEffect = new Effect(Effect::PeriodicForce, Effect::Triangle); + pEffect->direction = Effect::North; + pEffect->trigger_button = 0; + pEffect->trigger_interval = 0; + pEffect->replay_length = Effect::OIS_INFINITE; + pEffect->replay_delay = 0; + pEffect->setNumAxes(1); + pPeriodForce = dynamic_cast(pEffect->getForceEffect()); + pPeriodForce->magnitude = 10000; // 0 to +10k + pPeriodForce->offset = 0; + pPeriodForce->phase = 0; // 0 to 35599 + pPeriodForce->period = 10000; // Micro-seconds + pPeriodForce->envelope.attackLength = 0; + pPeriodForce->envelope.attackLevel = (unsigned short)pPeriodForce->magnitude; + pPeriodForce->envelope.fadeLength = 0; + pPeriodForce->envelope.fadeLevel = (unsigned short)pPeriodForce->magnitude; + + mapVars.clear(); + mapVars["Period"] = + new TriangleVariable(1*1000.0, // P0 + 4*(400-10)*1000.0/_nUpdateFreq / 40.0, // dP for a 40s-period triangle + 10*1000.0, // Pmin + 400*1000.0); // Pmax + _vecEffects.push_back + (new VariableEffect + ("Periodic force on 1 axis with 40s-period triangle oscillations " + "of its period in [10, 400] ms, and constant amplitude", + pEffect, mapVars, periodVariableApplier)); + + } + + ~EffectManager() + { + vector::iterator iterEffs; + for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++) + delete *iterEffs; + } + + void updateActiveEffects() + { + vector::iterator iterEffs; + for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++) + if ((*iterEffs)->isActive()) + { + (*iterEffs)->update(); + _pJoystickMgr->getCurrentFFDevice()->modify((*iterEffs)->getFFEffect()); + } + } + + void checkPlayableEffects() + { + // Nothing to do if no joystick currently selected + if (!_pJoystickMgr->getCurrentFFDevice()) + return; + + // Get the list of indexes of effects that the selected device can play + _vecPlayableEffectInd.clear(); + for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) + { + const Effect::EForce eForce = _vecEffects[nEffInd]->getFFEffect()->force; + const Effect::EType eType = _vecEffects[nEffInd]->getFFEffect()->type; + if (_pJoystickMgr->getCurrentFFDevice()->supportsEffect(eForce, eType)) + { + _vecPlayableEffectInd.push_back(nEffInd); + } + } + + // Print details about playable effects + if (_vecPlayableEffectInd.empty()) + { + cout << endl << endl << "The device can't play any effect of the test set" << endl; + } + else + { + cout << endl << endl << "Selected device can play the following effects :" << endl; + for (size_t nEffIndInd = 0; nEffIndInd < _vecPlayableEffectInd.size(); nEffIndInd++) + printEffect(_vecPlayableEffectInd[nEffIndInd]); + cout << endl; + } + } + + enum EWhichEffect { ePrevious=-1, eNone=0, eNext=+1 }; + + void selectEffect(EWhichEffect eWhich) + { + + // Nothing to do if no joystick currently selected + if (!_pJoystickMgr->getCurrentFFDevice()) + { + cout << "\nNo Joystick selected.\n"; + return; + } + + // Nothing to do if joystick cannot play any effect + if (_vecPlayableEffectInd.empty()) + { + cout << "\nNo playable effects.\n"; + return; + } + + // If no effect selected, and next or previous requested, select the first one. + if (eWhich != eNone && _nCurrEffectInd < 0) + _nCurrEffectInd = 0; + + // Otherwise, remove the current one from the device, + // and then select the requested one if any. 
+ else if (_nCurrEffectInd >= 0) + { + _pJoystickMgr->getCurrentFFDevice() + ->remove(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); + _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(false); + _nCurrEffectInd += eWhich; + if (_nCurrEffectInd < -1 || _nCurrEffectInd >= (int)_vecPlayableEffectInd.size()) + _nCurrEffectInd = -1; + } + + // If no effect must be selected, reset the selection index + if (eWhich == eNone) + { + _nCurrEffectInd = -1; + } + + // Otherwise, upload the new selected effect to the device if any. + else if (_nCurrEffectInd >= 0) + { + _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(true); + _pJoystickMgr->getCurrentFFDevice() + ->upload(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); + } + } + + void printEffect(size_t nEffInd) + { + cout << "* #" << nEffInd << " : " << _vecEffects[nEffInd]->getDescription() << endl; + } + + void printEffects() + { + for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) + printEffect(nEffInd); + } + + string toString() const + { + ostringstream oss; + oss << "DevMem: " << setiosflags(ios_base::right) << setw(3); + + //This causes constant exceptions with my device. Not needed for anything other than debugging + //if (_pJoystickMgr->getCurrentFFDevice()) + // oss << _pJoystickMgr->getCurrentFFDevice()->getFFMemoryLoad() << "%"; + //else + // oss << "----"; + + oss << " Effect:" << setw(2); + if (_nCurrEffectInd >= 0) + oss << _vecPlayableEffectInd[_nCurrEffectInd] + << " " << _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->toString(); + else + oss << "--"; + return oss.str(); + } +}; + +//////////// Application class //////////////////////////////////////////////////////// + +class Application +{ + protected: + InputManager* _pInputMgr; + EventHandler* _pEventHdlr; + Keyboard* _pKeyboard; + JoystickManager* _pJoystickMgr; + EffectManager* _pEffectMgr; + +#if defined OIS_WIN32_PLATFORM + HWND _hWnd; +#elif defined OIS_LINUX_PLATFORM + Display* _pXDisp; + Window _xWin; +#endif + + bool _bMustStop; + bool _bIsInitialized; + + int _nStatus; + + // App. hart beat frequency. + static const unsigned int _nHartBeatFreq = 20; // Hz + + // Effects update frequency (Hz) : Needs to be quite lower than app. hart beat frequency, + // if we want to be able to calmly study effect changes ... 
+ static const unsigned int _nEffectUpdateFreq = 1; // Hz + + public: + + Application(int argc, const char* argv[]) + { + _pInputMgr = 0; + _pEventHdlr = 0; + _pKeyboard = 0; + _pJoystickMgr = 0; + _pEffectMgr = 0; + +#if defined OIS_WIN32_PLATFORM + _hWnd = 0; +#elif defined OIS_LINUX_PLATFORM + _pXDisp = 0; + _xWin = 0; +#endif + + _bMustStop = false; + + _bIsInitialized = false; + _nStatus = 0; + } + + int initialize() + { + ostringstream wnd; + +#if defined OIS_WIN32_PLATFORM + + //Create a capture window for Input Grabbing + _hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); + if( _hWnd == NULL ) + OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); + + ShowWindow(_hWnd, SW_SHOW); + + wnd << (size_t)_hWnd; + +#elif defined OIS_LINUX_PLATFORM + + //Connects to default X window + if( !(_pXDisp = XOpenDisplay(0)) ) + OIS_EXCEPT(E_General, "Error opening X!"); + + //Create a window + _xWin = XCreateSimpleWindow(_pXDisp,DefaultRootWindow(_pXDisp), 0,0, 100,100, 0, 0, 0); + + //bind our connection to that window + XMapWindow(_pXDisp, _xWin); + + //Select what events we want to listen to locally + XSelectInput(_pXDisp, _xWin, StructureNotifyMask); + + //Wait for Window to show up + XEvent event; + do { XNextEvent(_pXDisp, &event); } while(event.type != MapNotify); + + wnd << _xWin; + +#endif + + // Create OIS input manager + ParamList pl; + pl.insert(make_pair(string("WINDOW"), wnd.str())); + _pInputMgr = InputManager::createInputSystem(pl); + cout << _pInputMgr->inputSystemName() << " created." << endl; + + // Create the event handler. + _pEventHdlr = new EventHandler(this); + + // Create a simple keyboard + _pKeyboard = (Keyboard*)_pInputMgr->createInputObject( OISKeyboard, true ); + _pKeyboard->setEventCallback( _pEventHdlr ); + + // Create the joystick manager. + _pJoystickMgr = new JoystickManager(_pInputMgr, _pEventHdlr); + if( !_pJoystickMgr->wasFFDetected() ) + { + cout << "No Force Feedback device detected." << endl; + _nStatus = 1; + return _nStatus; + } + + // Create force feedback effect manager. + _pEffectMgr = new EffectManager(_pJoystickMgr, _nEffectUpdateFreq); + + // Initialize the event handler. + _pEventHdlr->initialize(_pJoystickMgr, _pEffectMgr); + + _bIsInitialized = true; + + return _nStatus; + } + +#if defined OIS_LINUX_PLATFORM + + // This is just here to show that you still receive x11 events, + // as the lib only needs mouse/key events + void checkX11Events() + { + XEvent event; + + //Poll x11 for events + while( XPending(_pXDisp) > 0 ) + { + XNextEvent(_pXDisp, &event); + } + } +#endif + + int run() + { + const unsigned int nMaxEffectUpdateCnt = _nHartBeatFreq / _nEffectUpdateFreq; + unsigned int nEffectUpdateCnt = 0; + + // Initailize app. if not already done, and exit if something went wrong. + if (!_bIsInitialized) + initialize(); + + if (!_bIsInitialized) + return _nStatus; + + try + { + //Main polling loop + while(!_bMustStop) + { + // This fires off buffered events for keyboards + _pKeyboard->capture(); + + // This fires off buffered events for each joystick we have + _pJoystickMgr->captureEvents(); + + // Update currently selected effects if time has come to. + if (!nEffectUpdateCnt) + { + _pEffectMgr->updateActiveEffects(); + nEffectUpdateCnt = nMaxEffectUpdateCnt; + } + else + nEffectUpdateCnt--; + + // Update state line. 
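One detail worth noting in the loop above: reloading nEffectUpdateCnt with nMaxEffectUpdateCnt and then decrementing fires an effect update every nMaxEffectUpdateCnt + 1 heart beats rather than every nMaxEffectUpdateCnt (about 1.05 s instead of 1 s with the 20 Hz / 1 Hz values used here). The standalone sketch below, with illustrative names and no OIS dependency, shows the reload-minus-one variant that divides the rate exactly.

#include <cstdio>

int main()
{
    const unsigned heartBeatFreq  = 20; // Hz, as in the demo
    const unsigned effectUpdateHz = 1;  // Hz
    const unsigned reload = heartBeatFreq / effectUpdateHz; // 20

    unsigned counter = 0;
    for (unsigned tick = 0; tick < 60; ++tick)
    {
        if (counter == 0)
        {
            std::printf("effect update at tick %u\n", tick); // ticks 0, 20, 40
            counter = reload - 1;  // reload - 1 gives an update exactly every 'reload' ticks;
                                   // reloading with 'reload' (as the demo does) gives reload + 1.
        }
        else
            --counter;
    }
    return 0;
}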
+ cout << "\r" << _pJoystickMgr->toString() << " " << _pEffectMgr->toString() + << " "; + + //Throttle down CPU usage & handle OS events +#if defined OIS_WIN32_PLATFORM + Sleep( (DWORD)(1000.0/_nHartBeatFreq) ); + MSG msg; + while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) + { + TranslateMessage( &msg ); + DispatchMessage( &msg ); + } +#elif defined OIS_LINUX_PLATFORM + checkX11Events(); + usleep(1000000.0/_nHartBeatFreq); +#endif + } + } + catch( const Exception &ex ) + { +#if defined OIS_WIN32_PLATFORM + MessageBox(0, ex.eText, "Exception Raised!", MB_OK); +#else + cout << endl << "OIS Exception Caught!" << endl + << "\t" << ex.eText << "[Line " << ex.eLine << " in " << ex.eFile << "]" << endl; +#endif + } + + terminate(); + + return _nStatus; + } + + void stop() + { + _bMustStop = true; + } + + void terminate() + { + if (_pInputMgr) + { + _pInputMgr->destroyInputObject( _pKeyboard ); + _pKeyboard = 0; + if (_pJoystickMgr) + { + delete _pJoystickMgr; + _pJoystickMgr = 0; + } + InputManager::destroyInputSystem(_pInputMgr); + _pInputMgr = 0; + } + if (_pEffectMgr) + { + delete _pEffectMgr; + _pEffectMgr = 0; + } + if (_pEventHdlr) + { + delete _pEventHdlr; + _pEventHdlr = 0; + } + +#if defined OIS_LINUX_PLATFORM + // Be nice to X and clean up the x window + XDestroyWindow(_pXDisp, _xWin); + XCloseDisplay(_pXDisp); +#endif + } + + JoystickManager* getJoystickManager() + { + return _pJoystickMgr; + } + + EffectManager* getEffectManager() + { + return _pEffectMgr; + } + + void printHelp() + { + cout << endl + << "Keyboard actions :" << endl + << "* Escape : Exit App" << endl + << "* H : This help menu" << endl + << "* Right/Left : Select next/previous joystick among the FF capable detected ones" << endl + << "* Up/Down : Select next/previous effect for the selected joystick" << endl + << "* PgUp/PgDn : Increase/decrease from 5% the master gain " + << "for all the joysticks" << endl + << "* Space : Toggle auto-centering on all the joysticks" << endl; + if (_bIsInitialized) + { + cout << endl << "Implemented effects :" << endl << endl; + _pEffectMgr->printEffects(); + cout << endl; + } + } +}; + +//////////// Event handler class definition //////////////////////////////////////////////// + +EventHandler::EventHandler(Application* pApp) +: _pApplication(pApp) +{} + +void EventHandler::initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr) +{ + _pJoystickMgr = pJoystickMgr; + _pEffectMgr = pEffectMgr; +} + +bool EventHandler::keyPressed( const KeyEvent &arg ) +{ + switch (arg.key) + { + // Quit. + case KC_ESCAPE: + _pApplication->stop(); + break; + + // Help. + case KC_H: + _pApplication->printHelp(); + break; + + // Change current joystick. + case KC_RIGHT: + _pEffectMgr->selectEffect(EffectManager::eNone); + _pJoystickMgr->selectJoystick(JoystickManager::eNext); + _pEffectMgr->checkPlayableEffects(); + break; + case KC_LEFT: + _pEffectMgr->selectEffect(EffectManager::eNone); + _pJoystickMgr->selectJoystick(JoystickManager::ePrevious); + _pEffectMgr->checkPlayableEffects(); + break; + + // Change current effect. + case KC_UP: + _pEffectMgr->selectEffect(EffectManager::eNext); + break; + case KC_DOWN: + _pEffectMgr->selectEffect(EffectManager::ePrevious); + break; + + // Change current master gain. + case KC_PGUP: + _pJoystickMgr->changeMasterGain(5.0); // Percent + break; + case KC_PGDOWN: + _pJoystickMgr->changeMasterGain(-5.0); // Percent + break; + + // Toggle auto-center mode. 
+ case KC_SPACE: + _pJoystickMgr->changeAutoCenter(); + break; + + default: + cout << "Non mapped key: " << arg.key << endl; + } + return true; +} + +bool EventHandler::keyReleased( const KeyEvent &arg ) +{ + return true; +} + +bool EventHandler::buttonPressed( const JoyStickEvent &arg, int button ) +{ + return true; +} +bool EventHandler::buttonReleased( const JoyStickEvent &arg, int button ) +{ + return true; +} +bool EventHandler::axisMoved( const JoyStickEvent &arg, int axis ) +{ + return true; +} +bool EventHandler::povMoved( const JoyStickEvent &arg, int pov ) +{ + return true; +} + +//========================================================================================== +int main(int argc, const char* argv[]) +{ + + cout << endl + << "This is a simple command line Force Feedback testing demo ..." << endl + << "All connected joystick devices will be created and if FF Support is found," << endl + << "you'll be able to play some predefined variable effects on them." << endl << endl + << "Note: 1 effect can be played on 1 joystick at a time for the moment." << endl << endl; + + Application app(argc, argv); + + int status = app.initialize(); + + if (!status) + { + app.printHelp(); + + status = app.run(); + } + + cout << endl << endl << "Exiting ..." << endl << endl; + +#if defined OIS_WIN32_PLATFORM && _DEBUG + cout << "Click on this window and ..." << endl; + system("pause"); +#endif + + exit(status); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am new file mode 100644 index 0000000..926f7f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am @@ -0,0 +1,11 @@ +INCLUDES = $(STLPORT_CFLAGS) -I$(top_srcdir)/includes $(CFLAGS) -I/usr/X11R6/include + +noinst_PROGRAMS = ConsoleApp FFConsoleTest + +ConsoleApp_SOURCES = OISConsole.cpp +ConsoleApp_LDFLAGS = -L$(top_builddir)/src +ConsoleApp_LDADD = -lOIS -lX11 -lXext + +FFConsoleTest_SOURCES = FFConsoleDemo.cpp +FFConsoleTest_LDFLAGS = -L$(top_builddir)/src +FFConsoleTest_LDADD = -lOIS -lX11 -lXext diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp new file mode 100644 index 0000000..0850004 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp @@ -0,0 +1,459 @@ +//////////////////////////////// OS Nuetral Headers //////////////// +#include "OISInputManager.h" +#include "OISException.h" +#include "OISKeyboard.h" +#include "OISMouse.h" +#include "OISJoyStick.h" +#include "OISEvents.h" + +//Advanced Usage +#include "OISForceFeedback.h" + +#include +#include +#include + +////////////////////////////////////Needed Windows Headers//////////// +#if defined OIS_WIN32_PLATFORM +# define WIN32_LEAN_AND_MEAN +# include "windows.h" +# ifdef min +# undef min +# endif +# include "resource.h" + LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ); +////////////////////////////////////////////////////////////////////// +////////////////////////////////////Needed Linux Headers////////////// +#elif defined OIS_LINUX_PLATFORM +# include +# include + void checkX11Events(); +////////////////////////////////////////////////////////////////////// 
+////////////////////////////////////Needed Mac Headers////////////// +#elif defined OIS_APPLE_PLATFORM +# include + void checkMacEvents(); +#endif +////////////////////////////////////////////////////////////////////// +using namespace OIS; + +//-- Some local prototypes --// +void doStartup(); +void handleNonBufferedKeys(); +void handleNonBufferedMouse(); +void handleNonBufferedJoy( JoyStick* js ); + +//-- Easy access globals --// +bool appRunning = true; //Global Exit Flag + +const char *g_DeviceType[6] = {"OISUnknown", "OISKeyboard", "OISMouse", "OISJoyStick", + "OISTablet", "OISOther"}; + +InputManager *g_InputManager = 0; //Our Input System +Keyboard *g_kb = 0; //Keyboard Device +Mouse *g_m = 0; //Mouse Device +JoyStick* g_joys[4] = {0,0,0,0}; //This demo supports up to 4 controllers + +//-- OS Specific Globals --// +#if defined OIS_WIN32_PLATFORM + HWND hWnd = 0; +#elif defined OIS_LINUX_PLATFORM + Display *xDisp = 0; + Window xWin = 0; +#elif defined OIS_APPLE_PLATFORM + WindowRef mWin = 0; +#endif + +//////////// Common Event handler class //////// +class EventHandler : public KeyListener, public MouseListener, public JoyStickListener +{ +public: + EventHandler() {} + ~EventHandler() {} + bool keyPressed( const KeyEvent &arg ) { + std::cout << " KeyPressed {" << arg.key + << ", " << ((Keyboard*)(arg.device))->getAsString(arg.key) + << "} || Character (" << (char)arg.text << ")" << std::endl; + return true; + } + bool keyReleased( const KeyEvent &arg ) { + if( arg.key == KC_ESCAPE || arg.key == KC_Q ) + appRunning = false; + std::cout << "KeyReleased {" << ((Keyboard*)(arg.device))->getAsString(arg.key) << "}\n"; + return true; + } + bool mouseMoved( const MouseEvent &arg ) { + const OIS::MouseState& s = arg.state; + std::cout << "\nMouseMoved: Abs(" + << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" + << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; + return true; + } + bool mousePressed( const MouseEvent &arg, MouseButtonID id ) { + const OIS::MouseState& s = arg.state; + std::cout << "\nMouse button #" << id << " pressed. Abs(" + << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" + << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; + return true; + } + bool mouseReleased( const MouseEvent &arg, MouseButtonID id ) { + const OIS::MouseState& s = arg.state; + std::cout << "\nMouse button #" << id << " released. Abs(" + << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" + << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; + return true; + } + bool buttonPressed( const JoyStickEvent &arg, int button ) { + std::cout << std::endl << arg.device->vendor() << ". Button Pressed # " << button; + return true; + } + bool buttonReleased( const JoyStickEvent &arg, int button ) { + std::cout << std::endl << arg.device->vendor() << ". Button Released # " << button; + return true; + } + bool axisMoved( const JoyStickEvent &arg, int axis ) + { + //Provide a little dead zone + if( arg.state.mAxes[axis].abs > 2500 || arg.state.mAxes[axis].abs < -2500 ) + std::cout << std::endl << arg.device->vendor() << ". Axis # " << axis << " Value: " << arg.state.mAxes[axis].abs; + return true; + } + bool povMoved( const JoyStickEvent &arg, int pov ) + { + std::cout << std::endl << arg.device->vendor() << ". 
POV" << pov << " "; + + if( arg.state.mPOV[pov].direction & Pov::North ) //Going up + std::cout << "North"; + else if( arg.state.mPOV[pov].direction & Pov::South ) //Going down + std::cout << "South"; + + if( arg.state.mPOV[pov].direction & Pov::East ) //Going right + std::cout << "East"; + else if( arg.state.mPOV[pov].direction & Pov::West ) //Going left + std::cout << "West"; + + if( arg.state.mPOV[pov].direction == Pov::Centered ) //stopped/centered out + std::cout << "Centered"; + return true; + } + + bool vector3Moved( const JoyStickEvent &arg, int index) + { + std::cout.precision(2); + std::cout.flags(std::ios::fixed | std::ios::right); + std::cout << std::endl << arg.device->vendor() << ". Orientation # " << index + << " X Value: " << arg.state.mVectors[index].x + << " Y Value: " << arg.state.mVectors[index].y + << " Z Value: " << arg.state.mVectors[index].z; + std::cout.precision(); + std::cout.flags(); + return true; + } +}; + +//Create a global instance +EventHandler handler; + +int main() +{ + std::cout << "\n\n*** OIS Console Demo App is starting up... *** \n"; + try + { + doStartup(); + std::cout << "\nStartup done... Hit 'q' or ESC to exit.\n\n"; + + while(appRunning) + { + //Throttle down CPU usage + #if defined OIS_WIN32_PLATFORM + Sleep(90); + MSG msg; + while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) + { + TranslateMessage( &msg ); + DispatchMessage( &msg ); + } + #elif defined OIS_LINUX_PLATFORM + checkX11Events(); + usleep( 500 ); + #elif defined OIS_APPLE_PLATFORM + checkMacEvents(); + usleep( 500 ); + #endif + + if( g_kb ) + { + g_kb->capture(); + if( !g_kb->buffered() ) + handleNonBufferedKeys(); + } + + if( g_m ) + { + g_m->capture(); + if( !g_m->buffered() ) + handleNonBufferedMouse(); + } + + for( int i = 0; i < 4 ; ++i ) + { + if( g_joys[i] ) + { + g_joys[i]->capture(); + if( !g_joys[i]->buffered() ) + handleNonBufferedJoy( g_joys[i] ); + } + } + } + } + catch( const Exception &ex ) + { + #if defined OIS_WIN32_PLATFORM + MessageBox( NULL, ex.eText, "An exception has occurred!", MB_OK | + MB_ICONERROR | MB_TASKMODAL); + #else + std::cout << "\nOIS Exception Caught!\n" << "\t" << ex.eText << "[Line " + << ex.eLine << " in " << ex.eFile << "]\nExiting App"; + #endif + } + catch(std::exception &ex) + { + std::cout << "Caught std::exception: what = " << ex.what() << std::endl; + } + + //Destroying the manager will cleanup unfreed devices + if( g_InputManager ) + InputManager::destroyInputSystem(g_InputManager); + +#if defined OIS_LINUX_PLATFORM + // Be nice to X and clean up the x window + XDestroyWindow(xDisp, xWin); + XCloseDisplay(xDisp); +#endif + + std::cout << "\n\nGoodbye\n\n"; + return 0; +} + +void doStartup() +{ + ParamList pl; + +#if defined OIS_WIN32_PLATFORM + //Create a capture window for Input Grabbing + hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); + if( hWnd == NULL ) + OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); + + ShowWindow(hWnd, SW_SHOW); + + std::ostringstream wnd; + wnd << (size_t)hWnd; + + pl.insert(std::make_pair( std::string("WINDOW"), wnd.str() )); + + //Default mode is foreground exclusive..but, we want to show mouse - so nonexclusive +// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_FOREGROUND" ))); +// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_NONEXCLUSIVE"))); +#elif defined OIS_LINUX_PLATFORM + //Connects to default X window + if( !(xDisp = XOpenDisplay(0)) ) + OIS_EXCEPT(E_General, "Error opening X!"); + //Create a window + xWin 
= XCreateSimpleWindow(xDisp,DefaultRootWindow(xDisp), 0,0, 100,100, 0, 0, 0); + //bind our connection to that window + XMapWindow(xDisp, xWin); + //Select what events we want to listen to locally + XSelectInput(xDisp, xWin, StructureNotifyMask); + XEvent evtent; + do + { + XNextEvent(xDisp, &evtent); + } while(evtent.type != MapNotify); + + std::ostringstream wnd; + wnd << xWin; + + pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); + + //For this demo, show mouse and do not grab (confine to window) +// pl.insert(std::make_pair(std::string("x11_mouse_grab"), std::string("false"))); +// pl.insert(std::make_pair(std::string("x11_mouse_hide"), std::string("false"))); +#elif defined OIS_APPLE_PLATFORM + // create the window rect in global coords + ::Rect windowRect; + windowRect.left = 0; + windowRect.top = 0; + windowRect.right = 300; + windowRect.bottom = 300; + + // set the default attributes for the window + WindowAttributes windowAttrs = kWindowStandardDocumentAttributes + | kWindowStandardHandlerAttribute + | kWindowInWindowMenuAttribute + | kWindowHideOnFullScreenAttribute; + + // Create the window + CreateNewWindow(kDocumentWindowClass, windowAttrs, &windowRect, &mWin); + + // Color the window background black + SetThemeWindowBackground (mWin, kThemeBrushBlack, true); + + // Set the title of our window + CFStringRef titleRef = CFStringCreateWithCString( kCFAllocatorDefault, "OIS Input", kCFStringEncodingASCII ); + SetWindowTitleWithCFString( mWin, titleRef ); + + // Center our window on the screen + RepositionWindow( mWin, NULL, kWindowCenterOnMainScreen ); + + // Install the event handler for the window + InstallStandardEventHandler(GetWindowEventTarget(mWin)); + + // This will give our window focus, and not lock it to the terminal + ProcessSerialNumber psn = { 0, kCurrentProcess }; + TransformProcessType( &psn, kProcessTransformToForegroundApplication ); + SetFrontProcess(&psn); + + // Display and select our window + ShowWindow(mWin); + SelectWindow(mWin); + + std::ostringstream wnd; + wnd << (unsigned int)mWin; //cast to int so it gets encoded correctly (else it gets stored as a hex string) + std::cout << "WindowRef: " << mWin << " WindowRef as int: " << wnd.str() << "\n"; + pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); +#endif + + //This never returns null.. it will raise an exception on errors + g_InputManager = InputManager::createInputSystem(pl); + + //Lets enable all addons that were compiled in: + g_InputManager->enableAddOnFactory(InputManager::AddOn_All); + + //Print debugging information + unsigned int v = g_InputManager->getVersionNumber(); + std::cout << "OIS Version: " << (v>>16 ) << "." << ((v>>8) & 0x000000FF) << "." 
<< (v & 0x000000FF) + << "\nRelease Name: " << g_InputManager->getVersionName() + << "\nManager: " << g_InputManager->inputSystemName() + << "\nTotal Keyboards: " << g_InputManager->getNumberOfDevices(OISKeyboard) + << "\nTotal Mice: " << g_InputManager->getNumberOfDevices(OISMouse) + << "\nTotal JoySticks: " << g_InputManager->getNumberOfDevices(OISJoyStick); + + //List all devices + DeviceList list = g_InputManager->listFreeDevices(); + for( DeviceList::iterator i = list.begin(); i != list.end(); ++i ) + std::cout << "\n\tDevice: " << g_DeviceType[i->first] << " Vendor: " << i->second; + + g_kb = (Keyboard*)g_InputManager->createInputObject( OISKeyboard, true ); + g_kb->setEventCallback( &handler ); + + g_m = (Mouse*)g_InputManager->createInputObject( OISMouse, true ); + g_m->setEventCallback( &handler ); + const MouseState &ms = g_m->getMouseState(); + ms.width = 100; + ms.height = 100; + + try + { + //This demo uses at most 4 joysticks - use old way to create (i.e. disregard vendor) + int numSticks = std::min(g_InputManager->getNumberOfDevices(OISJoyStick), 4); + for( int i = 0; i < numSticks; ++i ) + { + g_joys[i] = (JoyStick*)g_InputManager->createInputObject( OISJoyStick, true ); + g_joys[i]->setEventCallback( &handler ); + std::cout << "\n\nCreating Joystick " << (i + 1) + << "\n\tAxes: " << g_joys[i]->getNumberOfComponents(OIS_Axis) + << "\n\tSliders: " << g_joys[i]->getNumberOfComponents(OIS_Slider) + << "\n\tPOV/HATs: " << g_joys[i]->getNumberOfComponents(OIS_POV) + << "\n\tButtons: " << g_joys[i]->getNumberOfComponents(OIS_Button) + << "\n\tVector3: " << g_joys[i]->getNumberOfComponents(OIS_Vector3); + } + } + catch(OIS::Exception &ex) + { + std::cout << "\nException raised on joystick creation: " << ex.eText << std::endl; + } +} + +void handleNonBufferedKeys() +{ + if( g_kb->isKeyDown( KC_ESCAPE ) || g_kb->isKeyDown( KC_Q ) ) + appRunning = false; + + if( g_kb->isModifierDown(Keyboard::Shift) ) + std::cout << "Shift is down..\n"; + if( g_kb->isModifierDown(Keyboard::Alt) ) + std::cout << "Alt is down..\n"; + if( g_kb->isModifierDown(Keyboard::Ctrl) ) + std::cout << "Ctrl is down..\n"; +} + +void handleNonBufferedMouse() +{ + //Just dump the current mouse state + const MouseState &ms = g_m->getMouseState(); + std::cout << "\nMouse: Abs(" << ms.X.abs << " " << ms.Y.abs << " " << ms.Z.abs + << ") B: " << ms.buttons << " Rel(" << ms.X.rel << " " << ms.Y.rel << " " << ms.Z.rel << ")"; +} + +void handleNonBufferedJoy( JoyStick* js ) +{ + //Just dump the current joy state + const JoyStickState &joy = js->getJoyStickState(); + for( unsigned int i = 0; i < joy.mAxes.size(); ++i ) + std::cout << "\nAxis " << i << " X: " << joy.mAxes[i].abs; +} + +#if defined OIS_WIN32_PLATFORM +LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) +{ + return FALSE; +} +#endif + +#if defined OIS_LINUX_PLATFORM +//This is just here to show that you still recieve x11 events, as the lib only needs mouse/key events +void checkX11Events() +{ + XEvent event; + + //Poll x11 for events (keyboard and mouse events are caught here) + while( XPending(xDisp) > 0 ) + { + XNextEvent(xDisp, &event); + //Handle Resize events + if( event.type == ConfigureNotify ) + { + if( g_m ) + { + const MouseState &ms = g_m->getMouseState(); + ms.width = event.xconfigure.width; + ms.height = event.xconfigure.height; + } + } + else if( event.type == DestroyNotify ) + { + std::cout << "Exiting...\n"; + appRunning = false; + } + else + std::cout << "\nUnknown X Event: " << event.type << std::endl; + } +} +#endif + +#if 
defined OIS_APPLE_PLATFORM +void checkMacEvents() +{ + //TODO - Check for window resize events, and then adjust the members of mousestate + EventRef event = NULL; + EventTargetRef targetWindow = GetEventDispatcherTarget(); + + if( ReceiveNextEvent( 0, NULL, kEventDurationNoWait, true, &event ) == noErr ) + { + SendEventToEventTarget(event, targetWindow); + std::cout << "Event : " << GetEventKind(event) << "\n"; + ReleaseEvent(event); + } +} +#endif diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp new file mode 100644 index 0000000..6e70213 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp @@ -0,0 +1,563 @@ +/* +The zlib/libpng License + +Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) + +This software is provided 'as-is', without any express or implied warranty. In no event will +the authors be held liable for any damages arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including commercial +applications, and to alter it and redistribute it freely, subject to the following +restrictions: + + 1. The origin of this software must not be misrepresented; you must not claim that + you wrote the original software. If you use this software in a product, + an acknowledgment in the product documentation would be appreciated but is + not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "linux/LinuxForceFeedback.h" +#include "OISException.h" + +#include +#include +#include + +#ifdef HAVE_UNISTD_H +#include +#endif + +using namespace OIS; + +// 0 = No trace; 1 = Important traces; 2 = Debug traces +#define OIS_LINUX_JOYFF_DEBUG 1 + +#ifdef OIS_LINUX_JOYFF_DEBUG +# include + using namespace std; +#endif + +//--------------------------------------------------------------// +LinuxForceFeedback::LinuxForceFeedback(int deviceID) : + ForceFeedback(), mJoyStick(deviceID) +{ +} + +//--------------------------------------------------------------// +LinuxForceFeedback::~LinuxForceFeedback() +{ + // Unload all effects. + for(EffectList::iterator i = mEffectList.begin(); i != mEffectList.end(); ++i ) + { + struct ff_effect *linEffect = i->second; + if( linEffect ) + _unload(linEffect->id); + } + + mEffectList.clear(); +} + +//--------------------------------------------------------------// +unsigned short LinuxForceFeedback::getFFMemoryLoad() +{ + int nEffects = -1; + if (ioctl(mJoyStick, EVIOCGEFFECTS, &nEffects) == -1) + OIS_EXCEPT(E_General, "Unknown error reading max number of uploaded effects."); +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << "LinuxForceFeedback("<< mJoyStick + << ") : Read device max number of uploaded effects : " << nEffects << endl; +#endif + + return (unsigned short int)(nEffects > 0 ? 
100.0*mEffectList.size()/nEffects : 100); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::setMasterGain(float value) +{ + if (!mSetGainSupport) + { +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain " + << "is not supported by the device" << endl; +#endif + return; + } + + struct input_event event; + + memset(&event, 0, sizeof(event)); + event.type = EV_FF; + event.code = FF_GAIN; + if (value < 0.0) + value = 0.0; + else if (value > 1.0) + value = 1.0; + event.value = (__s32)(value * 0xFFFFUL); + +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain to " + << value << " => " << event.value << endl; +#endif + + if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { + OIS_EXCEPT(E_General, "Unknown error changing master gain."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::setAutoCenterMode(bool enabled) +{ + if (!mSetAutoCenterSupport) + { +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting auto-center mode " + << "is not supported by the device" << endl; +#endif + return; + } + + struct input_event event; + + memset(&event, 0, sizeof(event)); + event.type = EV_FF; + event.code = FF_AUTOCENTER; + event.value = (__s32)(enabled*0xFFFFFFFFUL); + +#if (OIS_LINUX_JOYFF_DEBUG > 0) + cout << "LinuxForceFeedback("<< mJoyStick << ") : Toggling auto-center to " + << enabled << " => 0x" << hex << event.value << dec << endl; +#endif + + if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { + OIS_EXCEPT(E_General, "Unknown error toggling auto-center."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::upload( const Effect* effect ) +{ + switch( effect->force ) + { + case OIS::Effect::ConstantForce: + _updateConstantEffect(effect); + break; + case OIS::Effect::ConditionalForce: + _updateConditionalEffect(effect); + break; + case OIS::Effect::PeriodicForce: + _updatePeriodicEffect(effect); + break; + case OIS::Effect::RampForce: + _updateRampEffect(effect); + break; + case OIS::Effect::CustomForce: + //_updateCustomEffect(effect); + //break; + default: + OIS_EXCEPT(E_NotImplemented, "Requested force not implemented yet, sorry!"); + break; + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::modify( const Effect* effect ) +{ + upload(effect); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::remove( const Effect* effect ) +{ + //Get the effect - if it exists + EffectList::iterator i = mEffectList.find(effect->_handle); + if( i != mEffectList.end() ) + { + struct ff_effect *linEffect = i->second; + if( linEffect ) + { + _stop(effect->_handle); + + _unload(effect->_handle); + + free(linEffect); + + mEffectList.erase(i); + } + else + mEffectList.erase(i); + } +} + +//--------------------------------------------------------------// +// To Signed16/Unsigned15 safe conversions +#define MaxUnsigned15Value 0x7FFF +#define toUnsigned15(value) \ + (__u16)((value) < 0 ? 0 : ((value) > MaxUnsigned15Value ? MaxUnsigned15Value : (value))) + +#define MaxSigned16Value 0x7FFF +#define MinSigned16Value -0x7FFF +#define toSigned16(value) \ + (__s16)((value) < MinSigned16Value ? MinSigned16Value : ((value) > MaxSigned16Value ? 
MaxSigned16Value : (value))) + +// OIS to Linux duration +#define LinuxInfiniteDuration 0xFFFF +#define OISDurationUnitMS 1000 // OIS duration unit (microseconds), expressed in milliseconds (theLinux duration unit) + +// linux/input.h : All duration values are expressed in ms. Values above 32767 ms (0x7fff) +// should not be used and have unspecified results. +#define LinuxDuration(oisDuration) ((oisDuration) == Effect::OIS_INFINITE ? LinuxInfiniteDuration \ + : toUnsigned15((oisDuration)/OISDurationUnitMS)) + + +// OIS to Linux levels +#define OISMaxLevel 10000 +#define LinuxMaxLevel 0x7FFF + +// linux/input.h : Valid range for the attack and fade levels is 0x0000 - 0x7fff +#define LinuxPositiveLevel(oisLevel) toUnsigned15(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) + +#define LinuxSignedLevel(oisLevel) toSigned16(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) + + +//--------------------------------------------------------------// +void LinuxForceFeedback::_setCommonProperties(struct ff_effect *event, + struct ff_envelope *ffenvelope, + const Effect* effect, const Envelope *envelope ) +{ + memset(event, 0, sizeof(struct ff_effect)); + + if (envelope && ffenvelope && envelope->isUsed()) { + ffenvelope->attack_length = LinuxDuration(envelope->attackLength); + ffenvelope->attack_level = LinuxPositiveLevel(envelope->attackLevel); + ffenvelope->fade_length = LinuxDuration(envelope->fadeLength); + ffenvelope->fade_level = LinuxPositiveLevel(envelope->fadeLevel); + } + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl; + if (envelope && ffenvelope) + { + cout << " Enveloppe :" << endl + << " AttackLen : " << envelope->attackLength + << " => " << ffenvelope->attack_length << endl + << " AttackLvl : " << envelope->attackLevel + << " => " << ffenvelope->attack_level << endl + << " FadeLen : " << envelope->fadeLength + << " => " << ffenvelope->fade_length << endl + << " FadeLvl : " << envelope->fadeLevel + << " => " << ffenvelope->fade_level << endl; + } +#endif + + event->direction = (__u16)(1 + (effect->direction*45.0+135.0)*0xFFFFUL/360.0); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Direction : " << Effect::getDirectionName(effect->direction) + << " => 0x" << hex << event->direction << dec << endl; +#endif + + // TODO trigger_button 0 vs. -1 + event->trigger.button = effect->trigger_button; // < 0 ? 
0 : effect->trigger_button; + event->trigger.interval = LinuxDuration(effect->trigger_interval); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Trigger :" << endl + << " Button : " << effect->trigger_button + << " => " << event->trigger.button << endl + << " Interval : " << effect->trigger_interval + << " => " << event->trigger.interval << endl; +#endif + + event->replay.length = LinuxDuration(effect->replay_length); + event->replay.delay = LinuxDuration(effect->replay_delay); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Replay :" << endl + << " Length : " << effect->replay_length + << " => " << event->replay.length << endl + << " Delay : " << effect->replay_delay + << " => " << event->replay.delay << endl; +#endif +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updateConstantEffect( const Effect* eff ) +{ + struct ff_effect event; + + ConstantEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); + + event.type = FF_CONSTANT; + event.id = -1; + + event.u.constant.level = LinuxSignedLevel(effect->level); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Level : " << effect->level + << " => " << event.u.constant.level << endl; +#endif + + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updateRampEffect( const Effect* eff ) +{ + struct ff_effect event; + + RampEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); + + event.type = FF_RAMP; + event.id = -1; + + event.u.ramp.start_level = LinuxSignedLevel(effect->startLevel); + event.u.ramp.end_level = LinuxSignedLevel(effect->endLevel); + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " StartLevel : " << effect->startLevel + << " => " << event.u.ramp.start_level << endl + << " EndLevel : " << effect->endLevel + << " => " << event.u.ramp.end_level << endl; +#endif + + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updatePeriodicEffect( const Effect* eff ) +{ + struct ff_effect event; + + PeriodicEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, &event.u.periodic.envelope, eff, &effect->envelope); + + event.type = FF_PERIODIC; + event.id = -1; + + switch( eff->type ) + { + case OIS::Effect::Square: + event.u.periodic.waveform = FF_SQUARE; + break; + case OIS::Effect::Triangle: + event.u.periodic.waveform = FF_TRIANGLE; + break; + case OIS::Effect::Sine: + event.u.periodic.waveform = FF_SINE; + break; + case OIS::Effect::SawToothUp: + event.u.periodic.waveform = FF_SAW_UP; + break; + case OIS::Effect::SawToothDown: + event.u.periodic.waveform = FF_SAW_DOWN; + break; + // Note: No support for Custom periodic force effect for the moment + //case OIS::Effect::Custom: + //event.u.periodic.waveform = FF_CUSTOM; + //break; + default: + OIS_EXCEPT(E_General, "No such available effect for Periodic force!"); + break; + } + + event.u.periodic.period = LinuxDuration(effect->period); + event.u.periodic.magnitude = LinuxPositiveLevel(effect->magnitude); + event.u.periodic.offset = LinuxPositiveLevel(effect->offset); + event.u.periodic.phase = (__u16)(effect->phase*event.u.periodic.period/36000.0); // ????? 
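The conversions used for these fields are the LinuxDuration, LinuxPositiveLevel and LinuxSignedLevel macros defined earlier in this file: OIS levels live in [-10000, +10000] and OIS durations are in microseconds, while the evdev structures want 15-bit unsigned levels, signed 16-bit levels, and millisecond durations capped at 0x7FFF. The standalone restatement below (plain functions with illustrative names, the OIS_INFINITE special case left out) is only meant to make that mapping easy to check.

#include <cstdint>
#include <cstdio>

namespace sketch {

const long OISMaxLevel   = 10000;   // OIS level range is [-10000, +10000]
const long LinuxMaxLevel = 0x7FFF;  // evdev levels are 15/16 bit

// 0..10000 -> 0..0x7FFF, clamped (same idea as LinuxPositiveLevel)
uint16_t positiveLevel(long oisLevel)
{
    long v = LinuxMaxLevel * oisLevel / OISMaxLevel;
    if (v < 0)             v = 0;
    if (v > LinuxMaxLevel) v = LinuxMaxLevel;
    return (uint16_t)v;
}

// -10000..+10000 -> -0x7FFF..+0x7FFF, clamped (same idea as LinuxSignedLevel)
int16_t signedLevel(long oisLevel)
{
    long v = LinuxMaxLevel * oisLevel / OISMaxLevel;
    if (v < -LinuxMaxLevel) v = -LinuxMaxLevel;
    if (v >  LinuxMaxLevel) v =  LinuxMaxLevel;
    return (int16_t)v;
}

// OIS microseconds -> evdev milliseconds, capped at 0x7FFF (finite durations only)
uint16_t duration(unsigned oisMicroseconds)
{
    unsigned long ms = oisMicroseconds / 1000;
    return (uint16_t)(ms > 0x7FFF ? 0x7FFF : ms);
}

} // namespace sketch

int main()
{
    std::printf("level  5000 -> %u\n", (unsigned)sketch::positiveLevel(5000)); // 16383 (0x3FFF)
    std::printf("level -10000 -> %d\n", (int)sketch::signedLevel(-10000));     // -32767 (-0x7FFF)
    std::printf("10 ms period -> %u\n", (unsigned)sketch::duration(10 * 1000)); // 10
    return 0;
}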
+ + // Note: No support for Custom periodic force effect for the moment + event.u.periodic.custom_len = 0; + event.u.periodic.custom_data = 0; + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Magnitude : " << effect->magnitude + << " => " << event.u.periodic.magnitude << endl + << " Period : " << effect->period + << " => " << event.u.periodic.period << endl + << " Offset : " << effect->offset + << " => " << event.u.periodic.offset << endl + << " Phase : " << effect->phase + << " => " << event.u.periodic.phase << endl; +#endif + + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_updateConditionalEffect( const Effect* eff ) +{ + struct ff_effect event; + + ConditionalEffect *effect = static_cast(eff->getForceEffect()); + + _setCommonProperties(&event, NULL, eff, NULL); + + switch( eff->type ) + { + case OIS::Effect::Friction: + event.type = FF_FRICTION; + break; + case OIS::Effect::Damper: + event.type = FF_DAMPER; + break; + case OIS::Effect::Inertia: + event.type = FF_INERTIA; + break; + case OIS::Effect::Spring: + event.type = FF_SPRING; + break; + default: + OIS_EXCEPT(E_General, "No such available effect for Conditional force!"); + break; + } + + event.id = -1; + + event.u.condition[0].right_saturation = LinuxSignedLevel(effect->rightSaturation); + event.u.condition[0].left_saturation = LinuxSignedLevel(effect->leftSaturation); + event.u.condition[0].right_coeff = LinuxSignedLevel(effect->rightCoeff); + event.u.condition[0].left_coeff = LinuxSignedLevel(effect->leftCoeff); + event.u.condition[0].deadband = LinuxPositiveLevel(effect->deadband);// Unit ?? + event.u.condition[0].center = LinuxSignedLevel(effect->center); // Unit ?? TODO ? + + // TODO support for second condition + event.u.condition[1] = event.u.condition[0]; + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << " Condition[0] : " << endl + << " RightSaturation : " << effect->rightSaturation + << " => " << event.u.condition[0].right_saturation << endl + << " LeftSaturation : " << effect->leftSaturation + << " => " << event.u.condition[0]. 
left_saturation << endl + << " RightCoefficient : " << effect->rightCoeff + << " => " << event.u.condition[0].right_coeff << endl + << " LeftCoefficient : " << effect->leftCoeff + << " => " << event.u.condition[0].left_coeff << endl + << " DeadBand : " << effect->deadband + << " => " << event.u.condition[0].deadband << endl + << " Center : " << effect->center + << " => " << event.u.condition[0].center << endl; + cout << " Condition[1] : Not implemented" << endl; +#endif + _upload(&event, eff); +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_upload( struct ff_effect* ffeffect, const Effect* effect) +{ + struct ff_effect *linEffect = 0; + + //Get the effect - if it exists + EffectList::iterator i = mEffectList.find(effect->_handle); + //It has been created already + if( i != mEffectList.end() ) + linEffect = i->second; + + if( linEffect == 0 ) + { +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Adding new effect : " + << Effect::getEffectTypeName(effect->type) << endl; +#endif + + //This effect has not yet been created, so create it in the device + if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { + // TODO device full check + // OIS_EXCEPT(E_DeviceFull, "Remove an effect before adding more!"); + OIS_EXCEPT(E_General, "Unknown error creating effect (may be the device is full)->.."); + } + + // Save returned effect handle + effect->_handle = ffeffect->id; + + // Save a copy of the uploaded effect for later simple modifications + linEffect = (struct ff_effect *)calloc(1, sizeof(struct ff_effect)); + memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); + + mEffectList[effect->_handle] = linEffect; + + // Start playing the effect. + _start(effect->_handle); + } + else + { +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Replacing effect : " + << Effect::getEffectTypeName(effect->type) << endl; +#endif + + // Keep same id/handle, as this is just an update in the device. + ffeffect->id = effect->_handle; + + // Update effect in the device. + if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { + OIS_EXCEPT(E_General, "Unknown error updating an effect->.."); + } + + // Update local linEffect for next time. + memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); + } + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << "LinuxForceFeedback("<< mJoyStick + << ") : Effect handle : " << effect->_handle << endl; +#endif +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_stop( int handle) { + struct input_event stop; + + stop.type = EV_FF; + stop.code = handle; + stop.value = 0; + +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick + << ") : Stopping effect with handle " << handle << endl; +#endif + + if (write(mJoyStick, &stop, sizeof(stop)) != sizeof(stop)) { + OIS_EXCEPT(E_General, "Unknown error stopping effect->.."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_start( int handle) { + struct input_event play; + + play.type = EV_FF; + play.code = handle; + play.value = 1; // Play once. 
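Taken together, _upload() and _stop() above, with _start() here and _unload() just below, reduce to one short evdev sequence: EVIOCSFF to create or update an effect (the kernel writes the assigned id back into the struct), a write() of an EV_FF input_event whose value is the play count (0 stops it), and EVIOCRMFF to free the slot. The condensed sketch below (illustrative function name, no error reporting or effect-list bookkeeping) shows that lifecycle end to end for a caller that already holds an O_RDWR event-device descriptor.

#include <linux/input.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <cstring>

// Upload 'eff' to the device, play it once, then stop and remove it.
// Returns false on any failure. 'fd' is assumed to be an event-device
// file descriptor opened read/write by the caller.
bool playOnce(int fd, ff_effect &eff)
{
    eff.id = -1;                               // -1 asks the kernel to allocate a new slot
    if (ioctl(fd, EVIOCSFF, &eff) == -1)       // upload; reuse an existing id to update instead
        return false;

    input_event ev;
    std::memset(&ev, 0, sizeof(ev));
    ev.type  = EV_FF;
    ev.code  = eff.id;                         // kernel-assigned effect handle
    ev.value = 1;                              // play count; 0 would stop the effect
    if (write(fd, &ev, sizeof(ev)) != sizeof(ev))
        return false;

    // ... wait for the effect to finish playing ...

    ev.value = 0;                              // stop
    if (write(fd, &ev, sizeof(ev)) != sizeof(ev))
        return false;

    return ioctl(fd, EVIOCRMFF, eff.id) != -1; // free the device slot
}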
+ +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick + << ") : Starting effect with handle " << handle << endl; +#endif + + if (write(mJoyStick, &play, sizeof(play)) != sizeof(play)) { + OIS_EXCEPT(E_General, "Unknown error playing effect->.."); + } +} + +//--------------------------------------------------------------// +void LinuxForceFeedback::_unload( int handle) +{ +#if (OIS_LINUX_JOYFF_DEBUG > 1) + cout << endl << "LinuxForceFeedback("<< mJoyStick + << ") : Removing effect with handle " << handle << endl; +#endif + + if (ioctl(mJoyStick, EVIOCRMFF, handle) == -1) { + OIS_EXCEPT(E_General, "Unknown error removing effect->.."); + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp new file mode 100644 index 0000000..87dd977 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp @@ -0,0 +1,308 @@ +/* +The zlib/libpng License + +Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) + +This software is provided 'as-is', without any express or implied warranty. In no event will +the authors be held liable for any damages arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including commercial +applications, and to alter it and redistribute it freely, subject to the following +restrictions: + + 1. The origin of this software must not be misrepresented; you must not claim that + you wrote the original software. If you use this software in a product, + an acknowledgment in the product documentation would be appreciated but is + not required. + + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. 
+*/ +#include "OISConfig.h" + +#include "linux/LinuxJoyStickEvents.h" +#include "linux/LinuxInputManager.h" +#include "linux/LinuxForceFeedback.h" +#include "linux/EventHelpers.h" + +#include "OISEvents.h" +#include "OISException.h" + +#include //Needed to Open a file descriptor +#ifdef HAVE_UNISTD_H +#include +#endif +#include +#include + + +#include +# include +using namespace std; + +using namespace OIS; + +//#define OIS_LINUX_JOY_DEBUG + +//-------------------------------------------------------------------// +LinuxJoyStick::LinuxJoyStick(InputManager* creator, bool buffered, const JoyStickInfo& js) + : JoyStick(js.vendor, buffered, js.devId, creator) +{ + mJoyStick = js.joyFileD; + + mState.mAxes.clear(); + mState.mAxes.resize(js.axes); + mState.mButtons.clear(); + mState.mButtons.resize(js.buttons); + + mPOVs = js.hats; + + mButtonMap = js.button_map; + mAxisMap = js.axis_map; + mRanges = js.axis_range; + + ff_effect = 0; +} + +//-------------------------------------------------------------------// +LinuxJoyStick::~LinuxJoyStick() +{ + EventUtils::removeForceFeedback( &ff_effect ); +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::_initialize() +{ + //Clear old joy state + mState.mAxes.resize(mAxisMap.size()); + mState.clear(); + + //This will create and new us a force feedback structure if it exists + EventUtils::enumerateForceFeedback( mJoyStick, &ff_effect ); + + if( mJoyStick == -1 ) + OIS_EXCEPT(E_InputDeviceNonExistant, "LinuxJoyStick::_initialize() >> JoyStick Not Found!"); +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::capture() +{ + static const short POV_MASK[8] = {0,0,1,1,2,2,3,3}; + + //Used to determine if an axis has been changed and needs an event + bool axisMoved[32] = {false, false, false, false, false, false, false, false, false, false, false, false, false, + false, false, false, false, false, false, false, false, false, false, false, false, false, + false, false, false, false, false, false}; + + //We are in non blocking mode - we just read once, and try to fill up buffer + input_event js[JOY_BUFFERSIZE]; + while(true) + { + int ret = read(mJoyStick, &js, sizeof(struct input_event) * JOY_BUFFERSIZE); + if( ret < 0 ) + break; + + //Determine how many whole events re read up + ret /= sizeof(struct input_event); + for(int i = 0; i < ret; ++i) + { + switch(js[i].type) + { + case EV_KEY: //Button + { + int button = mButtonMap[js[i].code]; + + #ifdef OIS_LINUX_JOY_DEBUG + cout << "\nButton Code: " << js[i].code << ", OIS Value: " << button << endl; + #endif + + //Check to see whether push or released event... + if(js[i].value) + { + mState.mButtons[button] = true; + if( mBuffered && mListener ) + if(!mListener->buttonPressed(JoyStickEvent(this,mState), button)) return; + } + else + { + mState.mButtons[button] = false; + if( mBuffered && mListener ) + if(!mListener->buttonReleased(JoyStickEvent(this,mState), button)) return; + } + break; + } + + case EV_ABS: //Absolute Axis + { + //A Stick (BrakeDefine is the highest possible Axis) + if( js[i].code <= ABS_BRAKE ) + { + int axis = mAxisMap[js[i].code]; + assert( axis < 32 && "Too many axes (Max supported is 32). Report this to OIS forums!" 
); + + axisMoved[axis] = true; + + //check for rescaling: + if( mRanges[axis].min == JoyStick::MIN_AXIS && mRanges[axis].max != JoyStick::MAX_AXIS ) + { //Scale is perfect + mState.mAxes[axis].abs = js[i].value; + } + else + { //Rescale + float proportion = (float)(js[i].value-mRanges[axis].max)/(float)(mRanges[axis].min-mRanges[axis].max); + mState.mAxes[axis].abs = (int)(32767.0f - (65535.0f * proportion)); + } + } + else if( js[i].code <= ABS_HAT3Y ) //A POV - Max four POVs allowed + { + //Normalise the POV to between 0-7 + //Even is X Axis, Odd is Y Axis + unsigned char LinuxPovNumber = js[i].code - 16; + short OIS_POVIndex = POV_MASK[LinuxPovNumber]; + + //Handle X Axis first (Even) (left right) + if((LinuxPovNumber & 0x0001) == 0) + { + //Why do this? Because, we use a bit field, and when this axis is east, + //it can't possibly be west too. So clear out the two X axes, then refil + //it in with the new direction bit. + //Clear the East/West Bit Flags first + mState.mPOV[OIS_POVIndex].direction &= 0x11110011; + if( js[i].value == -1 ) //Left + mState.mPOV[OIS_POVIndex].direction |= Pov::West; + else if( js[i].value == 1 ) //Right + mState.mPOV[OIS_POVIndex].direction |= Pov::East; + } + //Handle Y Axis (Odd) (up down) + else + { + //Clear the North/South Bit Flags first + mState.mPOV[OIS_POVIndex].direction &= 0x11111100; + if( js[i].value == -1 ) //Up + mState.mPOV[OIS_POVIndex].direction |= Pov::North; + else if( js[i].value == 1 ) //Down + mState.mPOV[OIS_POVIndex].direction |= Pov::South; + } + + if( mBuffered && mListener ) + if( mListener->povMoved( JoyStickEvent(this,mState), OIS_POVIndex) == false ) + return; + } + break; + } + + + case EV_REL: //Relative Axes (Do any joystick actually have a relative axis?) + #ifdef OIS_LINUX_JOY_DEBUG + cout << "\nWarning: Relatives axes not supported yet" << endl; + #endif + break; + default: break; + } + } + } + + //All axes and POVs are combined into one movement per pair per captured frame + if( mBuffered && mListener ) + { + for( int i = 0; i < 32; ++i ) + if( axisMoved[i] ) + if( mListener->axisMoved( JoyStickEvent(this,mState), i) == false ) + return; + } +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::setBuffered(bool buffered) +{ + if( buffered != mBuffered ) + { + mBuffered = buffered; + _initialize(); + } +} + +//-------------------------------------------------------------------// +JoyStickInfo LinuxJoyStick::_getJoyInfo() +{ + JoyStickInfo js; + + js.devId = mDevID; + js.joyFileD = mJoyStick; + js.vendor = mVendor; + js.axes = (int)mState.mAxes.size(); + js.buttons = (int)mState.mButtons.size(); + js.hats = mPOVs; + js.button_map = mButtonMap; + js.axis_map = mAxisMap; + js.axis_range = mRanges; + + return js; +} + +//-------------------------------------------------------------------// +JoyStickInfoList LinuxJoyStick::_scanJoys() +{ + JoyStickInfoList joys; + + //Search through all of the event devices.. and identify which ones are joysticks + //xxx move this to InputManager, as it can also scan all other events + for(int i = 0; i < 64; ++i ) + { + stringstream s; + s << "/dev/input/event" << i; + int fd = open( s.str().c_str(), O_RDWR |O_NONBLOCK ); + if(fd == -1) + continue; + + #ifdef OIS_LINUX_JOY_DEBUG + cout << "Opening " << s.str() << "..." << endl; + #endif + try + { + JoyStickInfo js; + if( EventUtils::isJoyStick(fd, js) ) + { + joys.push_back(js); + #ifdef OIS_LINUX_JOY_DEBUG + cout << "=> Joystick added to list." 
<< endl; + #endif + } + else + { + #ifdef OIS_LINUX_JOY_DEBUG + cout << "=> Not a joystick." << endl; + #endif + close(fd); + } + } + catch(...) + { + #ifdef OIS_LINUX_JOY_DEBUG + cout << "Exception caught!!" << endl; + #endif + close(fd); + } + } + + return joys; +} + +//-------------------------------------------------------------------// +void LinuxJoyStick::_clearJoys(JoyStickInfoList &joys) +{ + for(JoyStickInfoList::iterator i = joys.begin(); i != joys.end(); ++i) + close(i->joyFileD); + joys.clear(); +} + +//-------------------------------------------------------------------// +Interface* LinuxJoyStick::queryInterface(Interface::IType type) +{ + if( ff_effect && type == Interface::ForceFeedback ) + return ff_effect; + + return 0; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt new file mode 100644 index 0000000..65ae11f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt @@ -0,0 +1,546 @@ +# cmake_minimum_required (VERSION 2.6) +# project (OXYGINE) +# +# include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") +# cmaki_find_package(sdl2 REQUIRED) +# cmaki_find_package(freeimage REQUIRED) +# cmaki_find_package(dune-zlib REQUIRED) +# cmaki_find_package(haxx-libcurl REQUIRED) +# +# include_directories(${CMAKI_INCLUDE_DIRS}) +# set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) +# +# if (EMSCRIPTEN) +# #don't need SDL2 +# elseif (WIN32) +# #hardcoded path to SDL2 on windows +# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# else(WIN32) +# # find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) +# # message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) +# # +# # if (SDL2_INCLUDE_DIRS) +# # set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) +# # message(STATUS "found") +# # else() +# # message(STATUS "SDL notfound") +# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# # endif() +# +# find_package(CURL) +# endif(EMSCRIPTEN) +# +# +# set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) +# set(OXYGINE_SRC ${OXYGINE_ROOT}/src) +# +# set(FOLDERS src src/closure src/minizip src/core +# src/core/gl src/dev_tools src/minizip +# src/math src/pugixml src/json src/res +# src/text_utils src/utils src/winnie_alloc) +# +# +# if (EMSCRIPTEN) +# set(PLATFORM emscripten) +# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") +# set(PLATFORM linux) +# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") +# set(PLATFORM ios) +# elseif(MSVC) +# set(PLATFORM win32) +# elseif(MINGW) +# set(PLATFORM win32_mingw) +# endif() +# +# +# set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) +# +# +# +# if (EMSCRIPTEN) +# set(OX_HAVE_LIBPNG 1) +# set(OX_HAVE_HTTP 1) +# set(OX_USE_SDL2 0) +# +# set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) +# +# +# file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) +# +# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") +# +# set(OX_HAVE_LIBJPEG 1) +# set(OX_HAVE_LIBPNG 1) +# +# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") +# elseif(MSVC) +# +# set(OX_HAVE_LIBJPEG 1) +# set(OX_HAVE_LIBPNG 1) +# set(OX_HAVE_LIBCURL 1) +# set(OX_HAVE_HTTP 1) +# +# set(libprefix lib) +# +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) +# +# elseif(MINGW) +# +# set(libprefix lib) +# +# set(OX_HAVE_LIBPNG 1) +# set(OX_HAVE_LIBCURL 1) +# set(OX_HAVE_HTTP 1) +# +# endif() +# +# if 
(OX_HAVE_LIBCURL) +# set(FOLDERS ${FOLDERS} src/core/curl) +# include_directories(${THIRD_PARTY}/curl/) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) +# endif(OX_HAVE_LIBCURL) +# +# +# if (NOT OX_HAVE_HTTP) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) +# endif(NOT OX_HAVE_HTTP) +# +# +# +# foreach(ITEM ${FOLDERS}) +# file(GLOB FLS +# ${OXYGINE_ROOT}/${ITEM}/*.cpp +# ${OXYGINE_ROOT}/${ITEM}/*.c +# ${OXYGINE_ROOT}/${ITEM}/*.h) +# set(SOURCES ${SOURCES} ${FLS}) +# string(REPLACE / \\ SGROUP ${ITEM}) +# source_group(${SGROUP} FILES ${FLS}) +# endforeach(ITEM) +# +# +# set(OXYGINE_INCLUDE_DIRS +# ${OXYGINE_SRC} +# ${THIRD_PARTY}/pthreads/include/ +# ${THIRD_PARTY}/zlib) +# +# +# set(OXYGINE_LIBRARY_DIRS +# ${OXYGINE_LIBRARY_DIRS} +# ${OXYGINE_SOURCE_DIR}/libs +# ${THIRD_PARTY}/libraries) +# +# +# if (FORCE_GLES) +# set(OPENGL_LIBRARIES libGLESv2.lib) +# endif(FORCE_GLES) +# +# +# if (MINGW) +# set(CORE_LIBS ${CORE_LIBS} mingw32) +# endif(MINGW) +# +# +# set(CORE_LIBS +# ${CORE_LIBS} +# oxygine-framework +# ${OPENGL_LIBRARIES} +# ) +# +# +# if (OX_USE_SDL2) +# set(CORE_LIBS ${CORE_LIBS} +# SDL2main SDL2) +# set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) +# endif(OX_USE_SDL2) +# +# +# if (WIN32) +# set(CORE_LIBS ${CORE_LIBS} +# pthreadVCE2 +# libcurl_imp +# ws2_32) +# elseif(EMSCRIPTEN) +# else(WIN32) +# set(CORE_LIBS ${CORE_LIBS} pthread) +# endif(WIN32) +# +# +# +# if (OX_HAVE_LIBPNG) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) +# include_directories(${THIRD_PARTY}/libpng) +# set(LIBPNG ${libprefix}png) +# +# if (MSVC) +# if (MSVC_VERSION EQUAL "1900") +# set(LIBPNG ${LIBPNG}-2015) +# endif() +# elseif(EMSCRIPTEN) +# set(LIBPNG libz libpng16) +# endif() +# +# set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) +# endif(OX_HAVE_LIBPNG) +# +# +# if (OX_HAVE_LIBJPEG) +# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) +# include_directories(${THIRD_PARTY}/libjpeg) +# set(LIBJPEG ${libprefix}jpeg) +# +# if (MSVC) +# if (MSVC_VERSION EQUAL "1900") +# set(LIBJPEG ${LIBJPEG}-2015) +# endif() +# endif() +# +# set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) +# endif(OX_HAVE_LIBJPEG) +# +# +# if (NOT EMSCRIPTEN) +# set(CORE_LIBS ${CORE_LIBS} +# ${libprefix}z${libprefix}) +# endif(NOT EMSCRIPTEN) +# +# +# if (NOT MSVC) +# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ") +# endif(NOT MSVC) +# +# +# add_definitions(${OX_DEFINITIONS}) +# include_directories(${OXYGINE_INCLUDE_DIRS}) +# add_library(oxygine-framework STATIC ${SOURCES}) +# +# +# set(OXYGINE_LIBRARY_DIRS +# ${OXYGINE_LIBRARY_DIRS} +# PARENT_SCOPE) +# +# set(OXYGINE_CORE_LIBS +# ${CORE_LIBS} +# PARENT_SCOPE) +# +# set(OXYGINE_DEFINITIONS +# ${OX_DEFINITIONS} +# PARENT_SCOPE) +# +# set(OXYGINE_INCLUDE_DIRS +# ${OXYGINE_INCLUDE_DIRS} +# PARENT_SCOPE) +# +# set(OXYGINE_JS_LIBRARIES +# ${OXYGINE_JS_LIBRARIES} +# PARENT_SCOPE) +# +# message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") +# message(STATUS "Libs: ${CORE_LIBS}") +# message(STATUS "Platform: ${PLATFORM}") +# +# set(CMAKE_INSTALL_PREFIX ../libs) +# install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) +# install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) + + + + + + + + + + + + + + + + + + + + + + + + + + +cmake_minimum_required (VERSION 2.6) +project (OXYGINE) + +include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") +cmaki_find_package(sdl2 REQUIRED) +cmaki_find_package(freeimage REQUIRED) +cmaki_find_package(dune-zlib REQUIRED) +cmaki_find_package(haxx-libcurl REQUIRED) + 
+include_directories(${CMAKI_INCLUDE_DIRS}) +set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) + +# find_package(OpenGL) +# +# if (EMSCRIPTEN) +# #don't need SDL2 +# elseif (WIN32) +# #hardcoded path to SDL2 on windows +# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# else(WIN32) +# find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) +# message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) +# +# if (SDL2_INCLUDE_DIRS) +# set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) +# message(STATUS "found") +# else() +# message(STATUS "SDL not found") +# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) +# endif() +# +# find_package(CURL) +# endif(EMSCRIPTEN) + + +set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) +set(OXYGINE_SRC ${OXYGINE_ROOT}/src) + +set(FOLDERS src src/closure src/minizip src/core + src/core/gl src/dev_tools src/minizip + src/math src/pugixml src/json src/res + src/text_utils src/utils src/winnie_alloc) + + +if (EMSCRIPTEN) + set(PLATFORM emscripten) +elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") + set(PLATFORM linux) +elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") + set(PLATFORM ios) +elseif(MSVC) + set(PLATFORM win32) +elseif(MINGW) + set(PLATFORM win32_mingw) +endif() + + +set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) + + + +if (EMSCRIPTEN) + set(OX_HAVE_LIBPNG 1) + set(OX_HAVE_HTTP 1) + set(OX_USE_SDL2 1) + + set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) + + + file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) + + set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -s USE_SDL=2 -s USE_LIBPNG=1 -s USE_ZLIB=1 -s FULL_ES2=1 ") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -s USE_ZLIB=1")#for minizip.c + +elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") + + set(OX_HAVE_LIBJPEG 1) + set(OX_HAVE_LIBPNG 1) + +elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") +elseif(MSVC) + + set(OX_HAVE_LIBJPEG 1) + set(OX_HAVE_LIBPNG 1) + set(OX_HAVE_LIBCURL 1) + set(OX_HAVE_HTTP 1) + + set(libprefix lib) + + set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) + +elseif(MINGW) + + set(libprefix lib) + + set(OX_HAVE_LIBPNG 1) + set(OX_HAVE_LIBCURL 1) + set(OX_HAVE_HTTP 1) + +endif() + +if (OX_HAVE_LIBCURL) + set(FOLDERS ${FOLDERS} src/core/curl) + include_directories(${THIRD_PARTY}/curl/) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) +endif(OX_HAVE_LIBCURL) + + + +if (NOT OX_HAVE_HTTP) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) +endif(NOT OX_HAVE_HTTP) + +if (EMSCRIPTEN) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_MT) +endif(EMSCRIPTEN) + +foreach(ITEM ${FOLDERS}) + file(GLOB FLS + ${OXYGINE_ROOT}/${ITEM}/*.cpp + ${OXYGINE_ROOT}/${ITEM}/*.c + ${OXYGINE_ROOT}/${ITEM}/*.h) + set(SOURCES ${SOURCES} ${FLS}) + string(REPLACE / \\ SGROUP ${ITEM}) + source_group(${SGROUP} FILES ${FLS}) +endforeach(ITEM) + + +set(OXYGINE_INCLUDE_DIRS + ${OXYGINE_SRC} + ${THIRD_PARTY}/pthreads/include/ + ${THIRD_PARTY}/zlib) + + +set(OXYGINE_LIBRARY_DIRS + ${OXYGINE_LIBRARY_DIRS} + ${OXYGINE_SOURCE_DIR}/libs + ${THIRD_PARTY}/libraries) + + +if (FORCE_GLES) + set(OPENGL_LIBRARIES libGLESv2.lib) +endif(FORCE_GLES) + + +if (MINGW) + set(CORE_LIBS ${CORE_LIBS} mingw32) +endif(MINGW) + + +set(CORE_LIBS + ${CORE_LIBS} + oxygine-framework + ${OPENGL_LIBRARIES} +) + + +if (OX_USE_SDL2) + set(CORE_LIBS ${CORE_LIBS} + SDL2main SDL2) + set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) +endif(OX_USE_SDL2) + + +if (WIN32) + set(CORE_LIBS ${CORE_LIBS} + pthreadVCE2 + libcurl_imp + ws2_32) +elseif(EMSCRIPTEN) +else(WIN32) + 
set(CORE_LIBS ${CORE_LIBS} pthread) +endif(WIN32) + + + +if (OX_HAVE_LIBPNG) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) + + if (EMSCRIPTEN) + + else(EMSCRIPTEN) + + include_directories(${THIRD_PARTY}/libpng) + set(LIBPNG ${libprefix}png) + + if (MSVC) + if(NOT (MSVC_VERSION LESS 1900)) + set(LIBPNG ${LIBPNG}-2015) + endif() + endif() + + set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) + + endif(EMSCRIPTEN) + +endif(OX_HAVE_LIBPNG) + + +if (OX_HAVE_LIBJPEG) + set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) + include_directories(${THIRD_PARTY}/libjpeg) + set(LIBJPEG ${libprefix}jpeg) + + if (MSVC) + if(NOT (MSVC_VERSION LESS 1900)) + set(LIBJPEG ${LIBJPEG}-2015) + endif() + endif() + + set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) +endif(OX_HAVE_LIBJPEG) + + +if (NOT EMSCRIPTEN) + set(CORE_LIBS ${CORE_LIBS} + ${libprefix}z${libprefix}) +endif(NOT EMSCRIPTEN) + + +if (NOT MSVC) + set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -std=c++11 ") +endif(NOT MSVC) + +set(CMAKE_CXX_FLAGS ${OXYGINE_CXX_FLAGS}) + +add_definitions(${OX_DEFINITIONS}) +include_directories(${OXYGINE_INCLUDE_DIRS}) +add_library(oxygine-framework STATIC ${SOURCES}) + + +set(OXYGINE_LIBRARY_DIRS + ${OXYGINE_LIBRARY_DIRS} + PARENT_SCOPE) + +set(OXYGINE_CORE_LIBS + ${CORE_LIBS} + PARENT_SCOPE) + +set(OXYGINE_DEFINITIONS + ${OX_DEFINITIONS} + PARENT_SCOPE) + +set(OXYGINE_INCLUDE_DIRS + ${OXYGINE_INCLUDE_DIRS} + PARENT_SCOPE) + +set(OXYGINE_JS_LIBRARIES + ${OXYGINE_JS_LIBRARIES} + PARENT_SCOPE) + +set(OXYGINE_CXX_FLAGS + ${OXYGINE_CXX_FLAGS} + PARENT_SCOPE) + + + +message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") +message(STATUS "Libs: ${CORE_LIBS}") +message(STATUS "Platform: ${PLATFORM}") + +set(CMAKE_INSTALL_PREFIX ../libs) +install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) +install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) + + + + + + + + + + + + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml new file mode 100644 index 0000000..bbdc966 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml @@ -0,0 +1,13 @@ +- assimp: + <<: *thirdparty_defaults + version: 3.1.1.0 + mode: dr + source: http://downloads.sourceforge.net/project/assimp/assimp-3.1/assimp-3.1.1.zip + uncompress_strip: assimp-3.1.1 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + targets: + - assimp: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml new file mode 100644 index 0000000..e2fe3a4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml @@ -0,0 +1,23 @@ +- box2d: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + cmake_target: null + cmake_prefix: ./Box2D/CMakeLists.txt + cmake_definitions: + - BOX2D_BUILD_EXAMPLES=OFF + - BUILD_SHARED_LIBS=ON + - BOX2D_BUILD_SHARED=ON + - BOX2D_BUILD_STATIC=OFF + # - CMAKE_POSITION_INDEPENDENT_CODE=ON + post_install: + - ./Box2D/Box2D/*.h include/Box2D/ RECURSIVE + - ./Box2D/libBox2D.a lib/ + mode: dr + source: https://github.com/erincatto/Box2D.git + branch: -b v2.3.1 + targets: + - Box2D: + info: + <<: *library_dynamic_exact + diff 
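
Every package entry in these .yml files begins with a YAML merge key that pulls in a block of shared defaults (thirdparty_defaults) defined elsewhere in the generator configuration, and then overrides individual fields such as version, source or targets. A short PyYAML sketch of how that merge behaves at load time; the defaults block below is illustrative only, not the real cmaki schema:

# Illustrative only: how "<<: *anchor" folds shared defaults into a package
# entry when the YAML is parsed. Requires PyYAML (pip install pyyaml).
import yaml

doc = """
defaults: &thirdparty_defaults
  mode: dr
  version: 0.0.0.0

packages:
  - box2d:
      <<: *thirdparty_defaults
      version_manager: git
      source: https://github.com/erincatto/Box2D.git
"""

box2d = yaml.safe_load(doc)["packages"][0]["box2d"]
print(box2d["mode"], box2d["version"])  # dr 0.0.0.0  (inherited from the anchor)
print(box2d["source"])                  # per-package override
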
--git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml new file mode 100644 index 0000000..a33a569 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml @@ -0,0 +1,54 @@ +- bullet2: + <<: *thirdparty_defaults + version: 2.83.6.0 + source: https://github.com/bulletphysics/bullet3/archive/2.83.6.tar.gz + uncompress_strip: bullet3-2.83.6 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + references: &bullet2_common_extra + default: + include: + - include/bullet + targets: + - LinearMath: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - BulletCollision: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - BulletDynamics: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - BulletSoftBody: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + # optional targets + - BulletFileLoader: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - ConvexDecomposition: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - GIMPACTUtils: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + - HACD: + info: + <<: *library_dynamic_exact + extra: + <<: *bullet2_common_extra + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml new file mode 100644 index 0000000..59a451e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml @@ -0,0 +1,70 @@ +- cryptopp: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + version_manager: git + post_install: + - ./*.h include + - ./*.a lib + cmake_target: null + mode: dr + source: https://github.com/weidai11/cryptopp.git + branch: -b CRYPTOPP_5_6_5 + unittest: + | + // https://www.cryptopp.com/wiki/ChannelSwitch + #include + #include + #include + #include + #include + #include + + int main(int argc, char *argv[]) + { + std::string message = "Now is the time for all good men to come to the aide of their country"; + + // Allow user to override default message from command line arg. 
+ if(argc == 2 && argv[1] != NULL) + message = std::string(argv[1]); + + // Set hash variables + std::string s1, s2, s3, s4; + CryptoPP::SHA1 sha1; CryptoPP::SHA224 sha224; CryptoPP::SHA256 sha256; CryptoPP::SHA512 sha512; + + // Run hash functions + CryptoPP::HashFilter f1(sha1, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s1))); + CryptoPP::HashFilter f2(sha224, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s2))); + CryptoPP::HashFilter f3(sha256, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s3))); + CryptoPP::HashFilter f4(sha512, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s4))); + + // Set route to default + CryptoPP::ChannelSwitch cs; + cs.AddDefaultRoute(f1); + cs.AddDefaultRoute(f2); + cs.AddDefaultRoute(f3); + cs.AddDefaultRoute(f4); + + CryptoPP::StringSource ss(message, true /*pumpAll*/, new CryptoPP::Redirector(cs)); + + std::cout << "Message: " << message << std::endl; + std::cout << "SHA-1: " << s1 << std::endl; + std::cout << "SHA-224: " << s2 << std::endl; + std::cout << "SHA-256: " << s3 << std::endl; + std::cout << "SHA-512: " << s4 << std::endl; + } + cmake_definitions: + - BUILD_SHARED=OFF + - BUILD_SHARED_LIBS=OFF + - BUILD_STATIC=ON + - BUILD_TESTING=OFF + targets: + - cryptopp: + info: + <<: *library_static_exact + extra: + default: + definitions: + - -DCRYPTOPP_INIT_PRIORITY=1 + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml new file mode 100644 index 0000000..9ebf7cf --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml @@ -0,0 +1,28 @@ +- dune-freetype: + <<: *thirdparty_defaults + version: 1.0.0.0 + mode: dr + source: http://download.savannah.gnu.org/releases/freetype/freetype-2.6.tar.bz2 + uncompress_strip: freetype-2.6 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + unittest: + | + #include + #include FT_FREETYPE_H + int main() + { + FT_Library library; + FT_Init_FreeType( &library ); + return 0; + } + targets: + - freetype: + info: + <<: *library_dynamic_exact + extra: + default: + include: + - include/freetype2 + - $PLATFORM/include/freetype2 + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml new file mode 100644 index 0000000..ccb589b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml @@ -0,0 +1,29 @@ +- dune-glew: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + cmake_target: null + mode: dr + source: https://github.com/nigels-com/glew.git + cmake_definitions: + - BUILD_SHARED_LIBS=ON + post_install: + - ./lib/* lib/ RECURSIVE + - ./include/* include/ RECURSIVE + build: + | + #!/bin/bash + pushd auto + make + popd + make -j $CORES + targets: + - GLEW: + info: + <<: *library_dynamic_exact + extra: + macos_64-clang_*-*: null + default: + system_depends: + - GL + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml new file mode 100644 index 0000000..04246cb --- /dev/null +++ 
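
The unit test in cryptopp.yml above routes one message through SHA-1, SHA-224, SHA-256 and SHA-512 via a ChannelSwitch and prints the hex digests. As a quick way to sanity-check what such a test should print, the same digests can be produced with Python's standard hashlib; this is only an editorial reference aid, not part of the cmaki unittest machinery:

# Reproduce the four digests hashed by the Crypto++ example above.
import hashlib

message = b"Now is the time for all good men to come to the aide of their country"
for name in ("sha1", "sha224", "sha256", "sha512"):
    digest = hashlib.new(name, message).hexdigest().upper()
    print(name.upper().ljust(8), digest)
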
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml @@ -0,0 +1,38 @@ +- dune-zlib: + <<: *thirdparty_defaults + version: 1.2.11.0 + mask: w + source: https://zlib.net/zlib-1.2.11.tar.gz + uncompress_strip: zlib-1.2.11 + unittest: + | + #include + int main() + { + z_stream infstream; + return 0; + } + targets: + - zlib: + info: + <<: *library_dynamic_exact + +- dune-zlib: + <<: *thirdparty_defaults + version: 1.2.11.0 + mask: mls + source: https://zlib.net/zlib-1.2.11.tar.gz + uncompress_strip: zlib-1.2.11 + unittest: + | + #include + int main() + { + z_stream infstream; + return 0; + } + targets: + - z: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml new file mode 100644 index 0000000..1dc4f97 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml @@ -0,0 +1,20 @@ +- fmod: + <<: *thirdparty_defaults + version: 1.0.1.0 + source: $NPP_SERVER/sources/fmodstudioapi11000linux.tar.gz + uncompress_strip: fmodstudioapi11000linux/api/lowlevel + post_install: + - ./lib/x86_64/* lib/ + - ./inc/*.h* include/ + build: + | + #!/bin/bash + echo installing fmod + targets: + - fmod: + info: + <<: *library_dynamic_exact + - fmodL: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml new file mode 100644 index 0000000..856f116 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml @@ -0,0 +1,36 @@ +- freeimage: + <<: *thirdparty_defaults + version: 3.1.7.0 + source: https://github.com/Kanma/FreeImage + cmake_target: null + post_install: + - ./lib/*.a lib/ + targets: + - freeimage: + info: + <<: *library_static_exact + - jpeg: + info: + <<: *library_static_exact + - mng: + info: + <<: *library_static_exact + - openexr: + info: + <<: *library_static_exact + - openjpeg: + info: + <<: *library_static_exact + - png: + info: + <<: *library_static_exact + - rawlite: + info: + <<: *library_static_exact + - tiff: + info: + <<: *library_static_exact + - zlib: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml new file mode 100644 index 0000000..c9352be --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml @@ -0,0 +1,40 @@ +- freeimage: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + version_manager: git + cmake_target: null + post_install: + - ./*.h include RECURSIVE + - ./lib/*.a lib + mode: dr + source: https://github.com/Kanma/FreeImage.git + targets: + - freeimage: + info: + <<: *library_static_exact + # - zlib: + # info: + # <<: *library_static_exact + - tiff: + info: + <<: *library_static_exact + - rawlite: + info: + <<: *library_static_exact + - png: + info: + <<: *library_static_exact + - openjpeg: + info: + <<: *library_static_exact + - openexr: + info: + <<: *library_static_exact + - mng: + info: 
+ <<: *library_static_exact + - jpeg: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml new file mode 100644 index 0000000..cf94535 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml @@ -0,0 +1,61 @@ +- google-gmock: + <<: *thirdparty_defaults + mask: w + source: https://github.com/google/googletest.git + branch: -b release-1.8.0 + post_install: + - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE + - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE + cmake_definitions: + - GTEST_LINKED_AS_SHARED_LIBRARY=1 + - BUILD_SHARED_LIBS=ON + - BUILD_GTEST=ON + - BUILD_GMOCK=ON + - gtest_build_samples=OFF + - gtest_build_tests=OFF + - gtest_disable_pthreads=OFF + - gmock_build_tests=OFF + - INSTALL_GTEST=ON + - INSTALL_GMOCK=ON + targets: + - gtest: + info: + <<: *library_dynamic_exact + - gmock: + info: + <<: *library_dynamic_exact + - gmock_main: + info: + <<: *library_dynamic_exact + + +- google-gmock: + <<: *thirdparty_defaults + mask: mls + source: https://github.com/google/googletest.git + branch: -b release-1.8.0 + post_install: + - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE + - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE + cmake_definitions: + - BUILD_SHARED_LIBS=OFF + - BUILD_GTEST=ON + - BUILD_GMOCK=ON + - gtest_build_samples=OFF + - gtest_build_tests=OFF + - gtest_disable_pthreads=OFF + - gmock_build_tests=OFF + - INSTALL_GTEST=ON + - INSTALL_GMOCK=ON + targets: + - gtest: + info: + <<: *library_static_exact + - gmock: + info: + <<: *library_static_exact + - gmock_main: + info: + <<: *library_static_exact + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml new file mode 100644 index 0000000..ffd8870 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml @@ -0,0 +1,11 @@ +- gwen: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + mode: dr + source: https://github.com/garrynewman/GWEN.git + targets: + - gwen: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml new file mode 100644 index 0000000..8c14ec5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml @@ -0,0 +1,71 @@ +- haxx-libcurl: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + source: https://github.com/curl/curl.git + branch: -b curl-7_59_0 + depends: + - openssl + cmake_definitions: + - CMAKE_USE_OPENSSL=ON + unittest: + | + #include + #include + int main() + { + CURL* curl = curl_easy_init(); + return 0; + } + references: + library_dynamic: &library_dynamic_curl + common: &library_dynamic_common_curl + include: + - $PLATFORM/include + - include + windows: &library_dynamic_windows_curl + <<: *library_dynamic_common_curl + dynamic: + debug: + dll: + lib$TARGET.dll + lib: + lib$TARGET_imp.lib + pdb: + lib$TARGET.pdb + relwithdebinfo: 
+ dll: + lib$TARGET.dll + lib: + lib$TARGET_imp.lib + pdb: + lib$TARGET.pdb + release: + dll: + lib$TARGET.dll + lib: + lib$TARGET_imp.lib + pdb: + null + + unix: &library_dynamic_unix_curl + <<: *library_dynamic_common_curl + dynamic: + debug: + so: + lib/lib$TARGET-d.so + relwithdebinfo: + so: + lib/lib$TARGET.so + release: + so: + lib/lib$TARGET.so + windows_*-msvc_*-*: + <<: *library_dynamic_windows_curl + default: + <<: *library_dynamic_unix_curl + targets: + - curl: + info: + <<: *library_dynamic_curl + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml new file mode 100644 index 0000000..e8920b1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml @@ -0,0 +1,26 @@ +- json: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + version_manager: git + post_install: + - ./src/*.h* include + cmake_target: null + source: https://github.com/nlohmann/json.git + branch: -b v3.0.1 + cmake_definitions: + - JSON_BuildTests=OFF + unittest: + | + #include + using json = nlohmann::json; + int main() + { + json j1; + return 0; + } + targets: + - dummy: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml new file mode 100644 index 0000000..05d54dd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml @@ -0,0 +1,24 @@ +- librocket: + <<: *thirdparty_defaults + version: 0.0.0.0 + mode: dr + cmake_target: null + post_install: + - ./Include/Rocket/*.h include/Rocket/ recursive + - ./Include/Rocket/*.inl include/Rocket/ recursive + version_manager: git + source: https://github.com/libRocket/libRocket.git + branch: -b stable + depends: + - dune-freetype + targets: + - RocketCore: + info: + <<: *library_dynamic_exact + - RocketDebugger: + info: + <<: *library_dynamic_exact + - RocketControls: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml new file mode 100644 index 0000000..7d76144 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml @@ -0,0 +1,10 @@ +- msgpack: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + source: https://github.com/msgpack/msgpack-c.git + targets: + - msgpackc: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml new file mode 100644 index 0000000..4cbfa70 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml @@ -0,0 +1,11 @@ +- noise: + <<: *thirdparty_defaults + version: 1.0.0.0 + mode: dr + source: http://downloads.sourceforge.net/project/libnoise/libnoise%20sources/1.0.0/libnoisesrc-1.0.0.zip + uncompress_strip: noise + targets: + - noise: + info: + <<: *library_dynamic_exact + diff --git 
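
The references block in haxx-libcurl.yml above spells out, per platform and build mode, how the physical library files for a target are named, with $TARGET presumably substituted for the concrete target name when the package's find scripts are generated. A rough Python sketch of how such a table could be resolved; the dictionary and function below are hypothetical stand-ins, not the actual cmaki resolver:

# Hypothetical resolver for a per-platform naming table like the one above.
NAMING = {
    "windows": {"debug":   {"dll": "lib$TARGET.dll", "lib": "lib$TARGET_imp.lib"},
                "release": {"dll": "lib$TARGET.dll", "lib": "lib$TARGET_imp.lib"}},
    "default": {"debug":   {"so": "lib/lib$TARGET-d.so"},
                "release": {"so": "lib/lib$TARGET.so"}},
}

def resolve(platform, build_mode, target):
    table = NAMING.get(platform, NAMING["default"])[build_mode]
    return {kind: tmpl.replace("$TARGET", target) for kind, tmpl in table.items()}

print(resolve("linux", "release", "curl"))   # {'so': 'lib/libcurl.so'}
print(resolve("windows", "debug", "curl"))   # {'dll': 'libcurl.dll', 'lib': 'libcurl_imp.lib'}
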
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml new file mode 100644 index 0000000..06bada0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml @@ -0,0 +1,19 @@ +- ois: + <<: *thirdparty_defaults + version: 1.3.0.0 + mode: dr + source: http://downloads.sourceforge.net/project/wgois/Source%20Release/1.3/ois_v1-3.tar.gz + uncompress_strip: ois-v1-3 + build: + | + #!/bin/bash + # depends: libxaw7-dev + source find.script + chmod +x bootstrap + ./bootstrap + ./configure --prefix=$ois_HOME && make -j $CORES && make -j $CORES install + exit $? + targets: + - OIS: + info: + <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml new file mode 100644 index 0000000..4011d09 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml @@ -0,0 +1,24 @@ +- openssl: + <<: *thirdparty_defaults + source: https://github.com/pol51/OpenSSL-CMake.git + branch: -b OpenSSL_1_1_0 + build: + | + #!/bin/bash + # if [[ $BUILD_MODE == 'Debug' ]] + # then + # ./Configure --openssldir=$SELFHOME debug-linux-x86_64 + # else + # ./Configure --openssldir=$SELFHOME linux-x86_64 + # fi + ./config --prefix=$SELFHOME + make + make install + targets: + - ssl: + info: + <<: *library_static_exact + - crypto: + info: + <<: *library_static_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml new file mode 100644 index 0000000..eb53ab4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml @@ -0,0 +1,25 @@ +- oxygine: + <<: *thirdparty_defaults + cmake_target: null + cmake_definitions: + - BUILD_SHARED_LIBS=OFF + - CMAKE_POSITION_INDEPENDENT_CODE=ON + - OX_HAVE_LIBJPEG=1 + - OX_HAVE_LIBPNG=1 + - OX_HAVE_LIBCURL=1 + - OX_HAVE_HTTP=0 + - OX_USE_SDL2=1 + depends: + - sdl2 + - freeimage + - haxx-libcurl + source: https://github.com/oxygine/oxygine-framework.git + targets: + - oxygine-framework: + info: + <<: *library_static_exact + extra: + default: + definitions: + - -DOXYGINE_SDL=1 + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml new file mode 100644 index 0000000..0d9c5f9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml @@ -0,0 +1,22 @@ +- paho-mqtt3: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + mode: dr + post_install: + - ./src/*.h include + source: https://github.com/eclipse/paho.mqtt.c.git + branch: -b develop + cmake_definitions: + - BUILD_SHARED_LIBS=ON + - BUILD_TESTING=OFF + - BUILD_STATIC=OFF + - BUILD_SHARED=ON + targets: + - paho-mqtt3c: + info: + <<: *library_dynamic_exact + - paho-mqtt3a: + info: + <<: *library_dynamic_exact + diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml new file mode 100644 index 0000000..5d52565 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml @@ -0,0 +1,21 @@ +- paho-mqttpp3: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + mode: dr + depends: + - paho-mqtt3 + post_install: + - ./src/mqtt/*.h include/mqtt + mode: dr + source: https://github.com/eclipse/paho.mqtt.cpp.git + cmake_definitions: + - BUILD_SHARED_LIBS=ON + - BUILD_TESTING=OFF + - BUILD_STATIC=OFF + - BUILD_SHARED=ON + - PAHO_WITH_SSL=OFF + targets: + - paho-mqttpp3: + info: + <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml new file mode 100644 index 0000000..df8c388 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml @@ -0,0 +1,11 @@ +- pugixml: + <<: *thirdparty_defaults + source: http://github.com/zeux/pugixml/releases/download/v1.8/pugixml-1.8.tar.gz + uncompress_strip: pugixml-1.8 + cmake_definitions: + - BUILD_SHARED_LIBS=ON + targets: + - pugixml: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml new file mode 100644 index 0000000..bc7cb10 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml @@ -0,0 +1,21 @@ +- python: + <<: *thirdparty_defaults + source: https://github.com/python-cmake-buildsystem/python-cmake-buildsystem.git + cmake_definitions: + - BUILD_SHARED=FALSE + - BUILD_STATIC=TRUE + targets: + - python3.5m: + info: + <<: *library_static_exact + extra: + default: + include: + - include/python3.5m + system_depends: + - dl + - util + - python: + info: + <<: *executable_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml new file mode 100644 index 0000000..643b0c7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml @@ -0,0 +1,11 @@ +- raknet: + <<: *thirdparty_defaults + cmake_target: null + source: https://github.com/facebookarchive/RakNet.git + post_install: + - ./Source/*.h* include/raknet/ + targets: + - RakNetDLL: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml new file mode 100644 index 0000000..5707070 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml @@ -0,0 +1,17 @@ +- restclient-cpp: + <<: *thirdparty_defaults + source: https://github.com/mrtazz/restclient-cpp + depends: + - haxx-libcurl + build: + | + #!/bin/bash + source 
$(pwd)/../haxx-libcurl/find.script + ./autogen.sh + CXXFLAGS=-I$haxx_libcurl_HOME/include ./configure --prefix=$SELFHOME + make install + targets: + - restclient-cpp: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml new file mode 100644 index 0000000..13d07b4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml @@ -0,0 +1,38 @@ +- sdl2: + <<: *thirdparty_defaults + mask: wl + version: 2.0.8.0 + source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz + uncompress_strip: SDL2-2.0.8 + depends: + - dune-glew + mode: dr + targets: + - SDL2-2.0: + info: + <<: *library_dynamic_exact + extra: + default: + include: + - include/SDL2 + + +- sdl2: + <<: *thirdparty_defaults + mask: m + version: 2.0.8.0 + source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz + uncompress_strip: SDL2-2.0.8 + depends: + - dune-glew + mode: dr + targets: + - SDL2: + info: + <<: *library_dynamic_exact + extra: + default: + include: + - include/SDL2 + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml new file mode 100644 index 0000000..29c143d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml @@ -0,0 +1,14 @@ +- spdlog: + <<: *thirdparty_defaults + version: 0.0.0.0 + version_manager: git + source: https://github.com/gabime/spdlog.git + branch: -b v0.16.3 + post_install: + - ./include/*.h* include/ RECURSIVE + - ./include/*.cc* include/ RECURSIVE + targets: + - dummy: + info: + <<: *library_dynamic + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml new file mode 100644 index 0000000..d01d5e7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml @@ -0,0 +1,49 @@ +- intel-tbb: + <<: *thirdparty_defaults + version: 4.4.0.0 + source: https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20150728oss_src.tgz + uncompress_strip: tbb44_20150728oss + build: + | + #!/bin/bash + source find.script + make info > info_.txt + tail -n +2 info_.txt > info.txt + source info.txt + make + code=$? 
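
The for-loop further down in this build script (the block that iterates over $(find $intel_tbb_HOME -name "*.so")) removes each copied lib*.so and recreates it as a symlink to its .so.2 sibling, so that linking against the unversioned name resolves to the versioned library. A Python sketch of the same fix-up, assuming intel_tbb_HOME points at the install prefix used here; it is an editorial illustration, not part of the tbb recipe:

# Replace each plain lib*.so with a relative symlink to "<name>.so.2",
# mirroring the shell loop in the tbb build step below.
import os

def relink_so(install_dir):
    for root, _dirs, files in os.walk(install_dir):
        for fname in files:
            if fname.endswith(".so"):
                path = os.path.join(root, fname)
                os.remove(path)                 # drop the copied file
                os.symlink(fname + ".2", path)  # e.g. libtbb.so -> libtbb.so.2

# relink_so("/path/to/intel_tbb_HOME")  # hypothetical invocation
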
+ # install + cp -Rf include/ $intel_tbb_HOME + if [[ $BUILD_MODE == 'Debug' ]] + then + cp -Rf build/${tbb_build_prefix}_debug/*.so* $intel_tbb_HOME + else + cp -Rf build/${tbb_build_prefix}_release/*.so* $intel_tbb_HOME + fi + for i in $(find $intel_tbb_HOME -name "*.so"); do + name=$(basename $i) + echo rm $i + echo ln -sf $name.2 $i + rm $i + ln -sf $name.2 $i + done + exit $code + + targets: + - tbb: + info: + <<: *library_dynamic_exact + extra: + \*-debug: + definitions: + - -DTBB_USE_DEBUG=1 + default: + definitions: + - -DTBB_USE_DEBUG=0 + - tbbmalloc: + info: + <<: *library_dynamic_exact + - tbbmalloc_proxy: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml new file mode 100644 index 0000000..34d5cc9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml @@ -0,0 +1,16 @@ +- yamlcpp: + <<: *thirdparty_defaults + mode: dr + version: 0.0.0.0 + version_manager: git + cmake_target: null + cmake_definitions: + - BUILD_SHARED_LIBS=ON + post_install: + - ./include/yaml-cpp/*.h include/yaml-cpp RECURSIVE + source: https://github.com/jbeder/yaml-cpp.git + targets: + - yaml-cpp: + info: + <<: *library_dynamic_exact + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py new file mode 100644 index 0000000..fcb2872 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py @@ -0,0 +1,139 @@ +import os +import sys +import utils +import logging +import hash_version +from itertools import product +from third_party import platforms +from third_party import get_identifier + + +def print_folder(folder): + for root, dirs, files in os.walk(folder): + path = root.split(os.sep) + logging.info((len(path) - 1) * '... ' + '%s/' % os.path.basename(root)) + for file in files: + logging.info(len(path) * '... 
' + '%s' % file) + + +def packing(node, parameters, compiler_replace_maps): + + package = node.get_package_name() + version_git = node.get_version() + packing = node.is_packing() + if not packing: + logging.warning('Skiping package: %s' % package) + return 0 + + manager = node.get_version_manager() + if manager == "git": + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) + revision_git = hash_version.get_last_changeset(build_directory, short=False) + version_old = node.get_version() + version_git = hash_version.to_cmaki_version(build_directory, revision_git) + logging.info('[git] Renamed version from %s to %s' % (version_old, version_git)) + + current_workspace = node.get_binary_workspace(plat) + current_base = node.get_base_folder() + oldversion = node.get_version() + try: + node.set_version(version_git) + updated_workspace = node.get_binary_workspace(plat) + updated_base = node.get_base_folder() + + current_base2 = os.path.join(current_workspace, current_base) + updated_base2 = os.path.join(current_workspace, updated_base) + logging.debug("from: %s" % current_base2) + logging.debug("to: %s" % updated_base2) + if current_base != updated_base: + utils.move_folder_recursive(current_base2, updated_base2) + logging.debug('-- copy from: {}, {}'.format(current_workspace, os.path.exists(current_workspace))) + logging.debug('-- copy to: {}, {}'.format(updated_workspace, os.path.exists(updated_workspace))) + utils.move_folder_recursive(current_workspace, updated_workspace) + finally: + node.set_version(oldversion) + + node.set_version(version_git) + version = node.get_version() + + # regenerate autoscripts with new version + node.generate_scripts_headers(compiler_replace_maps) + + # # generate versions.cmake + node.generate_3rdpartyversion(parameters.prefix) + + precmd = '' + if utils.is_windows(): + precmd = 'cmake -E ' + + folder_3rdparty = parameters.third_party_dir + output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) + utils.trymkdir(output_3rdparty) + + folder_mark = os.path.join(parameters.prefix, node.get_base_folder()) + utils.trymkdir(folder_mark) + + utils.superverbose(parameters, '*** [%s] Generation cmakefiles *** %s' % (package, output_3rdparty)) + errors = node.generate_cmakefiles(platforms, output_3rdparty, compiler_replace_maps) + logging.debug('errors generating cmakefiles: %d' % errors) + node.ret += abs(errors) + + for plat in platforms: + utils.superverbose(parameters, '*** [%s (%s)] Generating package .tar.gz (%s) ***' % (package, version, plat)) + workspace = node.get_workspace(plat) + current_workspace = node.get_binary_workspace(plat) + utils.trymkdir(current_workspace) + with utils.working_directory(current_workspace): + + logging.info('working directory: {}'.format(current_workspace)) + + if utils.is_windows(): + utils.safe_system('del /s *.ilk') + utils.safe_system('del /s *.exp') + + current_base = node.get_base_folder() + prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) + prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) + + logging.info('generating package %s from source %s' % (prefix_package, os.path.join(os.getcwd(), current_base))) + logging.info('generating md5file %s' % prefix_package_md5) + print_folder(current_base) + + # packing install + gen_targz = "%star zcvf %s %s" % (precmd, prefix_package, current_base) + + node.ret += abs( node.safe_system(gen_targz, 
compiler_replace_maps) ) + if not os.path.exists(prefix_package): + logging.error('No such file: {}'.format(prefix_package)) + return False + + # calculate md5 file + package_md5 = utils.md5sum(prefix_package) + logging.debug("new package {}, with md5sum {}".format(prefix_package, package_md5)) + with open(prefix_package_md5, 'wt') as f: + f.write('%s\n' % package_md5) + + # packing cmakefiles (more easy distribution) + if not parameters.no_packing_cmakefiles: + for plat in platforms: + current_base = node.get_base_folder() + prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (current_base, plat)) + with utils.working_directory(folder_3rdparty): + + logging.info('working directory: {}'.format(folder_3rdparty)) + + logging.debug('working dir: %s' % folder_3rdparty) + logging.info('generating package cmake %s' % prefix_package_cmake) + print_folder(current_base) + + gen_targz_cmake = '{}tar zcvf {} {}'.format(precmd, prefix_package_cmake, current_base) + node.ret += abs( node.safe_system(gen_targz_cmake, compiler_replace_maps) ) + if not os.path.exists(prefix_package_cmake): + logging.error('No such file: {}'.format(prefix_package_cmake)) + return False + + # finish well + return True + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt new file mode 100644 index 0000000..dcb2251 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt @@ -0,0 +1,75 @@ + +#******************************************************************************* +# Copyright (c) 2016 +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Eclipse Public License v1.0 +# and Eclipse Distribution License v1.0 which accompany this distribution. +# +# The Eclipse Public License is available at +# http://www.eclipse.org/legal/epl-v10.html +# and the Eclipse Distribution License is available at +# http://www.eclipse.org/org/documents/edl-v10.php. +# +# Contributors: +# Guilherme Maciel Ferreira - initial version +#*******************************************************************************/ + +## Note: on OS X you should install XCode and the associated command-line tools + +## cmake flags +cmake_minimum_required(VERSION 3.1 FATAL_ERROR) + +## project name +project("paho-mqtt-cpp" LANGUAGES CXX) + +include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) +set(PAHO_MQTT_C_PATH "${paho_mqtt3_LIBDIR}" CACHE PATH "Add a path to paho.mqtt.c library and headers") + +## library name +set(PAHO_MQTT_CPP paho-mqttpp3) + +## build settings +set(PAHO_VERSION_MAJOR 0) +set(PAHO_VERSION_MINOR 9) +set(PAHO_VERSION_PATCH 0) + +set(CLIENT_VERSION ${PAHO_VERSION_MAJOR}.${PAHO_VERSION_MINOR}.${PAHO_VERSION_PATCH}) +set(CPACK_PACKAGE_VERSION_MAJOR ${PAHO_VERSION_MAJOR}) +set(CPACK_PACKAGE_VERSION_MINOR ${PAHO_VERSION_MINOR}) +set(CPACK_PACKAGE_VERSION_PATCH ${PAHO_VERSION_PATCH}) + +## build options +set(PAHO_BUILD_STATIC FALSE CACHE BOOL "Build static library") +set(PAHO_BUILD_SAMPLES FALSE CACHE BOOL "Build sample programs") +set(PAHO_BUILD_DOCUMENTATION FALSE CACHE BOOL "Create and install the HTML based API documentation (requires Doxygen)") +set(PAHO_MQTT_C paho-mqtt3a) +SET(PAHO_WITH_SSL TRUE CACHE BOOL "Flag that defines whether to build ssl-enabled binaries too. 
") + +## build flags +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + +## build directories + +add_subdirectory(src) +add_subdirectory(src/mqtt) + +if(PAHO_BUILD_SAMPLES) + add_subdirectory(src/samples) +endif() + +if(PAHO_BUILD_DOCUMENTATION) + add_subdirectory(doc) +endif() + +## packaging settings +if(WIN32) + set(CPACK_GENERATOR "ZIP") +elseif(UNIX) + set(CPACK_GENERATOR "TGZ") +endif() + +include(CPack) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt new file mode 100644 index 0000000..d35ab8b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt @@ -0,0 +1,161 @@ +#******************************************************************************* +# Copyright (c) 2016 +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Eclipse Public License v1.0 +# and Eclipse Distribution License v1.0 which accompany this distribution. +# +# The Eclipse Public License is available at +# http://www.eclipse.org/legal/epl-v10.html +# and the Eclipse Distribution License is available at +# http://www.eclipse.org/org/documents/edl-v10.php. +# +# Contributors: +# Guilherme Maciel Ferreira - initial version +#*******************************************************************************/ + +## Note: on OS X you should install XCode and the associated command-line tools + +include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) +set(paho_mqtt3_LIBRARIES paho-mqtt3c paho-mqtt3a) +link_directories("${paho_mqtt3_LIBDIR}") +include_directories("${paho_mqtt3_INCLUDE}") +# TODO: use find_package +# find_package(paho-mqtt3 REQUIRED) + +## include directories +include_directories(${CMAKE_CURRENT_SOURCE_DIR}) + + +## libraries +if(WIN32) + set(LIBS_SYSTEM + ws2_32) +elseif(UNIX) + if(CMAKE_SYSTEM_NAME MATCHES "Linux") + set(LIB_DL dl) + endif() + set(LIBS_SYSTEM + ${LIB_DL} + c + stdc++ + pthread) +endif() + +## use Object Library to optimize compilation +set(COMMON_SRC + async_client.cpp + client.cpp + disconnect_options.cpp + iclient_persistence.cpp + message.cpp + response_options.cpp + ssl_options.cpp + string_collection.cpp + token.cpp + topic.cpp + connect_options.cpp + will_options.cpp) + +if(PAHO_WITH_SSL) + add_definitions(-DOPENSSL) +endif() + +add_library(common_obj OBJECT + ${COMMON_SRC}) + +## set position independent flag (-fPIC on Unix) +set_property(TARGET common_obj + PROPERTY POSITION_INDEPENDENT_CODE ON) + +## create the shared library +add_library(${PAHO_MQTT_CPP} SHARED + $) + +## add dependencies to the shared library +target_link_libraries(${PAHO_MQTT_CPP} + ${LIBS_SYSTEM}) + +## set the shared library soname +set_target_properties(${PAHO_MQTT_CPP} PROPERTIES + VERSION ${CLIENT_VERSION} + SOVERSION ${PAHO_VERSION_MAJOR}) + +## install the shared library +install(TARGETS ${PAHO_MQTT_CPP} + ARCHIVE DESTINATION lib + LIBRARY DESTINATION lib + RUNTIME DESTINATION bin) + +## build static version of the Paho MQTT C++ library +if(PAHO_BUILD_STATIC) + ## create the static library + add_library(${PAHO_MQTT_CPP}-static STATIC + $) + + ## add dependencies to the static library + target_link_libraries(${PAHO_MQTT_CPP}-static + ${LIBS_SYSTEM}) + + ## install the static library + install(TARGETS 
${PAHO_MQTT_CPP}-static + ARCHIVE DESTINATION lib + LIBRARY DESTINATION lib) +endif() + +## extract Paho MQTT C include directory +get_filename_component(PAHO_MQTT_C_DEV_INC_DIR ${PAHO_MQTT_C_PATH}/src ABSOLUTE) +get_filename_component(PAHO_MQTT_C_STD_INC_DIR ${PAHO_MQTT_C_PATH}/include ABSOLUTE) +set(PAHO_MQTT_C_INC_DIR + ${PAHO_MQTT_C_DEV_INC_DIR} + ${PAHO_MQTT_C_STD_INC_DIR}) + +## extract Paho MQTT C library directory +get_filename_component(PAHO_MQTT_C_DEV_LIB_DIR ${PAHO_MQTT_C_PATH}/build/output ABSOLUTE) +get_filename_component(PAHO_MQTT_C_STD_LIB_DIR ${PAHO_MQTT_C_PATH}/lib ABSOLUTE) +get_filename_component(PAHO_MQTT_C_STD64_LIB_DIR ${PAHO_MQTT_C_PATH}/lib64 ABSOLUTE) +set(PAHO_MQTT_C_LIB_DIR + ${PAHO_MQTT_C_DEV_LIB_DIR} + ${PAHO_MQTT_C_STD_LIB_DIR} + ${PAHO_MQTT_C_STD64_LIB_DIR}) + +## extract Paho MQTT C binary directory (Windows may place libraries there) +get_filename_component(PAHO_MQTT_C_BIN_DIR ${PAHO_MQTT_C_PATH}/bin ABSOLUTE) + +## add library suffixes so Windows can find Paho DLLs +set(CMAKE_FIND_LIBRARY_PREFIXES ${CMAKE_FIND_LIBRARY_PREFIXES} "") +set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES} ".dll" ".lib") + +if(PAHO_WITH_SSL) + ## find the Paho MQTT C SSL library + find_library(PAHO_MQTT_C_LIB + NAMES paho-mqtt3as + mqtt3as + PATHS ${PAHO_MQTT_C_LIB_DIR} + ${PAHO_MQTT_C_BIN_DIR}) + + find_package(OpenSSL REQUIRED) +else() + ## find the Paho MQTT C library + find_library(PAHO_MQTT_C_LIB + NAMES paho-mqtt3a + mqtt + paho-mqtt + mqtt3 + paho-mqtt3 + mqtt3a + PATHS ${PAHO_MQTT_C_LIB_DIR} + ${PAHO_MQTT_C_BIN_DIR}) +endif() + +## use the Paho MQTT C library if found. Otherwise terminate the compilation +if(${PAHO_MQTT_C_LIB} STREQUAL "PAHO_MQTT_C_LIB-NOTFOUND") + message(FATAL_ERROR "Could not find Paho MQTT C library") +else() + include_directories(${PAHO_MQTT_C_INC_DIR}) + link_directories(${PAHO_MQTT_C_LIB_DIR}) + target_link_libraries(${PAHO_MQTT_CPP} + ${PAHO_MQTT_C_LIB} + ${paho_mqtt3_LIBRARIES}) +endif() + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py new file mode 100644 index 0000000..d0c44ed --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py @@ -0,0 +1,287 @@ +import os +import sys +import logging +import contextlib +import utils +import shutil +from third_party import exceptions_fail_group +from third_party import exceptions_fail_program +from third_party import FailThirdParty + + +def make_pipe(): + def process(): + pass + return process + + +def end_pipe(): + def process(p): + _ = list(p) + return process + + +def _create(): + b = make_pipe() + e = yield b + end_pipe()(e) + yield + + +@contextlib.contextmanager +def create(): + c = _create() + p = next(c) + yield (p, c) + + +def feed(packages): + def process(_): + for node in packages: + yield node + return process + + +def do(function, force, *args, **kwargs): + ''' + skeleton gtc stage + ''' + def process(packages): + def _process(): + for node in packages: + try: + package = node.get_package_name() + version = node.get_version() + + if not force: + # skip process if package came with error + if node.ret != 0: + logging.info('%s %s error detected: skiping' % (function.__name__, package)) + continue + + # skip process if package came interrupted + if node.interrupted: + logging.info('%s %s error detected: skiping' % (function.__name__, package)) + continue + + if 
function.__name__ != 'purge': + logger_function = logging.info + else: + logger_function = logging.debug + + logger_function('--------- begin@%s: %s (%s) --------' % (function.__name__, package, version)) + + # process package + ret = function(node, *args, **kwargs) + logging.debug('%s: return %s' % (function.__name__, ret)) + if isinstance(ret, bool): + if not ret: + node.ret += 1 + elif isinstance(ret, int): + # aggregation result + node.ret += abs(ret) + else: + logging.error('%s %s error invalid return: %s' % (function.__name__, package, ret)) + node.ret += 1 + + logger_function('--------- end@%s: %s (%s) --------' % (function.__name__, package, version)) + + if node.ret != 0: + node.fail_stage = function.__name__ + raise FailThirdParty('[exception] %s fail in stage: %s' % (package, function.__name__)) + + except FailThirdParty: + logging.error('fatal exception in package %s (%s)' % (package, version)) + node.ret += 1 + node.fail_stage = function.__name__ + raise + except exceptions_fail_group: + logging.error('fatal exception in package %s (%s)' % (package, version)) + node.ret += 1 + # add exception for show postponed + node.exceptions.append(sys.exc_info()) + node.fail_stage = function.__name__ + raise + except exceptions_fail_program: + logging.error('interruption in package %s (%s)' % (package, version)) + node.ret += 1 + node.fail_stage = function.__name__ + node.interrupted = True + raise + except: + # excepciones por fallos de programacion + logging.error('Postponed exception in package %s (%s)' % (package, version)) + node.ret += 1 + node.exceptions.append(sys.exc_info()) + node.fail_stage = function.__name__ + finally: + # send to next step + yield node + + for node in _process(): + yield node + return process + +####################### PIPELINE PROOF CONCEPT (UNDER CODE IS NOT USED) ############### + + +def echo(line): + def process(_): + yield line + return process + + +def cat(): + def process(p): + for line in p: + if(os.path.exists(line)): + with open(line, 'rt') as f: + for line2 in f: + yield line2 + else: + logging.warning(' filename %s not exists' % line) + return process + + +def find(folder, level=999): + def process(_): + for root, dirs, files in utils.walklevel(folder, level): + for name in files: + yield os.path.join(root, name) + return process + + +def grep(pattern): + def process(p): + for line in p: + if line.find(pattern) != -1: + yield line + return process + + +def grep_basename(pattern): + def process(p): + p0 = pattern[:1] + pL = pattern[-1:] + fixed_pattern = pattern.replace('*', '') + for line in p: + if(p0 == '*' and pL != '*'): + if os.path.basename(line).endswith(fixed_pattern): + yield line.replace('\\', '/') + elif(p0 != '*' and pL == '*'): + if os.path.basename(line).startswith(fixed_pattern): + yield line.replace('\\', '/') + else: + if os.path.basename(line).find(fixed_pattern) != -1: + yield line.replace('\\', '/') + return process + + +def grep_v(pattern): + def process(p): + for line in p: + if line.find(pattern) == -1: + yield line + return process + + +def endswith(pattern): + def process(p): + for line in p: + if line.endswith(pattern): + yield line + return process + + +def copy(rootdir, folder): + def process(p): + for line in p: + relfilename = os.path.relpath(line, rootdir) + destiny = os.path.join(folder, relfilename) + destiny_dir = os.path.dirname(destiny) + utils.trymkdir(destiny_dir) + shutil.copyfile(line, destiny) + if not os.path.exists(destiny): + raise Exception("Not exists %s" % destiny) + yield destiny + return process + 
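
The helpers in this proof-of-concept section all share one shape: a stage is a function that returns a generator transformer, and a pipeline is assembled by repeatedly rebinding p = stage(args)(p), exactly as example_simple() does further below. A tiny self-contained illustration of that composition style, using toy stages that are not part of pipeline.py:

# Toy stages written in the same style as the pipeline helpers in this file.
def numbers(limit):
    def process(_):
        for i in range(limit):
            yield i
    return process

def keep_even():
    def process(p):
        for i in p:
            if i % 2 == 0:
                yield i
    return process

p = None               # seed value; the first stage ignores its input
p = numbers(10)(p)
p = keep_even()(p)
print(list(p))         # [0, 2, 4, 6, 8]
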
+ +def startswith(pattern): + def process(p): + for line in p: + if line.startswith(pattern): + yield line + return process + + +def printf(prefix = ''): + def process(p): + for line in p: + print("%s%s" % (prefix, line.rstrip())) + yield line + return process + + +def info(prefix = ''): + def process(p): + for line in p: + logging.info("%s%s" % (prefix, line.rstrip())) + yield line + return process + + +def debug(prefix = ''): + def process(p): + for line in p: + logging.debug("%s%s" % (prefix, line.rstrip())) + yield line + return process + + +def write_file(filename, mode='wt'): + def process(p): + content = [] + for line in p: + content.append(line) + with open(filename, mode) as f: + for line in content: + f.write('%s\n' % line.rstrip()) + for line in content: + yield line + return process + + +def tee(filename): + def process(p): + p = printf()(p) + p = write_file(filename)(p) + for line in p: + yield line + return process + + +def example_context(): + # using context + with create() as (p, finisher): + p = find('.')(p) + p = endswith('.cpp')(p) + p = cat()(p) + p = tee('result.txt')(p) + # send last part + finisher.send(p) + + +def example_simple(): + # not using context + p = make_pipe() + # begin + p = find('.', 2)(p) + p = endswith('.yml')(p) + p = grep_v('.build_')(p) + p = tee('result.txt')(p) + # end + end_pipe()(p) + +if __name__ == '__main__': + example_simple() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py new file mode 100644 index 0000000..d15de46 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py @@ -0,0 +1,72 @@ +import os +import sys +import utils +import logging +import shutil +from third_party import platforms +from third_party import build_unittests_foldername +from itertools import product +from third_party import prefered + + +def prepare(node, parameters, compiler_replace_maps): + + package = node.get_package_name() + + # source folder + source_dir = os.path.join(os.getcwd(), package) + utils.trymkdir(source_dir) + + # generate .build.sh / .build.cmd if is defined in yaml + node.get_generate_custom_script(source_dir) + + # generate find.script / find.cmd + node.generate_scripts_headers(compiler_replace_maps) + + # read root CMakeLists.txt + with open('CMakeLists.txt', 'rt') as f: + content_cmakelists = f.read() + + # OJO: dejar de borrar cuando reciclemos binarios + node.remove_packages() + + # run_tests or packing + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + logging.info('Preparing mode %s - %s' % (plat, build_mode)) + build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) + utils.trymkdir(build_directory) + + # download source and prepare in build_directory + node.prepare_third_party(build_directory, compiler_replace_maps) + + # copy source files to build + logging.debug('Copy sources to build: %s -> %s' % (source_dir, build_directory)) + utils.copy_folder_recursive(source_dir, build_directory) + + # before copy files + with utils.working_directory(build_directory): + for bc in node.get_before_copy(): + chunks = [x.strip() for x in bc.split(' ') if x] + if len(chunks) != 2: + raise Exception('Invalid value in before_copy: %s' % bc) + logging.debug('Copy "%s" to "%s"' % (chunks[0], chunks[1])) + shutil.copy2(chunks[0], chunks[1]) + + # if have cmakelists, 
insert root cmakelists header + cmake_prefix = node.get_cmake_prefix() + build_cmakelist = os.path.join(build_directory, cmake_prefix, 'CMakeLists.txt') + if os.path.exists(build_cmakelist) and (not node.has_custom_script(source_dir)): + with open(build_cmakelist, 'rt') as f: + content_cmakelists_package = f.read() + with open(build_cmakelist, 'wt') as f: + f.write('%s\n' % content_cmakelists) + f.write('%s\n' % content_cmakelists_package) + + if parameters.fast: + logging.debug('skipping for because is in fast mode: "prepare"') + break + + # finish well + return True + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py new file mode 100644 index 0000000..2349465 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py @@ -0,0 +1,36 @@ +import os +import utils +import logging +from third_party import platforms + +def purge(node, parameters): + + package = node.get_package_name() + + logging.debug("Cleaning headers and cmakefiles %s" % package) + node.remove_scripts_headers() + node.remove_cmakefiles() + + logging.debug("Cleaning download %s" % package) + uncompress_directory = node.get_download_directory() + utils.tryremove_dir(uncompress_directory) + + original_directory = node.get_original_directory() + utils.tryremove_dir(original_directory) + + for plat in platforms: + + if not node.get_exclude_from_clean(): + logging.debug("Cleaning install %s" % package) + utils.tryremove_dir(node.get_install_directory(plat)) + + build_modes = node.get_build_modes() + for build_mode in build_modes: + + logging.debug("Cleaning build %s" % package) + build_directory = node.get_build_directory(plat, build_mode) + utils.tryremove_dir(build_directory) + + # finish well + return True + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt new file mode 100644 index 0000000..618b3f8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt @@ -0,0 +1,34 @@ +cmake_minimum_required(VERSION 2.6) +project(RakNetLibStatic) + +FILE(GLOB ALL_HEADER_SRCS ${RakNet_SOURCE_DIR}/Source/*.h) +FILE(GLOB ALL_CPP_SRCS ${RakNet_SOURCE_DIR}/Source/*.cpp) + +include_directories( ${RAKNET_INTERNAL_INCLUDE_DIRS} ) + +add_library(RakNetLibStatic STATIC ${ALL_CPP_SRCS} ${ALL_HEADER_SRCS} readme.txt) + +IF(WIN32 AND NOT UNIX) + SET( CMAKE_CXX_FLAGS "/D WIN32 /D _RAKNET_LIB /D _CRT_NONSTDC_NO_DEPRECATE /D _CRT_SECURE_NO_DEPRECATE /GS- /GR- ") +ENDIF(WIN32 AND NOT UNIX) + +IF(WIN32 AND NOT UNIX) + target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) + + IF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") + + IF( MSVC10 OR MSVC11 OR MSVC12 OR MSVC14 ) + set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:\"LIBCD.lib LIBCMTD.lib MSVCRT.lib\"" ) + ELSE() + set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:"LIBCD.lib LIBCMTD.lib MSVCRT.lib"" ) + ENDIF() + + ENDIF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") + +ELSE(WIN32 AND NOT UNIX) + target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) + INSTALL(TARGETS RakNetLibStatic DESTINATION 
${RakNet_SOURCE_DIR}/Lib/RakNetLibStatic) + INSTALL(FILES ${ALL_HEADER_SRCS} DESTINATION ${RakNet_SOURCE_DIR}/include/raknet) +ENDIF(WIN32 AND NOT UNIX) + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp new file mode 100644 index 0000000..8f20dfa --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp @@ -0,0 +1,372 @@ +/* + * Copyright (c) 2014, Oculus VR, Inc. + * All rights reserved. + * + * This source code is licensed under the BSD-style license found in the + * LICENSE file in the root directory of this source tree. An additional grant + * of patent rights can be found in the PATENTS file in the same directory. + * + */ + +#include "CCRakNetSlidingWindow.h" + +#if USE_SLIDING_WINDOW_CONGESTION_CONTROL==1 + +static const double UNSET_TIME_US=-1; + +#if CC_TIME_TYPE_BYTES==4 +static const CCTimeType SYN=10; +#else +static const CCTimeType SYN=10000; +#endif + +#include "MTUSize.h" +#include +#include +#include +#include "RakAssert.h" +#include "RakAlloca.h" + +using namespace RakNet; + +// ****************************************************** PUBLIC METHODS ****************************************************** + +CCRakNetSlidingWindow::CCRakNetSlidingWindow() +{ +} +// ---------------------------------------------------------------------------------------------------------------------------- +CCRakNetSlidingWindow::~CCRakNetSlidingWindow() +{ + +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::Init(CCTimeType curTime, uint32_t maxDatagramPayload) +{ + (void) curTime; + + lastRtt=estimatedRTT=deviationRtt=UNSET_TIME_US; + RakAssert(maxDatagramPayload <= MAXIMUM_MTU_SIZE); + MAXIMUM_MTU_INCLUDING_UDP_HEADER=maxDatagramPayload; + cwnd=maxDatagramPayload; + ssThresh=0.0; + oldestUnsentAck=0; + nextDatagramSequenceNumber=0; + nextCongestionControlBlock=0; + backoffThisBlock=speedUpThisBlock=false; + expectedNextSequenceNumber=0; + _isContinuousSend=false; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::Update(CCTimeType curTime, bool hasDataToSendOrResend) +{ + (void) curTime; + (void) hasDataToSendOrResend; +} +// ---------------------------------------------------------------------------------------------------------------------------- +int CCRakNetSlidingWindow::GetRetransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) +{ + (void) curTime; + (void) isContinuousSend; + (void) timeSinceLastTick; + + return unacknowledgedBytes; +} +// ---------------------------------------------------------------------------------------------------------------------------- +int CCRakNetSlidingWindow::GetTransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) +{ + (void) curTime; + (void) timeSinceLastTick; + + _isContinuousSend=isContinuousSend; + + if (unacknowledgedBytes<=cwnd) + return (int) (cwnd-unacknowledgedBytes); + else + return 0; +} +// 
---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::ShouldSendACKs(CCTimeType curTime, CCTimeType estimatedTimeToNextTick) +{ + CCTimeType rto = GetSenderRTOForACK(); + (void) estimatedTimeToNextTick; + + // iphone crashes on comparison between double and int64 http://www.jenkinssoftware.com/forum/index.php?topic=2717.0 + if (rto==(CCTimeType) UNSET_TIME_US) + { + // Unknown how long until the remote system will retransmit, so better send right away + return true; + } + + return curTime >= oldestUnsentAck + SYN; +} +// ---------------------------------------------------------------------------------------------------------------------------- +DatagramSequenceNumberType CCRakNetSlidingWindow::GetNextDatagramSequenceNumber(void) +{ + return nextDatagramSequenceNumber; +} +// ---------------------------------------------------------------------------------------------------------------------------- +DatagramSequenceNumberType CCRakNetSlidingWindow::GetAndIncrementNextDatagramSequenceNumber(void) +{ + DatagramSequenceNumberType dsnt=nextDatagramSequenceNumber; + nextDatagramSequenceNumber++; + return dsnt; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendBytes(CCTimeType curTime, uint32_t numBytes) +{ + (void) curTime; + (void) numBytes; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnGotPacketPair(DatagramSequenceNumberType datagramSequenceNumber, uint32_t sizeInBytes, CCTimeType curTime) +{ + (void) curTime; + (void) sizeInBytes; + (void) datagramSequenceNumber; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::OnGotPacket(DatagramSequenceNumberType datagramSequenceNumber, bool isContinuousSend, CCTimeType curTime, uint32_t sizeInBytes, uint32_t *skippedMessageCount) +{ + (void) curTime; + (void) sizeInBytes; + (void) isContinuousSend; + + if (oldestUnsentAck==0) + oldestUnsentAck=curTime; + + if (datagramSequenceNumber==expectedNextSequenceNumber) + { + *skippedMessageCount=0; + expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; + } + else if (GreaterThan(datagramSequenceNumber, expectedNextSequenceNumber)) + { + *skippedMessageCount=datagramSequenceNumber-expectedNextSequenceNumber; + // Sanity check, just use timeout resend if this was really valid + if (*skippedMessageCount>1000) + { + // During testing, the nat punchthrough server got 51200 on the first packet. 
I have no idea where this comes from, but has happened twice + if (*skippedMessageCount>(uint32_t)50000) + return false; + *skippedMessageCount=1000; + } + expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; + } + else + { + *skippedMessageCount=0; + } + + return true; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnResend(CCTimeType curTime, RakNet::TimeUS nextActionTime) +{ + (void) curTime; + (void) nextActionTime; + + if (_isContinuousSend && backoffThisBlock==false && cwnd>MAXIMUM_MTU_INCLUDING_UDP_HEADER*2) + { + // Spec says 1/2 cwnd, but it never recovers because cwnd increases too slowly + //ssThresh=cwnd-8.0 * (MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd); + ssThresh=cwnd/2; + if (ssThresh ssThresh && ssThresh!=0) + cwnd = ssThresh + MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; + + // CC PRINTF + // printf("++ %.0f Slow start increase.\n", cwnd); + + } + else if (isNewCongestionControlPeriod) + { + cwnd+=MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; + + // CC PRINTF + // printf("+ %.0f Congestion avoidance increase.\n", cwnd); + } +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnDuplicateAck( CCTimeType curTime, DatagramSequenceNumberType sequenceNumber ) +{ + (void) curTime; + (void) sequenceNumber; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendAckGetBAndAS(CCTimeType curTime, bool *hasBAndAS, BytesPerMicrosecond *_B, BytesPerMicrosecond *_AS) +{ + (void) curTime; + (void) _B; + (void) _AS; + + *hasBAndAS=false; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendAck(CCTimeType curTime, uint32_t numBytes) +{ + (void) curTime; + (void) numBytes; + + oldestUnsentAck=0; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::OnSendNACK(CCTimeType curTime, uint32_t numBytes) +{ + (void) curTime; + (void) numBytes; + +} +// ---------------------------------------------------------------------------------------------------------------------------- +CCTimeType CCRakNetSlidingWindow::GetRTOForRetransmission(unsigned char timesSent) const +{ + (void) timesSent; + +#if CC_TIME_TYPE_BYTES==4 + const CCTimeType maxThreshold=2000; + //const CCTimeType minThreshold=100; + const CCTimeType additionalVariance=30; +#else + const CCTimeType maxThreshold=2000000; + //const CCTimeType minThreshold=100000; + const CCTimeType additionalVariance=30000; +#endif + + + if (estimatedRTT==UNSET_TIME_US) + return maxThreshold; + + //double u=1.0f; + double u=2.0f; + double q=4.0f; + + CCTimeType threshhold = (CCTimeType) (u * estimatedRTT + q * deviationRtt) + additionalVariance; + if (threshhold > maxThreshold) + return maxThreshold; + return threshhold; +} +// ---------------------------------------------------------------------------------------------------------------------------- +void CCRakNetSlidingWindow::SetMTU(uint32_t bytes) +{ + RakAssert(bytes < MAXIMUM_MTU_SIZE); + MAXIMUM_MTU_INCLUDING_UDP_HEADER=bytes; +} +// 
---------------------------------------------------------------------------------------------------------------------------- +uint32_t CCRakNetSlidingWindow::GetMTU(void) const +{ + return MAXIMUM_MTU_INCLUDING_UDP_HEADER; +} +// ---------------------------------------------------------------------------------------------------------------------------- +BytesPerMicrosecond CCRakNetSlidingWindow::GetLocalReceiveRate(CCTimeType currentTime) const +{ + (void) currentTime; + + return 0; // TODO +} +// ---------------------------------------------------------------------------------------------------------------------------- +double CCRakNetSlidingWindow::GetRTT(void) const +{ + if (lastRtt==UNSET_TIME_US) + return 0.0; + return lastRtt; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::GreaterThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) +{ + // a > b? + const DatagramSequenceNumberType halfSpan =(DatagramSequenceNumberType) (((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2); + return b!=a && b-a>halfSpan; +} +// ---------------------------------------------------------------------------------------------------------------------------- +bool CCRakNetSlidingWindow::LessThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) +{ + // a < b? + const DatagramSequenceNumberType halfSpan = ((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2; + return b!=a && b-aGetNetworkID() < data->replica->GetNetworkID()) + return -1; + if (replica3->GetNetworkID() > data->replica->GetNetworkID()) + return 1; + */ + + // 7/28/2013 - If GetNetworkID chagned during runtime, the list would be out of order and lookup would always fail or go out of bounds + // I remember before that I could not directly compare + if (replica3->referenceIndex < data->replica->referenceIndex) + return -1; + if (replica3->referenceIndex > data->replica->referenceIndex) + return 1; + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +LastSerializationResult::LastSerializationResult() +{ + replica=0; + lastSerializationResultBS=0; + whenLastSerialized = RakNet::GetTime(); +} +LastSerializationResult::~LastSerializationResult() +{ + if (lastSerializationResultBS) + RakNet::OP_DELETE(lastSerializationResultBS,_FILE_AND_LINE_); +} +void LastSerializationResult::AllocBS(void) +{ + if (lastSerializationResultBS==0) + { + lastSerializationResultBS=RakNet::OP_NEW(_FILE_AND_LINE_); + } +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +ReplicaManager3::ReplicaManager3() +{ + defaultSendParameters.orderingChannel=0; + defaultSendParameters.priority=HIGH_PRIORITY; + defaultSendParameters.reliability=RELIABLE_ORDERED; + defaultSendParameters.sendReceipt=0; + autoSerializeInterval=30; + lastAutoSerializeOccurance=0; + autoCreateConnections=true; + autoDestroyConnections=true; + currentlyDeallocatingReplica=0; + + for (unsigned int i=0; i < 255; i++) + worldsArray[i]=0; + + AddWorld(0); +} + +// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +ReplicaManager3::~ReplicaManager3() +{ + if (autoDestroyConnections) + { + for (unsigned int i=0; i < worldsList.Size(); i++) + { + RakAssert(worldsList[i]->connectionList.Size()==0); + } + } + Clear(true); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetAutoManageConnections(bool autoCreate, bool autoDestroy) +{ + autoCreateConnections=autoCreate; + autoDestroyConnections=autoDestroy; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::GetAutoCreateConnections(void) const +{ + return autoCreateConnections; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::GetAutoDestroyConnections(void) const +{ + return autoDestroyConnections; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::AutoCreateConnectionList( + DataStructures::List &participantListIn, + DataStructures::List &participantListOut, + WorldId worldId) +{ + for (unsigned int index=0; index < participantListIn.Size(); index++) + { + if (GetConnectionByGUID(participantListIn[index], worldId)) + { + Connection_RM3 *connection = AllocConnection(rakPeerInterface->GetSystemAddressFromGuid(participantListIn[index]), participantListIn[index]); + if (connection) + { + PushConnection(connection); + participantListOut.Push(connection, _FILE_AND_LINE_); + } + } + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::PushConnection(RakNet::Connection_RM3 *newConnection, WorldId worldId) +{ + if (newConnection==0) + return false; + if (GetConnectionByGUID(newConnection->GetRakNetGUID(), worldId)) + return false; + // Was this intended? 
+ RakAssert(newConnection->GetRakNetGUID()!=rakPeerInterface->GetMyGUID()); + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index = world->connectionList.GetIndexOf(newConnection); + if (index==(unsigned int)-1) + { + world->connectionList.Push(newConnection,_FILE_AND_LINE_); + + // Send message to validate the connection + newConnection->SendValidation(rakPeerInterface, worldId); + + Connection_RM3::ConstructionMode constructionMode = newConnection->QueryConstructionMode(); + if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int pushIdx; + for (pushIdx=0; pushIdx < world->userReplicaList.Size(); pushIdx++) + newConnection->OnLocalReference(world->userReplicaList[pushIdx], this); + } + } + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::DeallocReplicaNoBroadcastDestruction(RakNet::Connection_RM3 *connection, RakNet::Replica3 *replica3) +{ + currentlyDeallocatingReplica=replica3; + replica3->DeallocReplica(connection); + currentlyDeallocatingReplica=0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RakNet::Connection_RM3 * ReplicaManager3::PopConnection(unsigned int index, WorldId worldId) +{ + DataStructures::List replicaList; + DataStructures::List destructionList; + DataStructures::List broadcastList; + RakNet::Connection_RM3 *connection; + unsigned int index2; + RM3ActionOnPopConnection action; + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + connection=world->connectionList[index]; + + // Clear out downloadGroup + connection->ClearDownloadGroup(rakPeerInterface); + + RakNetGUID guid = connection->GetRakNetGUID(); + // This might be wrong, I am relying on the variable creatingSystemGuid which is transmitted + // automatically from the first system to reference the object. However, if an object changes + // owners then it is not going to be returned here, and therefore QueryActionOnPopConnection() + // will not be called for the new owner. 
+ GetReplicasCreatedByGuid(guid, replicaList); + + for (index2=0; index2 < replicaList.Size(); index2++) + { + action = replicaList[index2]->QueryActionOnPopConnection(connection); + replicaList[index2]->OnPoppedConnection(connection); + if (action==RM3AOPC_DELETE_REPLICA) + { + if (replicaList[index2]->GetNetworkIDManager()) + destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); + } + else if (action==RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION) + { + if (replicaList[index2]->GetNetworkIDManager()) + destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); + + broadcastList.Push( replicaList[index2], _FILE_AND_LINE_ ); + } + else if (action==RM3AOPC_DO_NOTHING) + { + for (unsigned int index3 = 0; index3 < connection->queryToSerializeReplicaList.Size(); index3++) + { + LastSerializationResult *lsr = connection->queryToSerializeReplicaList[index3]; + lsr->whenLastSerialized=0; + if (lsr->lastSerializationResultBS) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + lsr->lastSerializationResultBS->bitStream[z].Reset(); + } + } + } + } + + BroadcastDestructionList(broadcastList, connection->GetSystemAddress()); + for (index2=0; index2 < destructionList.Size(); index2++) + { + // Do lookup in case DeallocReplica destroyed one of of the later Replica3 instances in the list + Replica3* replicaToDestroy = world->networkIDManager->GET_OBJECT_FROM_ID(destructionList[index2]); + if (replicaToDestroy) + { + replicaToDestroy->PreDestruction(connection); + replicaToDestroy->DeallocReplica(connection); + } + } + + world->connectionList.RemoveAtIndex(index); + return connection; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RakNet::Connection_RM3 * ReplicaManager3::PopConnection(RakNetGUID guid, WorldId worldId) +{ + unsigned int index; + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetRakNetGUID()==guid) + { + return PopConnection(index, worldId); + } + } + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::Reference(RakNet::Replica3 *replica3, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index = ReferenceInternal(replica3, worldId); + + if (index!=(unsigned int)-1) + { + unsigned int pushIdx; + for (pushIdx=0; pushIdx < world->connectionList.Size(); pushIdx++) + { + Connection_RM3::ConstructionMode constructionMode = world->connectionList[pushIdx]->QueryConstructionMode(); + if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + world->connectionList[pushIdx]->OnLocalReference(replica3, this); + } + } + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +unsigned int ReplicaManager3::ReferenceInternal(RakNet::Replica3 *replica3, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && 
"World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + index = world->userReplicaList.GetIndexOf(replica3); + if (index==(unsigned int)-1) + { + RakAssert(world->networkIDManager); + replica3->SetNetworkIDManager(world->networkIDManager); + // If it crashes on rakPeerInterface==0 then you didn't call RakPeerInterface::AttachPlugin() + if (replica3->creatingSystemGUID==UNASSIGNED_RAKNET_GUID) + replica3->creatingSystemGUID=rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + replica3->replicaManager=this; + if (replica3->referenceIndex==(uint32_t)-1) + { + replica3->referenceIndex=nextReferenceIndex++; + } + world->userReplicaList.Push(replica3,_FILE_AND_LINE_); + return world->userReplicaList.Size()-1; + } + return (unsigned int) -1; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::Dereference(RakNet::Replica3 *replica3, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index, index2; + for (index=0; index < world->userReplicaList.Size(); index++) + { + if (world->userReplicaList[index]==replica3) + { + world->userReplicaList.RemoveAtIndex(index); + break; + } + } + + // Remove from all connections + for (index2=0; index2 < world->connectionList.Size(); index2++) + { + world->connectionList[index2]->OnDereference(replica3, this); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::DereferenceList(DataStructures::List &replicaListIn, WorldId worldId) +{ + unsigned int index; + for (index=0; index < replicaListIn.Size(); index++) + Dereference(replicaListIn[index], worldId); +} + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetReplicasCreatedByMe(DataStructures::List &replicaListOut, WorldId worldId) +{ + //RakNetGUID myGuid = rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + GetReplicasCreatedByGuid(rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS), replicaListOut, worldId); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetReferencedReplicaList(DataStructures::List &replicaListOut, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + replicaListOut=world->userReplicaList; +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetReplicasCreatedByGuid(RakNetGUID guid, DataStructures::List &replicaListOut, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + replicaListOut.Clear(false,_FILE_AND_LINE_); + unsigned int index; + for (index=0; index < world->userReplicaList.Size(); index++) 
+ { + if (world->userReplicaList[index]->creatingSystemGUID==guid) + replicaListOut.Push(world->userReplicaList[index],_FILE_AND_LINE_); + } +} + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +unsigned ReplicaManager3::GetReplicaCount(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->userReplicaList.Size(); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3 *ReplicaManager3::GetReplicaAtIndex(unsigned index, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->userReplicaList[index]; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +unsigned int ReplicaManager3::GetConnectionCount(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->connectionList.Size(); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3* ReplicaManager3::GetConnectionAtIndex(unsigned index, WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->connectionList[index]; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3* ReplicaManager3::GetConnectionBySystemAddress(const SystemAddress &sa, WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetSystemAddress()==sa) + { + return world->connectionList[index]; + } + } + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3* ReplicaManager3::GetConnectionByGUID(RakNetGUID guid, WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetRakNetGUID()==guid) + { + return world->connectionList[index]; + } + } + return 0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetDefaultOrderingChannel(char def) +{ + defaultSendParameters.orderingChannel=def; +} + +// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetDefaultPacketPriority(PacketPriority def) +{ + defaultSendParameters.priority=def; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetDefaultPacketReliability(PacketReliability def) +{ + defaultSendParameters.reliability=def; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetAutoSerializeInterval(RakNet::Time intervalMS) +{ + autoSerializeInterval=intervalMS; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::GetConnectionsThatHaveReplicaConstructed(Replica3 *replica, DataStructures::List &connectionsThatHaveConstructedThisReplica, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + connectionsThatHaveConstructedThisReplica.Clear(false,_FILE_AND_LINE_); + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->HasReplicaConstructed(replica)) + connectionsThatHaveConstructedThisReplica.Push(world->connectionList[index],_FILE_AND_LINE_); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool ReplicaManager3::GetAllConnectionDownloadsCompleted(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + unsigned int index; + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->GetDownloadWasCompleted()==false) + return false; + } + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::Clear(bool deleteWorlds) +{ + for (unsigned int i=0; i < worldsList.Size(); i++) + { + worldsList[i]->Clear(this); + if (deleteWorlds) + { + worldsArray[worldsList[i]->worldId]=0; + delete worldsList[i]; + } + } + if (deleteWorlds) + worldsList.Clear(false, _FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +ReplicaManager3::RM3World::RM3World() +{ + networkIDManager=0; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::RM3World::Clear(ReplicaManager3 *replicaManager3) +{ + if (replicaManager3->GetAutoDestroyConnections()) + { + for (unsigned int i=0; i < connectionList.Size(); i++) + 
replicaManager3->DeallocConnection(connectionList[i]); + } + else + { + // Clear out downloadGroup even if not auto destroying the connection, since the packets need to go back to RakPeer + for (unsigned int i=0; i < connectionList.Size(); i++) + connectionList[i]->ClearDownloadGroup(replicaManager3->GetRakPeerInterface()); + } + + for (unsigned int i=0; i < userReplicaList.Size(); i++) + { + userReplicaList[i]->replicaManager=0; + userReplicaList[i]->SetNetworkIDManager(0); + } + connectionList.Clear(true,_FILE_AND_LINE_); + userReplicaList.Clear(true,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PRO ReplicaManager3::GetDefaultSendParameters(void) const +{ + return defaultSendParameters; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::AddWorld(WorldId worldId) +{ + RakAssert(worldsArray[worldId]==0 && "World already in use"); + + RM3World *newWorld = RakNet::OP_NEW(_FILE_AND_LINE_); + newWorld->worldId=worldId; + worldsArray[worldId]=newWorld; + worldsList.Push(newWorld,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::RemoveWorld(WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + for (unsigned int i=0; i < worldsList.Size(); i++) + { + if (worldsList[i]==worldsArray[worldId]) + { + RakNet::OP_DELETE(worldsList[i],_FILE_AND_LINE_); + worldsList.RemoveAtIndexFast(i); + break; + } + } + worldsArray[worldId]=0; + +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +NetworkIDManager *ReplicaManager3::GetNetworkIDManager(WorldId worldId) const +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + return world->networkIDManager; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::SetNetworkIDManager(NetworkIDManager *_networkIDManager, WorldId worldId) +{ + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + world->networkIDManager=_networkIDManager; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnReceive(Packet *packet) +{ + if (packet->length<2) + return RR_CONTINUE_PROCESSING; + + WorldId incomingWorldId; + + RakNet::Time timestamp=0; + unsigned char packetIdentifier, packetDataOffset; + if ( ( unsigned char ) packet->data[ 0 ] == ID_TIMESTAMP ) + { + if ( packet->length > sizeof( unsigned char ) + sizeof( RakNet::Time ) ) + { + packetIdentifier = ( unsigned char ) packet->data[ sizeof( unsigned char ) + sizeof( RakNet::Time ) ]; + // Required for proper endian swapping + RakNet::BitStream 
tsBs(packet->data+sizeof(MessageID),packet->length-1,false); + tsBs.Read(timestamp); + // Next line assumes worldId is only 1 byte + RakAssert(sizeof(WorldId)==1); + incomingWorldId=packet->data[sizeof( unsigned char )*2 + sizeof( RakNet::Time )]; + packetDataOffset=sizeof( unsigned char )*3 + sizeof( RakNet::Time ); + } + else + return RR_STOP_PROCESSING_AND_DEALLOCATE; + } + else + { + packetIdentifier = ( unsigned char ) packet->data[ 0 ]; + // Next line assumes worldId is only 1 byte + RakAssert(sizeof(WorldId)==1); + incomingWorldId=packet->data[sizeof( unsigned char )]; + packetDataOffset=sizeof( unsigned char )*2; + } + + if (worldsArray[incomingWorldId]==0) + return RR_CONTINUE_PROCESSING; + + switch (packetIdentifier) + { + case ID_REPLICA_MANAGER_CONSTRUCTION: + return OnConstruction(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); + case ID_REPLICA_MANAGER_SERIALIZE: + return OnSerialize(packet, packet->data, packet->length, packet->guid, timestamp, packetDataOffset, incomingWorldId); + case ID_REPLICA_MANAGER_DOWNLOAD_STARTED: + if (packet->wasGeneratedLocally==false) + { + return OnDownloadStarted(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); + } + else + break; + case ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE: + if (packet->wasGeneratedLocally==false) + { + return OnDownloadComplete(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); + } + else + break; + case ID_REPLICA_MANAGER_SCOPE_CHANGE: + { + Connection_RM3 *connection = GetConnectionByGUID(packet->guid, incomingWorldId); + if (connection && connection->isValidated==false) + { + // This connection is now confirmed bidirectional + connection->isValidated=true; + // Reply back on validation + connection->SendValidation(rakPeerInterface,incomingWorldId); + } + } + } + + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::AutoConstructByQuery(ReplicaManager3 *replicaManager3, WorldId worldId) +{ + ValidateLists(replicaManager3); + + ConstructionMode constructionMode = QueryConstructionMode(); + + unsigned int index; + RM3ConstructionState constructionState; + LastSerializationResult *lsr; + index=0; + + constructedReplicasCulled.Clear(false,_FILE_AND_LINE_); + destroyedReplicasCulled.Clear(false,_FILE_AND_LINE_); + + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + while (index < queryToConstructReplicaList.Size()) + { + lsr=queryToConstructReplicaList[index]; + constructionState=lsr->replica->QueryConstruction(this, replicaManager3); + if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY || constructionState==RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT) + { + OnReplicaAlreadyExists(index, replicaManager3); + if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) + constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); + + /* + if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) + { + // Serialize construction data to this connection + RakNet::BitStream bsOut; + bsOut.Write((MessageID)ID_REPLICA_MANAGER_3_SERIALIZE_CONSTRUCTION_EXISTING); + bsOut.Write(replicaManager3->GetWorldID()); + NetworkID networkId; + networkId=lsr->replica->GetNetworkID(); + bsOut.Write(networkId); + BitSize_t bitsWritten = 
bsOut.GetNumberOfBitsUsed(); + lsr->replica->SerializeConstructionExisting(&bsOut, this); + if (bsOut.GetNumberOfBitsUsed()!=bitsWritten) + replicaManager3->SendUnified(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,GetSystemAddress(), false); + } + + // Serialize first serialization to this connection. + // This is done here, as it isn't done in PushConstruction + SerializeParameters sp; + RakNet::BitStream emptyBs; + for (index=0; index < (unsigned int) RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) + { + sp.lastSentBitstream[index]=&emptyBs; + sp.pro[index]=replicaManager3->GetDefaultSendParameters(); + } + sp.bitsWrittenSoFar=0; + sp.destinationConnection=this; + sp.messageTimestamp=0; + sp.whenLastSerialized=0; + + RakNet::Replica3 *replica = lsr->replica; + + RM3SerializationResult res = replica->Serialize(&sp); + if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && + res!=RM3SR_DO_NOT_SERIALIZE && + res!=RM3SR_SERIALIZED_UNIQUELY) + { + bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); + allIndices[z]=true; + } + if (SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, replicaManager3->GetRakPeerInterface(), replicaManager3->GetWorldID())==SSICR_SENT_DATA) + lsr->replica->whenLastSerialized=RakNet::GetTimeMS(); + } + */ + } + else if (constructionState==RM3CS_SEND_CONSTRUCTION) + { + OnConstructToThisConnection(index, replicaManager3); + RakAssert(lsr->replica); + constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); + } + else if (constructionState==RM3CS_NEVER_CONSTRUCT) + { + OnNeverConstruct(index, replicaManager3); + } + else// if (constructionState==RM3CS_NO_ACTION) + { + // Do nothing + index++; + } + } + + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + RM3DestructionState destructionState; + index=0; + while (index < queryToDestructReplicaList.Size()) + { + lsr=queryToDestructReplicaList[index]; + destructionState=lsr->replica->QueryDestruction(this, replicaManager3); + if (destructionState==RM3DS_SEND_DESTRUCTION) + { + OnSendDestructionFromQuery(index, replicaManager3); + destroyedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); + } + else if (destructionState==RM3DS_DO_NOT_QUERY_DESTRUCTION) + { + OnDoNotQueryDestruction(index, replicaManager3); + } + else// if (destructionState==RM3CS_NO_ACTION) + { + // Do nothing + index++; + } + } + } + } + else if (constructionMode==QUERY_CONNECTION_FOR_REPLICA_LIST) + { + QueryReplicaList(constructedReplicasCulled,destroyedReplicasCulled); + + unsigned int idx1, idx2; + + // Create new + for (idx2=0; idx2 < constructedReplicasCulled.Size(); idx2++) + OnConstructToThisConnection(constructedReplicasCulled[idx2], replicaManager3); + + bool exists; + for (idx2=0; idx2 < destroyedReplicasCulled.Size(); idx2++) + { + exists=false; + bool objectExists; + idx1=constructedReplicaList.GetIndexFromKey(destroyedReplicasCulled[idx2], &objectExists); + if (objectExists) + { + constructedReplicaList.RemoveAtIndex(idx1); + + unsigned int j; + for (j=0; j < queryToSerializeReplicaList.Size(); j++) + { + if (queryToSerializeReplicaList[j]->replica==destroyedReplicasCulled[idx2] ) + { + queryToSerializeReplicaList.RemoveAtIndex(j); + break; + } + } + } + } + } + + SendConstruction(constructedReplicasCulled,destroyedReplicasCulled,replicaManager3->defaultSendParameters,replicaManager3->rakPeerInterface,worldId,replicaManager3); +} +void 
ReplicaManager3::Update(void) +{ + unsigned int index,index2,index3; + + WorldId worldId; + RM3World *world; + RakNet::Time time = RakNet::GetTime(); + + for (index3=0; index3 < worldsList.Size(); index3++) + { + world = worldsList[index3]; + worldId = world->worldId; + + for (index=0; index < world->connectionList.Size(); index++) + { + if (world->connectionList[index]->isValidated==false) + continue; + world->connectionList[index]->AutoConstructByQuery(this, worldId); + } + } + + if (time - lastAutoSerializeOccurance >= autoSerializeInterval) + { + for (index3=0; index3 < worldsList.Size(); index3++) + { + world = worldsList[index3]; + worldId = world->worldId; + + for (index=0; index < world->userReplicaList.Size(); index++) + { + world->userReplicaList[index]->forceSendUntilNextUpdate=false; + world->userReplicaList[index]->OnUserReplicaPreSerializeTick(); + } + + unsigned int index; + SerializeParameters sp; + sp.curTime=time; + Connection_RM3 *connection; + SendSerializeIfChangedResult ssicr; + LastSerializationResult *lsr; + + sp.messageTimestamp=0; + for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) + sp.pro[i]=defaultSendParameters; + index2=0; + for (index=0; index < world->connectionList.Size(); index++) + { + connection = world->connectionList[index]; + sp.bitsWrittenSoFar=0; + index2=0; + sp.destinationConnection=connection; + + DataStructures::List replicasToSerialize; + replicasToSerialize.Clear(true, _FILE_AND_LINE_); + if (connection->QuerySerializationList(replicasToSerialize)) + { + // Update replica->lsr so we can lookup in the next block + // lsr is per connection / per replica + while (index2 < connection->queryToSerializeReplicaList.Size()) + { + connection->queryToSerializeReplicaList[index2]->replica->lsr=connection->queryToSerializeReplicaList[index2]; + index2++; + } + + + // User is manually specifying list of replicas to serialize + index2=0; + while (index2 < replicasToSerialize.Size()) + { + lsr=replicasToSerialize[index2]->lsr; + RakAssert(lsr->replica==replicasToSerialize[index2]); + + sp.whenLastSerialized=lsr->whenLastSerialized; + ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); + if (ssicr==SSICR_SENT_DATA) + lsr->whenLastSerialized=time; + index2++; + } + } + else + { + while (index2 < connection->queryToSerializeReplicaList.Size()) + { + lsr=connection->queryToSerializeReplicaList[index2]; + + sp.destinationConnection=connection; + sp.whenLastSerialized=lsr->whenLastSerialized; + ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); + if (ssicr==SSICR_SENT_DATA) + { + lsr->whenLastSerialized=time; + index2++; + } + else if (ssicr==SSICR_NEVER_SERIALIZE) + { + // Removed from the middle of the list + } + else + index2++; + } + } + } + } + + lastAutoSerializeOccurance=time; + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnClosedConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, PI2_LostConnectionReason lostConnectionReason ) +{ + (void) lostConnectionReason; + (void) systemAddress; + if (autoDestroyConnections) + { + Connection_RM3 *connection = PopConnection(rakNetGUID); + if (connection) + DeallocConnection(connection); + } +} + +// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnNewConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, bool isIncoming) +{ + (void) isIncoming; + if (autoCreateConnections) + { + Connection_RM3 *connection = AllocConnection(systemAddress, rakNetGUID); + if (connection) + PushConnection(connection); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnRakPeerShutdown(void) +{ + if (autoDestroyConnections) + { + RM3World *world; + unsigned int index3; + for (index3=0; index3 < worldsList.Size(); index3++) + { + world = worldsList[index3]; + + while (world->connectionList.Size()) + { + Connection_RM3 *connection = PopConnection(world->connectionList.Size()-1, world->worldId); + if (connection) + DeallocConnection(connection); + } + } + } + + + Clear(false); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void ReplicaManager3::OnDetach(void) +{ + OnRakPeerShutdown(); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnConstruction(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) +{ + RM3World *world = worldsArray[worldId]; + + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + { + // Almost certainly a bug + RakAssert("Got OnConstruction but no connection yet" && 0); + return RR_CONTINUE_PROCESSING; + } + if (connection->groupConstructionAndSerialize) + { + connection->downloadGroup.Push(packet, __FILE__, __LINE__); + return RR_STOP_PROCESSING; + } + + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + uint16_t constructionObjectListSize, destructionObjectListSize, index, index2; + BitSize_t streamEnd, writeAllocationIDEnd; + Replica3 *replica; + NetworkID networkId; + RakNetGUID creatingSystemGuid; + bool actuallyCreateObject=false; + + DataStructures::List actuallyCreateObjectList; + DataStructures::List constructionTickStack; + + RakAssert(world->networkIDManager); + + bsIn.Read(constructionObjectListSize); + for (index=0; index < constructionObjectListSize; index++) + { + bsIn.Read(streamEnd); + bsIn.Read(networkId); + Replica3* existingReplica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); + bsIn.Read(actuallyCreateObject); + actuallyCreateObjectList.Push(actuallyCreateObject, _FILE_AND_LINE_); + bsIn.AlignReadToByteBoundary(); + + if (actuallyCreateObject) + { + bsIn.Read(creatingSystemGuid); + bsIn.Read(writeAllocationIDEnd); + + //printf("OnConstruction: %i\n",networkId.guid.g); // Removeme + if (existingReplica) + { + existingReplica->replicaManager=this; + + // Network ID already in use + connection->OnDownloadExisting(existingReplica, this); + + constructionTickStack.Push(0, _FILE_AND_LINE_); + bsIn.SetReadOffset(streamEnd); + continue; + } + + bsIn.AlignReadToByteBoundary(); + 
replica = connection->AllocReplica(&bsIn, this); + if (replica==0) + { + constructionTickStack.Push(0, _FILE_AND_LINE_); + bsIn.SetReadOffset(streamEnd); + continue; + } + + // Go past the bitStream written to with WriteAllocationID(). Necessary in case the user didn't read out the bitStream the same way it was written + // bitOffset2 is already aligned + bsIn.SetReadOffset(writeAllocationIDEnd); + + replica->SetNetworkIDManager(world->networkIDManager); + replica->SetNetworkID(networkId); + + replica->replicaManager=this; + replica->creatingSystemGUID=creatingSystemGuid; + + if (!replica->QueryRemoteConstruction(connection) || + !replica->DeserializeConstruction(&bsIn, connection)) + { + DeallocReplicaNoBroadcastDestruction(connection, replica); + bsIn.SetReadOffset(streamEnd); + constructionTickStack.Push(0, _FILE_AND_LINE_); + continue; + } + + constructionTickStack.Push(replica, _FILE_AND_LINE_); + + // Register the replica + ReferenceInternal(replica, worldId); + } + else + { + if (existingReplica) + { + existingReplica->DeserializeConstructionExisting(&bsIn, connection); + constructionTickStack.Push(existingReplica, _FILE_AND_LINE_); + } + else + { + constructionTickStack.Push(0, _FILE_AND_LINE_); + } + } + + + bsIn.SetReadOffset(streamEnd); + bsIn.AlignReadToByteBoundary(); + } + + RakAssert(constructionTickStack.Size()==constructionObjectListSize); + RakAssert(actuallyCreateObjectList.Size()==constructionObjectListSize); + + RakNet::BitStream empty; + for (index=0; index < constructionObjectListSize; index++) + { + bool pdcWritten=false; + bsIn.Read(pdcWritten); + if (pdcWritten) + { + bsIn.AlignReadToByteBoundary(); + bsIn.Read(streamEnd); + bsIn.Read(networkId); + if (constructionTickStack[index]!=0) + { + bsIn.AlignReadToByteBoundary(); + if (actuallyCreateObjectList[index]) + constructionTickStack[index]->PostDeserializeConstruction(&bsIn, connection); + else + constructionTickStack[index]->PostDeserializeConstructionExisting(&bsIn, connection); + } + bsIn.SetReadOffset(streamEnd); + } + else + { + if (constructionTickStack[index]!=0) + { + if (actuallyCreateObjectList[index]) + constructionTickStack[index]->PostDeserializeConstruction(&empty, connection); + else + constructionTickStack[index]->PostDeserializeConstructionExisting(&empty, connection); + } + } + } + + for (index=0; index < constructionObjectListSize; index++) + { + if (constructionTickStack[index]!=0) + { + if (actuallyCreateObjectList[index]) + { + // Tell the connection(s) that this object exists since they just sent it to us + connection->OnDownloadFromThisSystem(constructionTickStack[index], this); + + for (index2=0; index2 < world->connectionList.Size(); index2++) + { + if (world->connectionList[index2]!=connection) + world->connectionList[index2]->OnDownloadFromOtherSystem(constructionTickStack[index], this); + } + } + } + } + + // Destructions + bool b = bsIn.Read(destructionObjectListSize); + (void) b; + RakAssert(b); + for (index=0; index < destructionObjectListSize; index++) + { + bsIn.Read(networkId); + bsIn.Read(streamEnd); + replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); + if (replica==0) + { + // Unknown object + bsIn.SetReadOffset(streamEnd); + continue; + } + bsIn.Read(replica->deletingSystemGUID); + if (replica->DeserializeDestruction(&bsIn,connection)) + { + // Make sure it wasn't deleted in DeserializeDestruction + if (world->networkIDManager->GET_OBJECT_FROM_ID(networkId)) + { + replica->PreDestruction(connection); + + // Forward deletion by remote system + if 
(replica->QueryRelayDestruction(connection)) + BroadcastDestruction(replica,connection->GetSystemAddress()); + Dereference(replica); + DeallocReplicaNoBroadcastDestruction(connection, replica); + } + } + else + { + replica->PreDestruction(connection); + connection->OnDereference(replica, this); + } + + bsIn.AlignReadToByteBoundary(); + } + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnSerialize(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, RakNet::Time timestamp, unsigned char packetDataOffset, WorldId worldId) +{ + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + return RR_CONTINUE_PROCESSING; + if (connection->groupConstructionAndSerialize) + { + connection->downloadGroup.Push(packet, __FILE__, __LINE__); + return RR_STOP_PROCESSING; + } + + RM3World *world = worldsArray[worldId]; + RakAssert(world->networkIDManager); + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + + struct DeserializeParameters ds; + ds.timeStamp=timestamp; + ds.sourceConnection=connection; + + Replica3 *replica; + NetworkID networkId; + BitSize_t bitsUsed; + bsIn.Read(networkId); + //printf("OnSerialize: %i\n",networkId.guid.g); // Removeme + replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); + if (replica) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + bsIn.Read(ds.bitstreamWrittenTo[z]); + if (ds.bitstreamWrittenTo[z]) + { + bsIn.ReadCompressed(bitsUsed); + bsIn.AlignReadToByteBoundary(); + bsIn.Read(ds.serializationBitstream[z], bitsUsed); + } + } + replica->Deserialize(&ds); + } + return RR_CONTINUE_PROCESSING; +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnDownloadStarted(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) +{ + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + return RR_CONTINUE_PROCESSING; + if (connection->QueryGroupDownloadMessages() && + // ID_DOWNLOAD_STARTED will be processed twice, being processed the second time once ID_DOWNLOAD_COMPLETE arrives. 
+ // However, the second time groupConstructionAndSerialize will be set to true so it won't be processed a third time + connection->groupConstructionAndSerialize==false + ) + { + // These messages will be held by the plugin and returned when the download is complete + connection->groupConstructionAndSerialize=true; + RakAssert(connection->downloadGroup.Size()==0); + connection->downloadGroup.Push(packet, __FILE__, __LINE__); + return RR_STOP_PROCESSING; + } + + connection->groupConstructionAndSerialize=false; + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + connection->DeserializeOnDownloadStarted(&bsIn); + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +PluginReceiveResult ReplicaManager3::OnDownloadComplete(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) +{ + Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); + if (connection==0) + return RR_CONTINUE_PROCESSING; + + if (connection->groupConstructionAndSerialize==true && connection->downloadGroup.Size()>0) + { + // Push back buffered packets in front of this one + unsigned int i; + for (i=0; i < connection->downloadGroup.Size(); i++) + rakPeerInterface->PushBackPacket(connection->downloadGroup[i],false); + + // Push this one to be last too. It will be processed again, but the second time + // groupConstructionAndSerialize will be false and downloadGroup will be empty, so it will go past this block + connection->downloadGroup.Clear(__FILE__,__LINE__); + rakPeerInterface->PushBackPacket(packet,false); + + return RR_STOP_PROCESSING; + } + + RakNet::BitStream bsIn(packetData,packetDataLength,false); + bsIn.IgnoreBytes(packetDataOffset); + connection->gotDownloadComplete=true; + connection->DeserializeOnDownloadComplete(&bsIn); + return RR_CONTINUE_PROCESSING; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3* ReplicaManager3::GetReplicaByNetworkID(NetworkID networkId, WorldId worldId) +{ + RM3World *world = worldsArray[worldId]; + + unsigned int i; + for (i=0; i < world->userReplicaList.Size(); i++) + { + if (world->userReplicaList[i]->GetNetworkID()==networkId) + return world->userReplicaList[i]; + } + return 0; +} + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + +void ReplicaManager3::BroadcastDestructionList(DataStructures::List &replicaListSource, const SystemAddress &exclusionAddress, WorldId worldId) +{ + RakNet::BitStream bsOut; + unsigned int i,j; + + RakAssert(worldsArray[worldId]!=0 && "World not in use"); + RM3World *world = worldsArray[worldId]; + + DataStructures::List replicaList; + + for (i=0; i < replicaListSource.Size(); i++) + { + if (replicaListSource[i]==currentlyDeallocatingReplica) + continue; + replicaList.Push(replicaListSource[i], __FILE__, __LINE__); + } + + if (replicaList.Size()==0) + return; + + for (i=0; i < replicaList.Size(); i++) + { + if (replicaList[i]->deletingSystemGUID==UNASSIGNED_RAKNET_GUID) + 
replicaList[i]->deletingSystemGUID=GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + } + + for (j=0; j < world->connectionList.Size(); j++) + { + if (world->connectionList[j]->GetSystemAddress()==exclusionAddress) + continue; + + bsOut.Reset(); + bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); + bsOut.Write(worldId); + uint16_t cnt=0; + bsOut.Write(cnt); // No construction + cnt=(uint16_t) replicaList.Size(); + BitSize_t cntOffset=bsOut.GetWriteOffset(); + bsOut.Write(cnt); // Overwritten at send call + cnt=0; + + for (i=0; i < replicaList.Size(); i++) + { + if (world->connectionList[j]->HasReplicaConstructed(replicaList[i])==false) + continue; + cnt++; + + NetworkID networkId; + networkId=replicaList[i]->GetNetworkID(); + bsOut.Write(networkId); + BitSize_t offsetStart, offsetEnd; + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); + bsOut.Write(replicaList[i]->deletingSystemGUID); + replicaList[i]->SerializeDestruction(&bsOut, world->connectionList[j]); + bsOut.AlignWriteToByteBoundary(); + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + + if (cnt>0) + { + BitSize_t curOffset=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(cntOffset); + bsOut.Write(cnt); + bsOut.SetWriteOffset(curOffset); + rakPeerInterface->Send(&bsOut,defaultSendParameters.priority,defaultSendParameters.reliability,defaultSendParameters.orderingChannel,world->connectionList[j]->GetSystemAddress(),false, defaultSendParameters.sendReceipt); + } + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + +void ReplicaManager3::BroadcastDestruction(Replica3 *replica, const SystemAddress &exclusionAddress) +{ + DataStructures::List<Replica3*> replicaList; + replicaList.Push(replica, _FILE_AND_LINE_ ); + BroadcastDestructionList(replicaList,exclusionAddress); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3::Connection_RM3(const SystemAddress &_systemAddress, RakNetGUID _guid) +: systemAddress(_systemAddress), guid(_guid) +{ + isValidated=false; + isFirstConstruction=true; + groupConstructionAndSerialize=false; + gotDownloadComplete=false; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Connection_RM3::~Connection_RM3() +{ + unsigned int i; + for (i=0; i < constructedReplicaList.Size(); i++) + RakNet::OP_DELETE(constructedReplicaList[i], _FILE_AND_LINE_); + for (i=0; i < queryToConstructReplicaList.Size(); i++) + RakNet::OP_DELETE(queryToConstructReplicaList[i], _FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::GetConstructedReplicas(DataStructures::List<Replica3*> &objectsTheyDoHave) +{ + objectsTheyDoHave.Clear(true,_FILE_AND_LINE_); + for 
(unsigned int idx=0; idx < constructedReplicaList.Size(); idx++) + { + objectsTheyDoHave.Push(constructedReplicaList[idx]->replica, _FILE_AND_LINE_ ); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Connection_RM3::HasReplicaConstructed(RakNet::Replica3 *replica) +{ + bool objectExists; + constructedReplicaList.GetIndexFromKey(replica, &objectExists); + return objectExists; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +void Connection_RM3::SendSerializeHeader(RakNet::Replica3 *replica, RakNet::Time timestamp, RakNet::BitStream *bs, WorldId worldId) +{ + bs->Reset(); + + if (timestamp!=0) + { + bs->Write((MessageID)ID_TIMESTAMP); + bs->Write(timestamp); + } + bs->Write((MessageID)ID_REPLICA_MANAGER_SERIALIZE); + bs->Write(worldId); + bs->Write(replica->GetNetworkID()); +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +void Connection_RM3::ClearDownloadGroup(RakPeerInterface *rakPeerInterface) +{ + unsigned int i; + for (i=0; i < downloadGroup.Size(); i++) + rakPeerInterface->DeallocatePacket(downloadGroup[i]); + downloadGroup.Clear(__FILE__,__LINE__); +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +SendSerializeIfChangedResult Connection_RM3::SendSerialize(RakNet::Replica3 *replica, bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::BitStream serializationData[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::Time timestamp, PRO sendParameters[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakPeerInterface *rakPeer, unsigned char worldId, RakNet::Time curTime) +{ + bool channelHasData; + BitSize_t sum=0; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (indicesToSend[z]) + sum+=serializationData[z].GetNumberOfBitsUsed(); + } + + RakNet::BitStream out; + BitSize_t bitsPerChannel[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + + if (sum==0) + { + memset(bitsPerChannel, 0, sizeof(bitsPerChannel)); + replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); + return SSICR_DID_NOT_SEND_DATA; + } + + RakAssert(replica->GetNetworkID()!=UNASSIGNED_NETWORK_ID); + + BitSize_t bitsUsed; + + int channelIndex; + PRO lastPro=sendParameters[0]; + + for (channelIndex=0; channelIndex < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex++) + { + if (channelIndex==0) + { + SendSerializeHeader(replica, timestamp, &out, worldId); + } + else if (lastPro!=sendParameters[channelIndex]) + { + // Write out remainder + for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) + { + bitsPerChannel[channelIndex2]=0; + out.Write(false); + } + + // Send remainder + replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); + rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); + + // If no data left to send, quit out + bool anyData=false; + for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) + { + if 
(serializationData[channelIndex2].GetNumberOfBitsUsed()>0) + { + anyData=true; + break; + } + } + if (anyData==false) + return SSICR_SENT_DATA; + + // Restart stream + SendSerializeHeader(replica, timestamp, &out, worldId); + + for (int channelIndex2=0; channelIndex2 < channelIndex; channelIndex2++) + { + bitsPerChannel[channelIndex2]=0; + out.Write(false); + } + lastPro=sendParameters[channelIndex]; + } + + bitsUsed=serializationData[channelIndex].GetNumberOfBitsUsed(); + channelHasData = indicesToSend[channelIndex]==true && bitsUsed>0; + out.Write(channelHasData); + if (channelHasData) + { + bitsPerChannel[channelIndex] = bitsUsed; + out.WriteCompressed(bitsUsed); + out.AlignWriteToByteBoundary(); + out.Write(serializationData[channelIndex]); + // Crap, forgot this line, was a huge bug in that I'd only send to the first 3 systems + serializationData[channelIndex].ResetReadPointer(); + } + else + { + bitsPerChannel[channelIndex] = 0; + } + } + replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); + rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); + return SSICR_SENT_DATA; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +SendSerializeIfChangedResult Connection_RM3::SendSerializeIfChanged(LastSerializationResult *lsr, SerializeParameters *sp, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager, RakNet::Time curTime) +{ + RakNet::Replica3 *replica = lsr->replica; + + if (replica->GetNetworkID()==UNASSIGNED_NETWORK_ID) + return SSICR_DID_NOT_SEND_DATA; + + RM3QuerySerializationResult rm3qsr = replica->QuerySerialization(this); + if (rm3qsr==RM3QSR_NEVER_CALL_SERIALIZE) + { + // Never again for this connection and replica pair + OnNeverSerialize(lsr, replicaManager); + return SSICR_NEVER_SERIALIZE; + } + + if (rm3qsr==RM3QSR_DO_NOT_CALL_SERIALIZE) + return SSICR_DID_NOT_SEND_DATA; + + if (replica->forceSendUntilNextUpdate) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (replica->lastSentSerialization.indicesToSend[z]) + sp->bitsWrittenSoFar+=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed(); + } + return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, replica->lastSentSerialization.bitStream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); + } + + for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) + { + sp->outputBitstream[i].Reset(); + if (lsr->lastSerializationResultBS) + sp->lastSentBitstream[i]=&lsr->lastSerializationResultBS->bitStream[i]; + else + sp->lastSentBitstream[i]=&replica->lastSentSerialization.bitStream[i]; + } + + RM3SerializationResult serializationResult = replica->Serialize(sp); + + if (serializationResult==RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION) + { + // Never again for this connection and replica pair + OnNeverSerialize(lsr, replicaManager); + return SSICR_NEVER_SERIALIZE; + } + + if (serializationResult==RM3SR_DO_NOT_SERIALIZE) + { + // Don't serialize this tick only + return SSICR_DID_NOT_SEND_DATA; + } + + // This is necessary in case the user in the Serialize() function for some reason read the bitstream they also wrote + // WIthout this code, the Write calls to another bitstream would not write the entire bitstream + BitSize_t sum=0; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + 
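+		/* A minimal sketch of the user override referred to in the comment above
+		   ("SampleReplica" and "position" are hypothetical members, not RakNet names).
+		   Serialize() writes into outputBitstream[]; if the user also reads that
+		   stream back, the read pointer has to be reset here before the data is
+		   copied out and sent.
+
+		   RM3SerializationResult SampleReplica::Serialize(RakNet::SerializeParameters *serializeParameters)
+		   {
+		       serializeParameters->outputBitstream[0].Write(position);
+		       return RM3SR_BROADCAST_IDENTICALLY;   // compared against lastSentSerialization below; sent only when it changed
+		   }
+		*/
+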
sp->outputBitstream[z].ResetReadPointer(); + sum+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + } + + if (sum==0) + { + // Don't serialize this tick only + return SSICR_DID_NOT_SEND_DATA; + } + + if (serializationResult==RM3SR_SERIALIZED_ALWAYS) + { + bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + allIndices[z]=true; + + lsr->AllocBS(); + lsr->lastSerializationResultBS->bitStream[z].Reset(); + lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + } + return SendSerialize(replica, allIndices, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); + } + + if (serializationResult==RM3SR_SERIALIZED_ALWAYS_IDENTICALLY) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + replica->lastSentSerialization.indicesToSend[z]=sp->outputBitstream[z].GetNumberOfBitsUsed()>0; + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + replica->lastSentSerialization.bitStream[z].Reset(); + replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + replica->forceSendUntilNextUpdate=true; + } + return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); + } + + bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) + { + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && + (serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION || + ((sp->outputBitstream[z].GetNumberOfBitsUsed()!=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed() || + memcmp(sp->outputBitstream[z].GetData(), replica->lastSentSerialization.bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0)))) + { + indicesToSend[z]=true; + replica->lastSentSerialization.indicesToSend[z]=true; + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + replica->lastSentSerialization.bitStream[z].Reset(); + replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + replica->forceSendUntilNextUpdate=true; + } + else + { + indicesToSend[z]=false; + replica->lastSentSerialization.indicesToSend[z]=false; + } + } + } + else + { + lsr->AllocBS(); + + // RM3SR_SERIALIZED_UNIQUELY + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && + (sp->outputBitstream[z].GetNumberOfBitsUsed()!=lsr->lastSerializationResultBS->bitStream[z].GetNumberOfBitsUsed() || + memcmp(sp->outputBitstream[z].GetData(), lsr->lastSerializationResultBS->bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0) + ) + { + indicesToSend[z]=true; + sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); + lsr->lastSerializationResultBS->bitStream[z].Reset(); + lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); + sp->outputBitstream[z].ResetReadPointer(); + } + else + { + indicesToSend[z]=false; + } + } + } + + + if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) + 
replica->forceSendUntilNextUpdate=true; + + // Send out the data + return SendSerialize(replica, indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +void Connection_RM3::OnLocalReference(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + RakAssert(replica3); + (void) replicaManager; + (void) constructionMode; + +#ifdef _DEBUG + for (unsigned int i=0; i < queryToConstructReplicaList.Size(); i++) + { + if (queryToConstructReplicaList[i]->replica==replica3) + { + RakAssert("replica added twice to queryToConstructReplicaList" && 0); + } + } + + if (constructedReplicaList.HasData(replica3)==true) + { + RakAssert("replica added to queryToConstructReplicaList when already in constructedReplicaList" && 0); + } +#endif + + LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); + lsr->replica=replica3; + queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDereference(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + + if (replica3->GetNetworkIDManager() == 0) + return; + + LastSerializationResult* lsr=0; + unsigned int idx; + + bool objectExists; + idx=constructedReplicaList.GetIndexFromKey(replica3, &objectExists); + if (objectExists) + { + lsr=constructedReplicaList[idx]; + constructedReplicaList.RemoveAtIndex(idx); + } + + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + if (queryToConstructReplicaList[idx]->replica==replica3) + { + lsr=queryToConstructReplicaList[idx]; + queryToConstructReplicaList.RemoveAtIndex(idx); + break; + } + } + + for (idx=0; idx < queryToSerializeReplicaList.Size(); idx++) + { + if (queryToSerializeReplicaList[idx]->replica==replica3) + { + lsr=queryToSerializeReplicaList[idx]; + queryToSerializeReplicaList.RemoveAtIndex(idx); + break; + } + } + + for (idx=0; idx < queryToDestructReplicaList.Size(); idx++) + { + if (queryToDestructReplicaList[idx]->replica==replica3) + { + lsr=queryToDestructReplicaList[idx]; + queryToDestructReplicaList.RemoveAtIndex(idx); + break; + } + } + + ValidateLists(replicaManager); + + if (lsr) + RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); + + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDownloadFromThisSystem(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + RakAssert(replica3); + + ValidateLists(replicaManager); + LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); + lsr->replica=replica3; + + ConstructionMode constructionMode = QueryConstructionMode(); + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int j; + for (j=0; j < queryToConstructReplicaList.Size(); j++) + { + if 
(queryToConstructReplicaList[j]->replica==replica3 ) + { + queryToConstructReplicaList.RemoveAtIndex(j); + break; + } + } + + queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); + } + + if (constructedReplicaList.Insert(lsr->replica, lsr, true, _FILE_AND_LINE_) != (unsigned) -1) + { + //assert(queryToSerializeReplicaList.GetIndexOf(replica3)==(unsigned int)-1); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); + } + + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDownloadFromOtherSystem(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int j; + for (j=0; j < queryToConstructReplicaList.Size(); j++) + { + if (queryToConstructReplicaList[j]->replica==replica3 ) + { + return; + } + } + + OnLocalReference(replica3, replicaManager); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnNeverConstruct(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; + queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); + RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnConstructToThisConnection(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; + queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); + //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); + //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); + //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnConstructToThisConnection(Replica3 *replica, ReplicaManager3 *replicaManager) +{ + RakAssert(replica); + 
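+	/* This overload is only reached when the connection runs in list-based
+	   construction mode. A minimal sketch of opting into that mode from a
+	   Connection_RM3 subclass ("SampleConnection" is hypothetical; the const
+	   qualifier is assumed from the RakNet header):
+
+	   ConstructionMode SampleConnection::QueryConstructionMode(void) const
+	   {
+	       return QUERY_CONNECTION_FOR_REPLICA_LIST;
+	   }
+	*/
+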
RakAssert(QueryConstructionMode()==QUERY_CONNECTION_FOR_REPLICA_LIST); + (void) replicaManager; + + LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); + lsr->replica=replica; + constructedReplicaList.Insert(replica,lsr,true,_FILE_AND_LINE_); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnNeverSerialize(LastSerializationResult *lsr, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + + unsigned int j; + for (j=0; j < queryToSerializeReplicaList.Size(); j++) + { + if (queryToSerializeReplicaList[j]==lsr ) + { + queryToSerializeReplicaList.RemoveAtIndex(j); + break; + } + } + + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnReplicaAlreadyExists(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; + queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); + //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); + //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); + //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDownloadExisting(Replica3* replica3, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + + ConstructionMode constructionMode = QueryConstructionMode(); + if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) + { + unsigned int idx; + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + if (queryToConstructReplicaList[idx]->replica==replica3) + { + OnConstructToThisConnection(idx, replicaManager); + return; + } + } + } + else + { + OnConstructToThisConnection(replica3, replicaManager); + } +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnSendDestructionFromQuery(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) +{ + ConstructionMode constructionMode = QueryConstructionMode(); + RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); + (void) constructionMode; + + ValidateLists(replicaManager); + LastSerializationResult* lsr = 
queryToDestructReplicaList[queryToDestructIdx]; + queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); + unsigned int j; + for (j=0; j < queryToSerializeReplicaList.Size(); j++) + { + if (queryToSerializeReplicaList[j]->replica==lsr->replica ) + { + queryToSerializeReplicaList.RemoveAtIndex(j); + break; + } + } + for (j=0; j < constructedReplicaList.Size(); j++) + { + if (constructedReplicaList[j]->replica==lsr->replica ) + { + constructedReplicaList.RemoveAtIndex(j); + break; + } + } + //assert(queryToConstructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); + queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::OnDoNotQueryDestruction(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) +{ + ValidateLists(replicaManager); + queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); + ValidateLists(replicaManager); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::ValidateLists(ReplicaManager3 *replicaManager) const +{ + (void) replicaManager; + /* +#ifdef _DEBUG + // Each object should exist only once in either constructedReplicaList or queryToConstructReplicaList + // replicaPointer from LastSerializationResult should be same among all lists + unsigned int idx, idx2; + for (idx=0; idx < constructedReplicaList.Size(); idx++) + { + idx2=queryToConstructReplicaList.GetIndexOf(constructedReplicaList[idx]->replica); + if (idx2!=(unsigned int)-1) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + idx2=constructedReplicaList.GetIndexOf(queryToConstructReplicaList[idx]->replica); + if (idx2!=(unsigned int)-1) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + LastSerializationResult *lsr, *lsr2; + for (idx=0; idx < constructedReplicaList.Size(); idx++) + { + lsr=constructedReplicaList[idx]; + + idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToSerializeReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToDestructReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + } + for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) + { + lsr=queryToConstructReplicaList[idx]; + + idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToSerializeReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); + if (idx2!=(unsigned int)-1) + { + lsr2=queryToDestructReplicaList[idx2]; + if (lsr2!=lsr) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + } + + // Verify pointer integrity + for (idx=0; idx < constructedReplicaList.Size(); idx++) + { + if (constructedReplicaList[idx]->replica->replicaManager!=replicaManager) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } + + // Verify pointer integrity + for (idx=0; idx < 
queryToConstructReplicaList.Size(); idx++) + { + if (queryToConstructReplicaList[idx]->replica->replicaManager!=replicaManager) + { + int a=5; + assert(a==0); + int *b=0; + *b=5; + } + } +#endif + */ +} +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::SendConstruction(DataStructures::List &newObjects, DataStructures::List &deletedObjects, PRO sendParameters, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager3) +{ + if (newObjects.Size()==0 && deletedObjects.Size()==0) + return; + + // All construction and destruction takes place in the same network message + // Otherwise, if objects rely on each other being created the same tick to be valid, this won't always be true + // DataStructures::List serializedObjects; + BitSize_t offsetStart, offsetStart2, offsetEnd; + unsigned int newListIndex, oldListIndex; + RakNet::BitStream bsOut; + NetworkID networkId; + if (isFirstConstruction) + { + bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_STARTED); + bsOut.Write(worldId); + SerializeOnDownloadStarted(&bsOut); + rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); + } + + // LastSerializationResult* lsr; + bsOut.Reset(); + bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); + bsOut.Write(worldId); + uint16_t objectSize = (uint16_t) newObjects.Size(); + bsOut.Write(objectSize); + + // Construction + for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) + { + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); // overwritten to point to the end of the stream + networkId=newObjects[newListIndex]->GetNetworkID(); + bsOut.Write(networkId); + + RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); + bool actuallyCreateObject = cs==RM3CS_SEND_CONSTRUCTION; + bsOut.Write(actuallyCreateObject); + bsOut.AlignWriteToByteBoundary(); + + if (actuallyCreateObject) + { + // Actually create the object + bsOut.Write(newObjects[newListIndex]->creatingSystemGUID); + offsetStart2=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart2); // overwritten to point to after the call to WriteAllocationID + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + newObjects[newListIndex]->WriteAllocationID(this, &bsOut); + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart2); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + newObjects[newListIndex]->SerializeConstruction(&bsOut, this); + } + else + { + newObjects[newListIndex]->SerializeConstructionExisting(&bsOut, this); + } + + bsOut.AlignWriteToByteBoundary(); + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + + RakNet::BitStream bsOut2; + for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) + { + bsOut2.Reset(); + RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); + if (cs==RM3CS_SEND_CONSTRUCTION) + { + newObjects[newListIndex]->PostSerializeConstruction(&bsOut2, this); + } + else + { + RakAssert(cs==RM3CS_ALREADY_EXISTS_REMOTELY); + 
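+			/* A minimal sketch of the user-side construction pair driving this message
+			   ("SampleReplica" and "health" are hypothetical; the void return of
+			   SerializeConstruction is assumed). SerializeConstruction() was written
+			   into bsOut in the loop above, and the remote system reads it back in
+			   OnConstruction() through DeserializeConstruction().
+
+			   void SampleReplica::SerializeConstruction(RakNet::BitStream *constructionBitstream, RakNet::Connection_RM3 *destinationConnection)
+			   {
+			       constructionBitstream->Write(health);
+			   }
+			   bool SampleReplica::DeserializeConstruction(RakNet::BitStream *constructionBitstream, RakNet::Connection_RM3 *sourceConnection)
+			   {
+			       return constructionBitstream->Read(health);   // returning false rejects the construction
+			   }
+			*/
+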
newObjects[newListIndex]->PostSerializeConstructionExisting(&bsOut2, this); + } + if (bsOut2.GetNumberOfBitsUsed()>0) + { + bsOut.Write(true); + bsOut.AlignWriteToByteBoundary(); + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); // overwritten to point to the end of the stream + networkId=newObjects[newListIndex]->GetNetworkID(); + bsOut.Write(networkId); + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + bsOut.Write(&bsOut2); + bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + else + bsOut.Write(false); + } + bsOut.AlignWriteToByteBoundary(); + + // Destruction + objectSize = (uint16_t) deletedObjects.Size(); + bsOut.Write(objectSize); + for (oldListIndex=0; oldListIndex < deletedObjects.Size(); oldListIndex++) + { + networkId=deletedObjects[oldListIndex]->GetNetworkID(); + bsOut.Write(networkId); + offsetStart=bsOut.GetWriteOffset(); + bsOut.Write(offsetStart); + deletedObjects[oldListIndex]->deletingSystemGUID=rakPeer->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); + bsOut.Write(deletedObjects[oldListIndex]->deletingSystemGUID); + deletedObjects[oldListIndex]->SerializeDestruction(&bsOut, this); + bsOut.AlignWriteToByteBoundary(); + offsetEnd=bsOut.GetWriteOffset(); + bsOut.SetWriteOffset(offsetStart); + bsOut.Write(offsetEnd); + bsOut.SetWriteOffset(offsetEnd); + } + rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); + + // TODO - shouldn't this be part of construction? + + // Initial Download serialize to a new system + // Immediately send serialize after construction if the replica object already has saved data + // If the object was serialized identically, and does not change later on, then the new connection never gets the data + SerializeParameters sp; + sp.whenLastSerialized=0; + RakNet::BitStream emptyBs; + for (int index=0; index < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) + { + sp.lastSentBitstream[index]=&emptyBs; + sp.pro[index]=sendParameters; + sp.pro[index].reliability=RELIABLE_ORDERED; + } + + sp.bitsWrittenSoFar=0; +// RakNet::Time t = RakNet::GetTimeMS(); + for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) + { + sp.destinationConnection=this; + sp.messageTimestamp=0; + RakNet::Replica3 *replica = newObjects[newListIndex]; + // 8/22/09 Forgot ResetWritePointer + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp.outputBitstream[z].ResetWritePointer(); + } + + RM3SerializationResult res = replica->Serialize(&sp); + if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && + res!=RM3SR_DO_NOT_SERIALIZE && + res!=RM3SR_SERIALIZED_UNIQUELY) + { + bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; + for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) + { + sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); + allIndices[z]=true; + } + SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, rakPeer, worldId, GetTime()); +/// newObjects[newListIndex]->whenLastSerialized=t; + + } + // else wait for construction request accepted before serializing + } + + if (isFirstConstruction) + { + bsOut.Reset(); + bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE); + bsOut.Write(worldId); + SerializeOnDownloadComplete(&bsOut); + 
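+		/* A minimal sketch of the matching user hooks ("SampleConnection" and
+		   "worldSeed" are hypothetical). Whatever is written here travels with
+		   ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE and is handed back on the remote
+		   system through DeserializeOnDownloadComplete().
+
+		   void SampleConnection::SerializeOnDownloadComplete(RakNet::BitStream *bitStream)
+		   {
+		       bitStream->Write(worldSeed);
+		   }
+		   void SampleConnection::DeserializeOnDownloadComplete(RakNet::BitStream *bitStream)
+		   {
+		       bitStream->Read(worldSeed);
+		   }
+		*/
+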
rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); + } + + isFirstConstruction=false; + +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Connection_RM3::SendValidation(RakNet::RakPeerInterface *rakPeer, WorldId worldId) +{ + // Hijack to mean sendValidation + RakNet::BitStream bsOut; + bsOut.Write((MessageID)ID_REPLICA_MANAGER_SCOPE_CHANGE); + bsOut.Write(worldId); + rakPeer->Send(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,systemAddress,false); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3::Replica3() +{ + creatingSystemGUID=UNASSIGNED_RAKNET_GUID; + deletingSystemGUID=UNASSIGNED_RAKNET_GUID; + replicaManager=0; + forceSendUntilNextUpdate=false; + lsr=0; + referenceIndex = (uint32_t)-1; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +Replica3::~Replica3() +{ + if (replicaManager) + { + replicaManager->Dereference(this); + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +void Replica3::BroadcastDestruction(void) +{ + replicaManager->BroadcastDestruction(this,UNASSIGNED_SYSTEM_ADDRESS); +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RakNetGUID Replica3::GetCreatingSystemGUID(void) const +{ + return creatingSystemGUID; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ConstructionState Replica3::QueryConstruction_ClientConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + (void) destinationConnection; + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3CS_SEND_CONSTRUCTION; + // Send back to the owner client too, because they couldn't assign the network ID + if (isThisTheServer) + return RM3CS_SEND_CONSTRUCTION; + return RM3CS_NEVER_CONSTRUCT; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Replica3::QueryRemoteConstruction_ClientConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) +{ + (void) sourceConnection; + (void) isThisTheServer; + + // OK to create + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ConstructionState Replica3::QueryConstruction_ServerConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + (void) destinationConnection; + + if 
(isThisTheServer) + return RM3CS_SEND_CONSTRUCTION; + return RM3CS_NEVER_CONSTRUCT; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Replica3::QueryRemoteConstruction_ServerConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) +{ + (void) sourceConnection; + if (isThisTheServer) + return false; + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ConstructionState Replica3::QueryConstruction_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) +{ + (void) destinationConnection; + + if (p2pMode==R3P2PM_SINGLE_OWNER) + { + // We send to all, others do nothing + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3CS_SEND_CONSTRUCTION; + + // RM3CS_NEVER_CONSTRUCT will not send the object, and will not Serialize() it + return RM3CS_NEVER_CONSTRUCT; + } + else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) + { + return RM3CS_SEND_CONSTRUCTION; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) + { + return RM3CS_ALREADY_EXISTS_REMOTELY; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) + { + return RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT; + } + else + { + RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); + + // RM3CS_ALREADY_EXISTS_REMOTELY will not send the object, but WILL call QuerySerialization() and Serialize() on it. 
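+	/* These _PeerToPeer helpers are meant to be forwarded to from the pure virtual
+	   queries of a user replica. A minimal sketch of that wiring ("SampleReplica"
+	   is hypothetical; the argument lists match the calls ReplicaManager3 makes above):
+
+	   RM3ConstructionState SampleReplica::QueryConstruction(RakNet::Connection_RM3 *destinationConnection, RakNet::ReplicaManager3 *replicaManager3)
+	   {
+	       return QueryConstruction_PeerToPeer(destinationConnection, R3P2PM_SINGLE_OWNER);
+	   }
+	   bool SampleReplica::QueryRemoteConstruction(RakNet::Connection_RM3 *sourceConnection)
+	   {
+	       return QueryRemoteConstruction_PeerToPeer(sourceConnection);
+	   }
+	*/
+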
+ return RM3CS_ALREADY_EXISTS_REMOTELY; + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +bool Replica3::QueryRemoteConstruction_PeerToPeer(RakNet::Connection_RM3 *sourceConnection) +{ + (void) sourceConnection; + + return true; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3QuerySerializationResult Replica3::QuerySerialization_ClientSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + // Owner client sends to all + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3QSR_CALL_SERIALIZE; + // Server sends to all but owner client + if (isThisTheServer && destinationConnection->GetRakNetGUID()!=creatingSystemGUID) + return RM3QSR_CALL_SERIALIZE; + // Remote clients do not send + return RM3QSR_NEVER_CALL_SERIALIZE; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3QuerySerializationResult Replica3::QuerySerialization_ServerSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) +{ + (void) destinationConnection; + // Server sends to all + if (isThisTheServer) + return RM3QSR_CALL_SERIALIZE; + + // Clients do not send + return RM3QSR_NEVER_CALL_SERIALIZE; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3QuerySerializationResult Replica3::QuerySerialization_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) +{ + (void) destinationConnection; + + if (p2pMode==R3P2PM_SINGLE_OWNER) + { + // Owner peer sends to all + if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) + return RM3QSR_CALL_SERIALIZE; + + // Remote peers do not send + return RM3QSR_NEVER_CALL_SERIALIZE; + } + else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) + { + return RM3QSR_CALL_SERIALIZE; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) + { + return RM3QSR_CALL_SERIALIZE; + } + else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) + { + return RM3QSR_DO_NOT_CALL_SERIALIZE; + } + else + { + RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); + return RM3QSR_DO_NOT_CALL_SERIALIZE; + } +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Client(RakNet::Connection_RM3 *droppedConnection) const +{ + (void) droppedConnection; + return RM3AOPC_DELETE_REPLICA; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Server(RakNet::Connection_RM3 *droppedConnection) const +{ + (void) 
droppedConnection; + return RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_PeerToPeer(RakNet::Connection_RM3 *droppedConnection) const +{ + (void) droppedConnection; + return RM3AOPC_DELETE_REPLICA; +} + +// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + +#endif // _RAKNET_SUPPORT_* diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh new file mode 100644 index 0000000..72b0a36 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -e +MODE=${1} +echo running in mode ${MODE} ... +mkdir -p build/${MODE} +pushd build/${MODE} +cmake ../.. -DCMAKE_BUILD_TYPE=$MODE -DCMAKE_MODULE_PATH=$(pwd)/../../cmaki -DFIRST_ERROR=1 +cmake --build . --config $MODE --target install -- -j8 -k || cmake --build . --config ${MODE} --target install -- -j1 +ctest . --no-compress-output --output-on-failure -T Test -C ${MODE} -V +popd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh new file mode 100644 index 0000000..967bf29 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +function print_if_has_content() +{ + file=$1 + minimumsize=400 + actualsize=$(wc -c <"$file") + if [ $actualsize -ge $minimumsize ]; + then + cat $file + fi +} + +echo Running test: $1 +export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer-3.6) +export ASAN_OPTIONS="check_initialization_order=1" +rm $1.coverage 2> /dev/null +rm $1.gcno 2> /dev/null +rm default.profraw 2> /dev/null +./$1 +ret=$? 
+llvm-profdata-3.6 merge -o $1.gcno default.profraw 2> /dev/null +llvm-cov-3.6 show ./$1 -instr-profile=$1.gcno > $1.coverage +cat $1.coverage | ansi2html > $1.html +print_if_has_content $1.html +exit $ret + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py new file mode 100644 index 0000000..66f01d7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py @@ -0,0 +1,175 @@ +import os +import utils +import logging +from third_party import platforms +from third_party import build_unittests_foldername +from itertools import product +from third_party import get_identifier + +def run_tests(node, parameters, compiler_replace_maps, unittests): + + old_cwd = os.getcwd() + + rootdir = parameters.rootdir + rootdir = utils.get_norm_path(rootdir) + rootdir = rootdir.replace('\\', '/') + + cmakelib_dir = parameters.cmakefiles + cmakelib_dir = utils.get_norm_path(cmakelib_dir) + cmakelib_dir = cmakelib_dir.replace('\\', '/') + + cmake3p_dir = parameters.prefix + cmake3p_dir = utils.get_norm_path(cmake3p_dir) + cmake3p_dir = cmake3p_dir.replace('\\', '/') + + cmake_prefix = parameters.prefix + cmake_prefix = utils.get_norm_path(cmake_prefix) + cmake_prefix = cmake_prefix.replace('\\', '/') + + cmake_third_party_dir = parameters.third_party_dir + cmake_third_party_dir = utils.get_norm_path(cmake_third_party_dir) + cmake_third_party_dir = cmake_third_party_dir.replace('\\', '/') + + package = node.get_package_name() + package_upper = node.get_package_name_norm_upper() + version = node.get_version() + packing = node.is_packing() + if not packing: + logging.warning("No need run_tests, because wasn't generated a package") + return 0 + + # prepare unittests + # can be a file or content + unittest_value = node.get_unittest() + if unittest_value is not None: + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, build_modes): + builddir = node.get_build_directory(plat, build_mode) + path_test = os.path.join(builddir, build_unittests_foldername) + utils.trymkdir(path_test) + + # is is a file + unittest_path = os.path.join(builddir, unittest_value) + if os.path.isfile(unittest_path): + with open(unittest_path, 'rt') as f: + unittest_value = f.read() + + with open(os.path.join(path_test, 'main.cpp'), 'wt') as f: + f.write(unittest_value) + + if parameters.fast: + logging.debug('skipping for because is in fast mode: "prepare"') + break + else: + logging.warning('[%s] No test present.' 
% package) + + folder_3rdparty = parameters.third_party_dir + output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) + + build_modes = node.get_build_modes() + for plat, build_mode in product(platforms, reversed(build_modes)): + for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): + # verify md5sum + install_directory = node.get_install_directory(plat) + workspace = node.get_workspace(plat) + utils.trymkdir(install_directory) + with utils.working_directory(install_directory): + prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) + prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) + if os.path.exists(prefix_package) and os.path.exists(prefix_package_md5): + with open(prefix_package_md5, 'rt') as f: + md5sum = f.read().strip() + + try: + logging.debug("expected md5: %s" % md5sum) + for line in utils.get_stdout('cmake -E md5sum %s' % prefix_package, env_modified, 'cmake'): + if len(line) > 0: + # md5sum filename + chunks = line.split(' ') + chunks = list(filter(None, chunks)) + assert(len(chunks) > 0) + md5sum_real = chunks[0] + logging.debug("real md5: %s" % md5sum_real) + + if (md5sum != md5sum_real): + logging.error('Error in generated md5sum file!') + logging.error('Expected: %s' % md5sum) + logging.error('Found: %s' % md5sum_real) + # add error to node + node.ret += 1 + except utils.NotFoundProgram: + logging.info('cannot verify md5 because cmake was not found') + else: + logging.warning('Skipping md5 verification because the package or md5 file does not exist') + + logging.info('running unittests. Build mode: %s Platform: %s' % (build_mode, plat)) + + # Careful when removing cmake3p: doing so also removes the marker + # node.remove_cmake3p( cmake3p_dir ) + + builddir = os.path.join(old_cwd, node.get_build_directory(plat, build_mode)) + logging.info('Using builddir %s' % builddir) + unittest_folder = os.path.join(builddir, build_unittests_foldername) + unittest_found = os.path.join(unittest_folder, 'main.cpp') + unittest_found = unittest_found.replace('\\', '/') + unittest_root = os.path.join(old_cwd, build_unittests_foldername) + + if os.path.exists(unittest_found): + + logging.info('Searching for cmakelib in %s' % cmakelib_dir) + if os.path.isdir(os.path.join(cmakelib_dir)): + + with utils.working_directory(unittest_folder): + + generator_extra = '' + if generator is not None: + generator_extra = '-G"%s"' % generator + + find_packages = [] + find_packages.append(package) + for dep in node.get_depends_raw(): + package_name = dep.get_package_name() + find_packages.append(package_name) + find_packages_str = ';'.join(find_packages) + + # remove CMakeCache.txt to avoid problems when + # changing the generator + utils.tryremove('CMakeCache.txt') + utils.tryremove('cmake_install.cmake') + utils.tryremove('install_manifest.txt') + utils.tryremove_dir('CMakeFiles') + + cmd = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKI_COMPILER="%s" -DCMAKI_PLATFORM="%s" -DCMAKE_MODULE_PATH="%s" -DPACKAGE="%s" -DPACKAGE_UPPER="%s" -DCMAKE_BUILD_TYPE="%s" -DCMAKE_PREFIX_PATH="%s" -DUNITTEST_PATH="%s" -DDEPENDS_PATH="%s" -DFIND_PACKAGES="%s" && cmake --build . --config %s --target install && ctest . 
+                            unittest_root,
+                            generator_extra,
+                            cmake_prefix,
+                            get_identifier('COMPILER'),
+                            get_identifier('ALL'),
+                            cmakelib_dir,
+                            package,
+                            package_upper,
+                            build_mode,
+                            cmake_third_party_dir,
+                            unittest_found,
+                            cmake_prefix,
+                            find_packages_str,
+                            build_mode,
+                            build_mode)
+                        ret = utils.safe_system(cmd, env=env_modified)
+                        node.ret += abs(ret)
+                        if ret != 0:
+                            unittests[ '%s - %s' % (package, version) ] = 'ERROR: Fail test'
+                        else:
+                            unittests[ '%s - %s' % (package, version) ] = 'OK: Pass test'
+                else:
+                    unittests[ '%s - %s' % (package, version) ] = 'WARN: No cmakelib available'
+            else:
+                unittests[ '%s - %s' % (package, version) ] = 'WARN: No unittest found'
+
+    if node.ret != 0:
+        logging.warning('Cleaning packages because tests failed.')
+        node.remove_packages()
+
+    # success
+    return True
+
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py
new file mode 100755
index 0000000..57fd37a
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py
@@ -0,0 +1,31 @@
+import os
+import sys
+import logging
+import argparse
+import urllib
+import csv
+import utils
+import subprocess
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--name', required=True, dest='name', help='package name', default=None)
+    parser.add_argument('--version', required=True, dest='version', help='pinned package version', default=None)
+    parser.add_argument('--depends', required=True, dest='depends', help='JSON file where resolved versions are saved', default=None)
+    parameters = parser.parse_args()
+
+    depends_file = parameters.depends
+    if os.path.exists(depends_file):
+        data = utils.deserialize(depends_file)
+    else:
+        data = {}
+    # serialize only if this package is not already recorded
+    if parameters.name not in data:
+        data[parameters.name] = parameters.version
+        logging.info('serialize data = %s' % data)
+        depends_file_tmp = depends_file + '.tmp'
+        utils.serialize(data, depends_file_tmp)
+        ret = subprocess.call('python -m json.tool %s > %s' % (depends_file_tmp, depends_file), shell=True)
+        os.remove(depends_file_tmp)
+        sys.exit(ret)
+
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt
new file mode 100644
index 0000000..6683d9c
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt
@@ -0,0 +1,1366 @@
+cmake_minimum_required(VERSION 2.8)
+project(SDL2 C)
+include(CheckFunctionExists)
+include(CheckLibraryExists)
+include(CheckIncludeFiles)
+include(CheckIncludeFile)
+include(CheckSymbolExists)
+include(CheckCSourceRuns)
+include(CheckCCompilerFlag)
+include(CheckTypeSize)
+include(CheckStructHasMember)
+include(CMakeDependentOption)
+include(FindPkgConfig)
+set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake")
+include(${SDL2_SOURCE_DIR}/cmake/macros.cmake)
+include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake)
+
+# General settings
+# Edit include/SDL_version.h and change the version, then:
+#   SDL_MICRO_VERSION += 1;
+#   SDL_INTERFACE_AGE += 1;
+#   SDL_BINARY_AGE += 1;
+# if any functions have been added, set SDL_INTERFACE_AGE to 0.
+# if backwards compatibility has been broken, +# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. +set(SDL_MAJOR_VERSION 2) +set(SDL_MINOR_VERSION 0) +set(SDL_MICRO_VERSION 3) +set(SDL_INTERFACE_AGE 1) +set(SDL_BINARY_AGE 3) +set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") + +# Calculate a libtool-like version number +math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") +set(LT_REVISION "${SDL_INTERFACE_AGE}") +set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") +set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") + +message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") + +# General settings & flags +set(LIBRARY_OUTPUT_DIRECTORY "build") +# Check for 64 or 32 bit +set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) +if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(ARCH_64 TRUE) + set(PROCESSOR_ARCH "x64") +else() + set(ARCH_64 FALSE) + set(PROCESSOR_ARCH "x86") +endif() +set(LIBNAME SDL2) +if(NOT LIBTYPE) + set(LIBTYPE SHARED) +endif() + +# Get the platform +if(WIN32) + if(NOT WINDOWS) + set(WINDOWS TRUE) + endif() +elseif(UNIX AND NOT APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") + set(LINUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") + set(NETBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") + set(OPENBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") + set(GNU TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") + set(BSDI TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") + set(SYSV5 TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") + set(SOLARIS TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") + set(HPUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") + set(AIX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") + set(MINIX TRUE) + endif() +elseif(APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") + set(DARWIN TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") + set(MACOSX TRUE) + endif() + # TODO: iOS? +elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") + message_error("BeOS support has been removed as of SDL 2.0.2.") +elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") + set(HAIKU TRUE) +endif() + +# Don't mistake osx for unix +if(UNIX AND NOT APPLE) + set(UNIX_SYS ON) +else() + set(UNIX_SYS OFF) +endif() + +if(UNIX OR APPLE) + set(UNIX_OR_MAC_SYS ON) +else() + set(UNIX_OR_MAC_SYS OFF) +endif() + +if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. 
+ set(PTHREADS_ENABLED_BY_DEFAULT ON) +else() + set(PTHREADS_ENABLED_BY_DEFAULT OFF) +endif() + +# Default option knobs +if(APPLE OR ARCH_64) + set(OPT_DEF_SSEMATH ON) +endif() +if(UNIX OR MINGW OR MSYS) + set(OPT_DEF_LIBC ON) +endif() + +# Compiler info +if(CMAKE_COMPILER_IS_GNUCC) + set(USE_GCC TRUE) + set(OPT_DEF_ASM TRUE) +elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") + set(USE_CLANG TRUE) + set(OPT_DEF_ASM TRUE) +elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ + set(OPT_DEF_ASM TRUE) + #set(CMAKE_C_FLAGS "/ZI /WX- / +else() + set(OPT_DEF_ASM FALSE) +endif() + +# Default flags, if not set otherwise +if("$ENV{CFLAGS}" STREQUAL "") + if(USE_GCC OR USE_CLANG) + set(CMAKE_C_FLAGS "-g -O3") + endif() +else() + set(CMAKE_C_FLAGS "$ENV{CFLAGS}") + list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") +endif() +if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 + list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") +endif() + +if(MSVC) + option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) + if(FORCE_STATIC_VCRT) + foreach(flag_var + CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif() + endforeach() + endif() +endif() + +# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, +# etc. are created correctly. +set(SDL_LIBS "-lSDL2") +set(SDL_CFLAGS "") + +# Emscripten toolchain has a nonempty default value for this, and the checks +# in this file need to change that, so remember the original value, and +# restore back to that afterwards. For check_function_exists() to work in +# Emscripten, this value must be at its default value. +set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) + +if(CYGWIN) + # We build SDL on cygwin without the UNIX emulation layer + include_directories("-I/usr/include/mingw") + set(CMAKE_REQUIRED_FLAGS "-mno-cygwin") + check_c_source_compiles("int main(int argc, char **argv) {}" + HAVE_GCC_NO_CYGWIN) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_GCC_NO_CYGWIN) + list(APPEND EXTRA_LDFLAGS "-mno-cygwin") + list(APPEND SDL_LIBS "-mno-cygwin") + endif() + set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") +endif() + +add_definitions(-DUSING_GENERATED_CONFIG_H) +# General includes +include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) + +if(EMSCRIPTEN) + # Set up default values for the currently supported set of subsystems: + # Emscripten/Javascript does not have assembly support, a dynamic library + # loading architecture, low-level CPU inspection or multithreading. 
+ set(OPT_DEF_ASM FALSE) + set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) + set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) + set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) + set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) + set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) + set(DLOPEN_ENABLED_BY_DEFAULT OFF) +else() + set(SDL_SHARED_ENABLED_BY_DEFAULT ON) + set(SDL_ATOMIC_ENABLED_BY_DEFAULT ON) + set(SDL_THREADS_ENABLED_BY_DEFAULT ON) + set(SDL_LOADSO_ENABLED_BY_DEFAULT ON) + set(SDL_CPUINFO_ENABLED_BY_DEFAULT ON) + set(DLOPEN_ENABLED_BY_DEFAULT ON) +endif() + +set(SDL_SUBSYSTEMS + Atomic Audio Video Render Events Joystick Haptic Power Threads Timers + File Loadso CPUinfo Filesystem) +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) + set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) + endif() + option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) +endforeach() + +option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") +#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) +set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) +set_option(GCC_ATOMICS "Use gcc builtin atomics" ${USE_GCC}) +set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) +set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) +set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) +set_option(3DNOW "Use 3Dnow! MMX assembly routines" ${OPT_DEF_ASM}) +set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) +set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) +set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) +set_option(DISKAUDIO "Support the disk writer audio driver" ON) +set_option(DUMMYAUDIO "Support the dummy audio driver" ON) +set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) +dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) +set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) +dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND_SHARED" OFF) +set_option(VIDEO_DUMMY "Use dummy video driver" ON) +set_option(VIDEO_OPENGL "Include OpenGL support" ON) +set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) +set_option(PTHREADS "Use POSIX threads for multi-threading" ${PTHREADS_ENABLED_BY_DEFAULT}) +dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) +set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${DLOPEN_ENABLED_BY_DEFAULT}) +set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) +set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) +dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) +set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) +dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) +set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) +dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) +set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) +dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) +set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) +set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) +set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) +set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) +set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) +set_option(INPUT_TSLIB "Use the 
Touchscreen library for input" ${UNIX_SYS}) +set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) +set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) +set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) +dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) +set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) +foreach(_SUB ${SDL_X11_OPTIONS}) + string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) + dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) +endforeach() +set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) +set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) +set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) + +# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here +# The options below are for compatibility to configure's default behaviour. +set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") +set(SDL_STATIC ON CACHE BOOL "Build a static version of the library") + +# General source files +file(GLOB SOURCE_FILES + ${SDL2_SOURCE_DIR}/src/*.c + ${SDL2_SOURCE_DIR}/src/atomic/*.c + ${SDL2_SOURCE_DIR}/src/audio/*.c + ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c + ${SDL2_SOURCE_DIR}/src/dynapi/*.c + ${SDL2_SOURCE_DIR}/src/events/*.c + ${SDL2_SOURCE_DIR}/src/file/*.c + ${SDL2_SOURCE_DIR}/src/libm/*.c + ${SDL2_SOURCE_DIR}/src/render/*.c + ${SDL2_SOURCE_DIR}/src/render/*/*.c + ${SDL2_SOURCE_DIR}/src/stdlib/*.c + ${SDL2_SOURCE_DIR}/src/thread/*.c + ${SDL2_SOURCE_DIR}/src/timer/*.c + ${SDL2_SOURCE_DIR}/src/video/*.c) + + +if(ASSERTIONS STREQUAL "auto") + # Do nada - use optimization settings to determine the assertion level +elseif(ASSERTIONS STREQUAL "disabled") + set(SDL_DEFAULT_ASSERT_LEVEL 0) +elseif(ASSERTIONS STREQUAL "release") + set(SDL_DEFAULT_ASSERT_LEVEL 1) +elseif(ASSERTIONS STREQUAL "enabled") + set(SDL_DEFAULT_ASSERT_LEVEL 2) +elseif(ASSERTIONS STREQUAL "paranoid") + set(SDL_DEFAULT_ASSERT_LEVEL 3) +else() + message_error("unknown assertion level") +endif() +set(HAVE_ASSERTIONS ${ASSERTIONS}) + +# Compiler option evaluation +if(USE_GCC OR USE_CLANG) + if(DEPENDENCY_TRACKING) + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 3 + #error Dependency tracking requires GCC 3.0 or newer + #endif + int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) + endif() + + if(GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + void *x, *y, *z; + __sync_lock_test_and_set(&a, 4); + __sync_lock_test_and_set(&x, y); + __sync_fetch_and_add(&a, 1); + __sync_bool_compare_and_swap(&a, 5, 10); + __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) + if(NOT HAVE_GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + __sync_lock_test_and_set(&a, 1); + __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) + endif() + endif() + + set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") + check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" + HAVE_GCC_PREFERRED_STACK_BOUNDARY) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror -Wno-error=implicit-function-declaration") + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 4 + #error SDL only uses visibility attributes in GCC 4 or newer + #endif + int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) + if(HAVE_GCC_FVISIBILITY) + list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") + endif() + 
set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + check_c_compiler_flag(-Wall HAVE_GCC_WALL) + if(HAVE_GCC_WALL) + if(HAIKU) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") + endif() + endif() +endif() + +if(ASSEMBLY) + if(USE_GCC OR USE_CLANG) + set(SDL_ASSEMBLY_ROUTINES 1) + # TODO: Those all seem to be quite GCC specific - needs to be + # reworked for better compiler support + set(HAVE_ASSEMBLY TRUE) + if(MMX) + set(CMAKE_REQUIRED_FLAGS "-mmmx") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __MMX__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_MMX) + if(HAVE_MMX) + list(APPEND EXTRA_CFLAGS "-mmmx") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(3DNOW) + set(CMAKE_REQUIRED_FLAGS "-m3dnow") + check_c_source_compiles(" + #include + #ifndef __3dNOW__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { + void *p = 0; + _m_prefetch(p); + }" HAVE_3DNOW) + if(HAVE_3DNOW) + list(APPEND EXTRA_CFLAGS "-m3dnow") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE) + set(CMAKE_REQUIRED_FLAGS "-msse") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE) + if(HAVE_SSE) + list(APPEND EXTRA_CFLAGS "-msse") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE2) + set(CMAKE_REQUIRED_FLAGS "-msse2") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE2__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE2) + if(HAVE_SSE2) + list(APPEND EXTRA_CFLAGS "-msse2") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSEMATH) + if(SSE OR SSE2) + if(USE_GCC) + list(APPEND EXTRA_CFLAGS "-mfpmath=387") + endif() + set(HAVE_SSEMATH TRUE) + endif() + endif() + + if(ALTIVEC) + set(CMAKE_REQUIRED_FLAGS "-maltivec") + check_c_source_compiles(" + #include + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) + check_c_source_compiles(" + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set + list(APPEND EXTRA_CFLAGS "-maltivec") + set(SDL_ALTIVEC_BLITTERS 1) + if(HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC_H 1) + endif() + endif() + endif() + elseif(MSVC_VERSION GREATER 1500) + # TODO: SDL_cpuinfo.h needs to support the user's configuration wish + # for MSVC - right now it is always activated + if(NOT ARCH_64) + set(HAVE_MMX TRUE) + set(HAVE_3DNOW TRUE) + endif() + set(HAVE_SSE TRUE) + set(HAVE_SSE2 TRUE) + set(SDL_ASSEMBLY_ROUTINES 1) + endif() +# TODO: +#else() +# if(USE_GCC OR USE_CLANG) +# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-mmx") +# endif() +endif() + +# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define +# anything. 
+if(LIBC) + if(WINDOWS AND NOT MINGW) + set(HAVE_LIBC TRUE) + foreach(_HEADER stdio.h string.h ctype.h math.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." "_" _HAVE_H ${_UPPER}) + set(${_HAVE_H} 1) + endforeach() + set(HAVE_SIGNAL_H 1) + foreach(_FN + malloc calloc realloc free qsort abs memset memcpy memmove memcmp + strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp + _stricmp _strnicmp sscanf atan atan2 acos asin ceil copysign cos + cosf fabs floor log pow scalbn sin sinf sqrt sqrtf tan tanf) + string(TOUPPER ${_FN} _UPPER) + set(HAVE_${_UPPER} 1) + endforeach() + if(NOT CYGWIN AND NOT MINGW) + set(HAVE_ALLOCA 1) + endif() + set(HAVE_M_PI 1) + add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI + set(STDC_HEADERS 1) + else() + set(HAVE_LIBC TRUE) + check_include_file(sys/types.h HAVE_SYS_TYPES_H) + foreach(_HEADER + stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h + strings.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." "_" _HAVE_H ${_UPPER}) + check_include_file("${_HEADER}" ${_HAVE_H}) + endforeach() + + check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) + check_type_size("size_t" SIZEOF_SIZE_T) + check_symbol_exists(M_PI math.h HAVE_M_PI) + # TODO: refine the mprotect check + check_c_source_compiles("#include + #include + int main() { }" HAVE_MPROTECT) + foreach(_FN + strtod malloc calloc realloc free getenv setenv putenv unsetenv + qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat + strdup _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull + atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp + vsscanf vsnprintf fseeko fseeko64 sigaction setjmp + nanosleep sysconf sysctlbyname + ) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + + check_library_exists(m pow "" HAVE_LIBM) + if(HAVE_LIBM) + set(CMAKE_REQUIRED_LIBRARIES m) + foreach(_FN + atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin + sinf sqrt sqrtf tan tanf) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + set(CMAKE_REQUIRED_LIBRARIES) + list(APPEND EXTRA_LIBS m) + endif() + + check_library_exists(iconv iconv_open "" HAVE_LIBICONV) + if(HAVE_LIBICONV) + list(APPEND EXTRA_LIBS iconv) + endif() + + check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) + endif() +else() + if(WINDOWS) + set(HAVE_STDARG_H 1) + set(HAVE_STDDEF_H 1) + endif() +endif() + + +# Enable/disable various subsystems of the SDL library +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if(NOT SDL_${_OPT}) + set(SDL_${_OPT}_DISABLED 1) + endif() +endforeach() +if(SDL_JOYSTICK) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) +endif() +if(SDL_HAPTIC) + if(NOT SDL_JOYSTICK) + # Haptic requires some private functions from the joystick subsystem. 
+ message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") + endif() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(SDL_POWER) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) +endif() +# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: +# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set +# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), +# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings + +# General SDL subsystem options, valid for all platforms +if(SDL_AUDIO) + # CheckDummyAudio/CheckDiskAudio - valid for all platforms + if(DUMMYAUDIO) + set(SDL_AUDIO_DRIVER_DUMMY 1) + file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) + set(HAVE_DUMMYAUDIO TRUE) + endif() + if(DISKAUDIO) + set(SDL_AUDIO_DRIVER_DISK 1) + file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) + set(HAVE_DISKAUDIO TRUE) + endif() +endif() + +if(SDL_DLOPEN) + # Relevant for Unix/Darwin only + if(UNIX OR APPLE) + CheckDLOPEN() + endif() +endif() + +if(SDL_VIDEO) + if(VIDEO_DUMMY) + set(SDL_VIDEO_DRIVER_DUMMY 1) + file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) + set(HAVE_VIDEO_DUMMY TRUE) + set(HAVE_SDL_VIDEO TRUE) + endif() +endif() + +# Platform-specific options and settings +if(EMSCRIPTEN) + # Hide noisy warnings that intend to aid mostly during initial stages of porting a new + # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
+ add_definitions(-Wno-warn-absolute-paths) + if(SDL_AUDIO) + file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) + endif() + if(SDL_FILESYSTEM) + file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) + endif() + if(SDL_JOYSTICK) + file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) + endif() + if(SDL_POWER) + file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) + endif() + if(SDL_VIDEO) + file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) + endif() +elseif(UNIX AND NOT APPLE) + if(SDL_AUDIO) + if(SYSV5 OR SOLARIS OR HPUX) + set(SDL_AUDIO_DRIVER_SUNAUDIO 1) + file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(NETBSD OR OPENBSD) + set(SDL_AUDIO_DRIVER_BSD 1) + file(GLOB BSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/bsd/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${BSD_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(AIX) + set(SDL_AUDIO_DRIVER_PAUDIO 1) + file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + CheckOSS() + CheckALSA() + CheckPulseAudio() + CheckESD() + CheckARTS() + CheckNAS() + CheckSNDIO() + CheckFusionSound() + endif() + + if(SDL_VIDEO) + CheckX11() + CheckMir() + CheckDirectFB() + CheckOpenGLX11() + CheckOpenGLESX11() + CheckWayland() + endif() + + if(LINUX) + check_c_source_compiles(" + #include + #ifndef EVIOCGNAME + #error EVIOCGNAME() ioctl not available + #endif + int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS) + + check_c_source_compiles(" + #include + #include + + int main(int argc, char **argv) + { + struct kbentry kbe; + kbe.kb_table = KG_CTRL; + ioctl(0, KDGKBENT, &kbe); + }" HAVE_INPUT_KD) + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(HAVE_INPUT_EVENTS) + set(SDL_INPUT_LINUXEV 1) + endif() + + if(SDL_HAPTIC AND HAVE_INPUT_EVENTS) + set(SDL_HAPTIC_LINUX 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + endif() + + if(HAVE_INPUT_KD) + set(SDL_INPUT_LINUXKD 1) + endif() + + check_include_file("libudev.h" HAVE_LIBUDEV_H) + + # !!! FIXME: this needs pkg-config to find the include path, I think. + check_include_file("dbus/dbus.h" HAVE_DBUS_DBUS_H) + endif() + + if(INPUT_TSLIB) + check_c_source_compiles(" + #include \"tslib.h\" + int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB) + if(HAVE_INPUT_TSLIB) + set(SDL_INPUT_TSLIB 1) + list(APPEND EXTRA_LIBS ts) + endif() + endif() + + if(SDL_JOYSTICK) + CheckUSBHID() # seems to be BSD specific - limit the test to BSD only? 
+ if(LINUX) + set(SDL_JOYSTICK_LINUX 1) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + endif() + + CheckPTHREAD() + + if(CLOCK_GETTIME) + check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + list(APPEND EXTRA_LIBS rt) + set(HAVE_CLOCK_GETTIME 1) + else() + check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + set(HAVE_CLOCK_GETTIME 1) + endif() + endif() + endif() + + check_include_file(linux/version.h HAVE_LINUX_VERSION_H) + if(HAVE_LINUX_VERSION_H) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") + endif() + + if(SDL_POWER) + if(LINUX) + set(SDL_POWER_LINUX 1) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_UNIX 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(RPATH) + set(SDL_RLD_FLAGS "") + if(BSDI OR FREEBSD OR LINUX OR NETBSD) + set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") + elseif(SOLARIS) + set(SDL_RLD_FLAGS "-R\${libdir}") + endif() + set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) + set(HAVE_RPATH TRUE) + endif() + +elseif(WINDOWS) + find_program(WINDRES windres) + + check_c_source_compiles(" + #include + int main(int argc, char **argv) { }" HAVE_WIN32_CC) + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + # Check for DirectX + if(DIRECTX) + if("$ENV{DXSDK_DIR}" STREQUAL "") + message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") + endif() + set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") + check_include_file(d3d9.h HAVE_D3D_H) + check_include_file(d3d11_1.h HAVE_D3D11_H) + check_include_file(ddraw.h HAVE_DDRAW_H) + check_include_file(dsound.h HAVE_DSOUND_H) + check_include_file(dinput.h HAVE_DINPUT_H) + check_include_file(xaudio2.h HAVE_XAUDIO2_H) + check_include_file(dxgi.h HAVE_DXGI_H) + if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H OR HAVE_XAUDIO2_H) + set(HAVE_DIRECTX TRUE) + # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks + link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) + include_directories($ENV{DXSDK_DIR}\\Include) + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_WINMM 1) + file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + + if(HAVE_DSOUND_H) + set(SDL_AUDIO_DRIVER_DSOUND 1) + file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) + endif() + + if(HAVE_XAUDIO2_H) + set(SDL_AUDIO_DRIVER_XAUDIO2 1) + file(GLOB XAUDIO2_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/xaudio2/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${XAUDIO2_AUDIO_SOURCES}) + endif() + endif() + + if(SDL_VIDEO) + # requires SDL_LOADSO on Windows (IME, DX, etc.) 
+ if(NOT SDL_LOADSO) + message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") + endif() + set(SDL_VIDEO_DRIVER_WINDOWS 1) + file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) + + if(RENDER_D3D AND HAVE_D3D_H) + set(SDL_VIDEO_RENDER_D3D 1) + set(HAVE_RENDER_D3D TRUE) + endif() + if(RENDER_D3D AND HAVE_D3D11_H) + set(SDL_VIDEO_RENDER_D3D11 1) + set(HAVE_RENDER_D3D TRUE) + endif() + set(HAVE_SDL_VIDEO TRUE) + endif() + + if(SDL_THREADS) + set(SDL_THREAD_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c + ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) + set(HAVE_SDL_THREADS TRUE) + endif() + + if(SDL_POWER) + set(SDL_POWER_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) + set(HAVE_SDL_POWER TRUE) + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_WINDOWS 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Libraries for Win32 native and MinGW + list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) + + # TODO: in configure.in the check for timers is set on + # cygwin | mingw32* - does this include mingw32CE? + if(SDL_TIMERS) + set(SDL_TIMER_WINDOWS 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(SDL_LOADSO) + set(SDL_LOADSO_WINDOWS 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) + set(HAVE_SDL_LOADSO TRUE) + endif() + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(SDL_VIDEO) + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_WGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + if(SDL_JOYSTICK) + if(HAVE_DINPUT_H) + set(SDL_JOYSTICK_DINPUT 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_dxjoystick.c) + list(APPEND EXTRA_LIBS dinput8 dxguid dxerr) + else() + set(SDL_JOYSTICK_WINMM 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_mmjoystick.c) + endif() + set(HAVE_SDL_JOYSTICK TRUE) + endif() + + if(SDL_HAPTIC AND HAVE_DINPUT_H) + set(SDL_HAPTIC_DINPUT 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/haptic/windows/SDL_syshaptic.c) + set(HAVE_SDL_HAPTIC TRUE) + endif() + + file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) + if(MINGW OR CYGWIN) + list(APPEND EXTRA_LIBS mingw32) + list(APPEND EXTRA_LDFLAGS "-mwindows") + set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") + list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") + endif() +elseif(APPLE) + # TODO: rework this for proper MacOS X, iOS and Darwin support + + # Requires the darwin file implementation + if(SDL_FILE) + file(GLOB EXTRA_SOURCES ${PROJECT_SOURCE_DIR}/src/file/cocoa/*.m) + set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) + set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) + set(HAVE_SDL_FILE TRUE) + 
set(SDL_FRAMEWORK_COCOA 1) + else() + message_error("SDL_FILE must be enabled to build on MacOS X") + endif() + + if(SDL_AUDIO) + set(MACOSX_COREAUDIO 1) + file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + set(SDL_FRAMEWORK_COREAUDIO 1) + set(SDL_FRAMEWORK_AUDIOUNIT 1) + endif() + + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_IOKIT 1) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + endif() + + if(SDL_HAPTIC) + set(SDL_HAPTIC_IOKIT 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + if(NOT SDL_JOYSTICK) + message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") + endif() + endif() + + if(SDL_POWER) + set(SDL_POWER_MACOSX 1) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + set(SDL_FRAMEWORK_CARBON 1) + set(SDL_FRAMEWORK_IOKIT 1) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_COCOA 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) + set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Actually load the frameworks at the end so we don't duplicate include. + if(SDL_FRAMEWORK_COCOA) + find_library(COCOA_LIBRARY Cocoa) + list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) + endif() + if(SDL_FRAMEWORK_IOKIT) + find_library(IOKIT IOKit) + list(APPEND EXTRA_LIBS ${IOKIT}) + endif() + if(SDL_FRAMEWORK_FF) + find_library(FORCEFEEDBACK ForceFeedback) + list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) + endif() + if(SDL_FRAMEWORK_CARBON) + find_library(CARBON_LIBRARY Carbon) + list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) + endif() + if(SDL_FRAMEWORK_COREAUDIO) + find_library(COREAUDIO CoreAudio) + list(APPEND EXTRA_LIBS ${COREAUDIO}) + endif() + if(SDL_FRAMEWORK_AUDIOUNIT) + find_library(AUDIOUNIT AudioUnit) + list(APPEND EXTRA_LIBS ${AUDIOUNIT}) + endif() + + # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
+ if(SDL_VIDEO) + CheckCOCOA() + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_CGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + if(DARWIN) + find_library(OpenGL_LIBRARY OpenGL) + list(APPEND EXTRA_LIBRARIES ${OpenGL_LIBRARY}) + endif() + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + CheckPTHREAD() +elseif(HAIKU) + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_HAIKU 1) + file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + set(SDL_FILESYSTEM_HAIKU 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + + if(SDL_TIMERS) + set(SDL_TIMER_HAIKU 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(VIDEO_OPENGL) + # TODO: Use FIND_PACKAGE(OpenGL) instead + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_BGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + list(APPEND EXTRA_LIBS GL) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + CheckPTHREAD() +endif() + +# Dummies +# configure.in does it differently: +# if not have X +# if enable_X { SDL_X_DISABLED = 1 } +# [add dummy sources] +# so it always adds a dummy, without checking, if it was actually requested. +# This leads to missing internal references on building, since the +# src/X/*.c does not get included. +if(NOT HAVE_SDL_JOYSTICK) + set(SDL_JOYSTICK_DISABLED 1) + if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX + + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + endif() +endif() +if(NOT HAVE_SDL_HAPTIC) + set(SDL_HAPTIC_DISABLED 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(NOT HAVE_SDL_LOADSO) + set(SDL_LOADSO_DISABLED 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) +endif() +if(NOT HAVE_SDL_FILESYSTEM) + set(SDL_FILESYSTEM_DISABLED 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) +endif() + +# We always need to have threads and timers around +if(NOT HAVE_SDL_THREADS) + set(SDL_THREADS_DISABLED 1) + file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) +endif() +if(NOT HAVE_SDL_TIMERS) + set(SDL_TIMERS_DISABLED 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) +endif() + +if(NOT SDLMAIN_SOURCES) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) +endif() + +# Append the -MMD -MT flags +# if(DEPENDENCY_TRACKING) +# if(COMPILER_IS_GNUCC) +# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") +# endif() +# endif() + +configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" + "${SDL2_BINARY_DIR}/include/SDL_config.h") + +# Prepare the flags and remove duplicates +if(EXTRA_LDFLAGS) + list(REMOVE_DUPLICATES EXTRA_LDFLAGS) +endif() +if(EXTRA_LIBS) + list(REMOVE_DUPLICATES EXTRA_LIBS) +endif() +if(EXTRA_CFLAGS) + list(REMOVE_DUPLICATES EXTRA_CFLAGS) +endif() +listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) +set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) + +# Compat helpers for the configuration files +if(NOT WINDOWS OR 
CYGWIN) + # TODO: we need a Windows script, too + execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) + + set(prefix ${CMAKE_INSTALL_PREFIX}) + set(exec_prefix "\${prefix}") + set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") + set(bindir "\${exec_prefix}/bin") + set(includedir "\${prefix}/include") + if(SDL_STATIC) + set(ENABLE_STATIC_TRUE "") + set(ENABLE_STATIC_FALSE "#") + else() + set(ENABLE_STATIC_TRUE "#") + set(ENABLE_STATIC_FALSE "") + endif() + if(SDL_SHARED) + set(ENABLE_SHARED_TRUE "") + set(ENABLE_SHARED_FALSE "#") + else() + set(ENABLE_SHARED_TRUE "#") + set(ENABLE_SHARED_FALSE "") + endif() + + # Clean up the different lists + listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") + set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) + list(REMOVE_DUPLICATES SDL_STATIC_LIBS) + listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) + set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) + listtostr(SDL_LIBS _SDL_LIBS) + set(SDL_LIBS ${_SDL_LIBS}) + + # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") + # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") + + configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" + "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config") + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" + "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) +endif() + +##### Info output ##### +message(STATUS "") +message(STATUS "SDL2 was configured with the following options:") +message(STATUS "") +message(STATUS "Platform: ${CMAKE_SYSTEM}") +message(STATUS "64-bit: ${ARCH_64}") +message(STATUS "Compiler: ${CMAKE_C_COMPILER}") +message(STATUS "") +message(STATUS "Subsystems:") +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + message_bool_option(${_SUB} SDL_${_OPT}) +endforeach() +message(STATUS "") +message(STATUS "Options:") +list(SORT ALLOPTIONS) +foreach(_OPT ${ALLOPTIONS}) + # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters + # Get the padding + string(LENGTH ${_OPT} _OPTLEN) + math(EXPR _PADLEN "23 - ${_OPTLEN}") + string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) + message_tested_option(${_OPT} ${_PADDING}) +endforeach() +message(STATUS "") +message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") +message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") +message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") +message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") +message(STATUS "") +message(STATUS " Build Shared Library: ${SDL_SHARED}") +message(STATUS " Build Static Library: ${SDL_STATIC}") +message(STATUS "") +if(UNIX) + message(STATUS "If something was not detected, although the libraries") + message(STATUS "were installed, then make sure you have set the") + message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") + message(STATUS "") +endif() + +# Ensure that the extra cflags are used at compile time +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") + +# Always build SDLmain +add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) +set(_INSTALL_LIBS "SDL2main") + +if(SDL_SHARED) + add_library(SDL2 SHARED ${SOURCE_FILES}) + if(UNIX) + set_target_properties(SDL2 PROPERTIES + VERSION ${LT_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2-${LT_RELEASE}") + else() + set_target_properties(SDL2 PROPERTIES + VERSION ${SDL_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2") + endif() + set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) + target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) +endif() + +if(SDL_STATIC) 
+ set (BUILD_SHARED_LIBS FALSE) + add_library(SDL2-static STATIC ${SOURCE_FILES}) + set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") + if(MSVC) + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") + endif() + # TODO: Win32 platforms keep the same suffix .lib for import and static + # libraries - do we need to consider this? + set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) + target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) +endif() + +##### Installation targets ##### +install(TARGETS ${_INSTALL_LIBS} + LIBRARY DESTINATION "lib${LIB_SUFFIX}" + ARCHIVE DESTINATION "lib${LIB_SUFFIX}") + +file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) +file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) +foreach(_FNAME ${BIN_INCLUDE_FILES}) + get_filename_component(_INCNAME ${_FNAME} NAME) + list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) +endforeach() +list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) +install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) + +if(NOT WINDOWS OR CYGWIN) + if(SDL_SHARED) + install(CODE " + execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink + \"libSDL2-2.0.so\" \"libSDL2.so\")") + install(FILES ${SDL2_BINARY_DIR}/libSDL2.so DESTINATION "lib${LIB_SUFFIX}") + endif() + if(FREEBSD) + # FreeBSD uses ${PREFIX}/libdata/pkgconfig + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") + else() + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc + DESTINATION "lib${LIB_SUFFIX}/pkgconfig") + endif() + install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) + # TODO: what about the .spec file? Is it only needed for RPM creation? + install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "share/aclocal") +else() + install(TARGETS SDL2 RUNTIME DESTINATION bin) +endif() + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt new file mode 100644 index 0000000..bbad766 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt @@ -0,0 +1,1849 @@ + + +cmake_minimum_required(VERSION 2.8.11) +project(SDL2 C) + +# !!! FIXME: this should probably do "MACOSX_RPATH ON" as a target property +# !!! FIXME: for the SDL2 shared library (so you get an +# !!! FIXME: install_name ("soname") of "@rpath/libSDL-whatever.dylib" +# !!! FIXME: instead of "/usr/local/lib/libSDL-whatever.dylib"), but I'm +# !!! FIXME: punting for now and leaving the existing behavior. Until this +# !!! FIXME: properly resolved, this line silences a warning in CMake 3.0+. +# !!! FIXME: remove it and this comment entirely once the problem is +# !!! FIXME: properly resolved. 
+#cmake_policy(SET CMP0042 OLD) + +include(CheckFunctionExists) +include(CheckLibraryExists) +include(CheckIncludeFiles) +include(CheckIncludeFile) +include(CheckSymbolExists) +include(CheckCSourceCompiles) +include(CheckCSourceRuns) +include(CheckCCompilerFlag) +include(CheckTypeSize) +include(CheckStructHasMember) +include(CMakeDependentOption) +include(FindPkgConfig) +include(GNUInstallDirs) +set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") +include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) +include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) + +# General settings +# Edit include/SDL_version.h and change the version, then: +# SDL_MICRO_VERSION += 1; +# SDL_INTERFACE_AGE += 1; +# SDL_BINARY_AGE += 1; +# if any functions have been added, set SDL_INTERFACE_AGE to 0. +# if backwards compatibility has been broken, +# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. +set(SDL_MAJOR_VERSION 2) +set(SDL_MINOR_VERSION 0) +set(SDL_MICRO_VERSION 8) +set(SDL_INTERFACE_AGE 0) +set(SDL_BINARY_AGE 8) +set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") + +# Set defaults preventing destination file conflicts +set(SDL_CMAKE_DEBUG_POSTFIX "d" + CACHE STRING "Name suffix for debug builds") + +mark_as_advanced(CMAKE_IMPORT_LIBRARY_SUFFIX SDL_CMAKE_DEBUG_POSTFIX) + +# Calculate a libtool-like version number +math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") +math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") +set(LT_REVISION "${SDL_INTERFACE_AGE}") +set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") +set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") + +message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") + +# General settings & flags +set(LIBRARY_OUTPUT_DIRECTORY "build") +# Check for 64 or 32 bit +set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) +if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(ARCH_64 TRUE) + set(PROCESSOR_ARCH "x64") +else() + set(ARCH_64 FALSE) + set(PROCESSOR_ARCH "x86") +endif() +set(LIBNAME SDL2) +if(NOT LIBTYPE) + set(LIBTYPE SHARED) +endif() + +# Get the platform +if(WIN32) + if(NOT WINDOWS) + set(WINDOWS TRUE) + endif() +elseif(UNIX AND NOT APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") + set(LINUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") + set(NETBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") + set(OPENBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") + set(GNU TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") + set(BSDI TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") + set(FREEBSD TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") + set(SYSV5 TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") + set(SOLARIS TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") + set(HPUX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") + set(AIX TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") + set(MINIX TRUE) + endif() +elseif(APPLE) + if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") + set(DARWIN TRUE) + elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") + set(MACOSX TRUE) + endif() + # TODO: iOS? 
+elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") + message_error("BeOS support has been removed as of SDL 2.0.2.") +elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") + set(HAIKU TRUE) +endif() + +# Don't mistake osx for unix +if(UNIX AND NOT APPLE) + set(UNIX_SYS ON) +else() + set(UNIX_SYS OFF) +endif() + +if(UNIX OR APPLE) + set(UNIX_OR_MAC_SYS ON) +else() + set(UNIX_OR_MAC_SYS OFF) +endif() + +if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. + set(SDL_PTHREADS_ENABLED_BY_DEFAULT ON) +else() + set(SDL_PTHREADS_ENABLED_BY_DEFAULT OFF) +endif() + +# Default option knobs +if(APPLE OR ARCH_64) + if(NOT "${CMAKE_OSX_ARCHITECTURES}" MATCHES "arm") + set(OPT_DEF_SSEMATH ON) + endif() +endif() +if(UNIX OR MINGW OR MSYS) + set(OPT_DEF_LIBC ON) +endif() + +# Compiler info +if(CMAKE_COMPILER_IS_GNUCC) + set(USE_GCC TRUE) + set(OPT_DEF_ASM TRUE) +elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") + set(USE_CLANG TRUE) + set(OPT_DEF_ASM TRUE) +elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ + set(OPT_DEF_ASM TRUE) + #set(CMAKE_C_FLAGS "/ZI /WX- / +else() + set(OPT_DEF_ASM FALSE) +endif() + +if(USE_GCC OR USE_CLANG) + set(OPT_DEF_GCC_ATOMICS ON) +endif() + +# Default flags, if not set otherwise +if("$ENV{CFLAGS}" STREQUAL "") + if(CMAKE_BUILD_TYPE STREQUAL "") + if(USE_GCC OR USE_CLANG) + set(CMAKE_C_FLAGS "-g -O3") + endif() + endif() +else() + set(CMAKE_C_FLAGS "$ENV{CFLAGS}") + list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") +endif() +if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 + list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") +endif() + +if(MSVC) + option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) + if(FORCE_STATIC_VCRT) + foreach(flag_var + CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif() + endforeach() + endif() + + # Make sure /RTC1 is disabled, otherwise it will use functions from the CRT + foreach(flag_var + CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) + string(REGEX REPLACE "/RTC(su|[1su])" "" ${flag_var} "${${flag_var}}") + endforeach(flag_var) +endif() + +# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, +# etc. are created correctly. +set(SDL_LIBS "-lSDL2") +set(SDL_CFLAGS "") + +# Emscripten toolchain has a nonempty default value for this, and the checks +# in this file need to change that, so remember the original value, and +# restore back to that afterwards. For check_function_exists() to work in +# Emscripten, this value must be at its default value. 
+set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) + +if(CYGWIN) + # We build SDL on cygwin without the UNIX emulation layer + include_directories("-I/usr/include/mingw") + set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mno-cygwin") + check_c_source_compiles("int main(int argc, char **argv) {}" + HAVE_GCC_NO_CYGWIN) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_GCC_NO_CYGWIN) + list(APPEND EXTRA_LDFLAGS "-mno-cygwin") + list(APPEND SDL_LIBS "-mno-cygwin") + endif() + set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") +endif() + +add_definitions(-DUSING_GENERATED_CONFIG_H) +# General includes +include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) +if(USE_GCC OR USE_CLANG) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -idirafter ${SDL2_SOURCE_DIR}/src/video/khronos") +else() + include_directories(${SDL2_SOURCE_DIR}/src/video/khronos) +endif() + +# All these ENABLED_BY_DEFAULT vars will default to ON if not specified, so +# you only need to have a platform override them if they are disabling. +set(OPT_DEF_ASM TRUE) +if(EMSCRIPTEN) + # Set up default values for the currently supported set of subsystems: + # Emscripten/Javascript does not have assembly support, a dynamic library + # loading architecture, low-level CPU inspection or multithreading. + set(OPT_DEF_ASM FALSE) + set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) + set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) + set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) + set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) + set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) + set(SDL_DLOPEN_ENABLED_BY_DEFAULT OFF) +endif() + +if (NOT DEFINED SDL_SHARED_ENABLED_BY_DEFAULT) + set(SDL_SHARED_ENABLED_BY_DEFAULT ON) +endif() + +set(SDL_SUBSYSTEMS + Atomic Audio Video Render Events Joystick Haptic Power Threads Timers + File Loadso CPUinfo Filesystem Dlopen) +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) + set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) + endif() + option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) +endforeach() + +option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") +#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) +set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) +set_option(GCC_ATOMICS "Use gcc builtin atomics" ${OPT_DEF_GCC_ATOMICS}) +set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) +set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) +set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) +set_option(3DNOW "Use 3Dnow! 
MMX assembly routines" ${OPT_DEF_ASM}) +set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) +set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) +set_option(SSE3 "Use SSE3 assembly routines" ${OPT_DEF_SSEMATH}) +set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) +set_option(DISKAUDIO "Support the disk writer audio driver" ON) +set_option(DUMMYAUDIO "Support the dummy audio driver" ON) +set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) +dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) +set_option(VIDEO_DUMMY "Use dummy video driver" ON) +set_option(VIDEO_OPENGL "Include OpenGL support" ON) +set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) +set_option(PTHREADS "Use POSIX threads for multi-threading" ${SDL_PTHREADS_ENABLED_BY_DEFAULT}) +dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) +set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${SDL_DLOPEN_ENABLED_BY_DEFAULT}) +set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) +set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) +dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) +set_option(JACK "Support the JACK audio API" ${UNIX_SYS}) +dep_option(JACK_SHARED "Dynamically load JACK audio support" ON "JACK" OFF) +set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) +dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) +set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) +dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) +set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) +dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) +set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) +set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) +set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) +set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) +dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND" OFF) +set_option(LIBSAMPLERATE "Use libsamplerate for audio rate conversion" ${UNIX_SYS}) +dep_option(LIBSAMPLERATE_SHARED "Dynamically load libsamplerate" ON "LIBSAMPLERATE" OFF) +set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) +set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) +set_option(INPUT_TSLIB "Use the Touchscreen library for input" ${UNIX_SYS}) +set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) +set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) +dep_option(WAYLAND_SHARED "Dynamically load Wayland support" ON "VIDEO_WAYLAND" OFF) +dep_option(VIDEO_WAYLAND_QT_TOUCH "QtWayland server support for Wayland video driver" ON "VIDEO_WAYLAND" OFF) +set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) +dep_option(MIR_SHARED "Dynamically load Mir support" ON "VIDEO_MIR" OFF) +set_option(VIDEO_RPI "Use Raspberry Pi video driver" ${UNIX_SYS}) +dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) +set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) +foreach(_SUB ${SDL_X11_OPTIONS}) + string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) + dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) +endforeach() +set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) +set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) +set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) +set_option(VIDEO_VIVANTE 
"Use Vivante EGL video driver" ${UNIX_SYS}) +dep_option(VIDEO_VULKAN "Enable Vulkan support" ON "ANDROID OR APPLE OR LINUX OR WINDOWS" OFF) +set_option(VIDEO_KMSDRM "Use KMS DRM video driver" ${UNIX_SYS}) +dep_option(KMSDRM_SHARED "Dynamically load KMS DRM support" ON "VIDEO_KMSDRM" OFF) + +# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here +# The options below are for compatibility to configure's default behaviour. +# set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") +set(SDL_SHARED ON CACHE BOOL "Build a shared version of the library") +set(SDL_STATIC OFF CACHE BOOL "Build a static version of the library") + +dep_option(SDL_STATIC_PIC "Static version of the library should be built with Position Independent Code" OFF "SDL_STATIC" OFF) +set_option(SDL_TEST "Build the test directory" OFF) + + +# General source files +file(GLOB SOURCE_FILES + ${SDL2_SOURCE_DIR}/src/*.c + ${SDL2_SOURCE_DIR}/src/atomic/*.c + ${SDL2_SOURCE_DIR}/src/audio/*.c + ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c + ${SDL2_SOURCE_DIR}/src/dynapi/*.c + ${SDL2_SOURCE_DIR}/src/events/*.c + ${SDL2_SOURCE_DIR}/src/file/*.c + ${SDL2_SOURCE_DIR}/src/libm/*.c + ${SDL2_SOURCE_DIR}/src/render/*.c + ${SDL2_SOURCE_DIR}/src/render/*/*.c + ${SDL2_SOURCE_DIR}/src/stdlib/*.c + ${SDL2_SOURCE_DIR}/src/thread/*.c + ${SDL2_SOURCE_DIR}/src/timer/*.c + ${SDL2_SOURCE_DIR}/src/video/*.c + ${SDL2_SOURCE_DIR}/src/video/yuv2rgb/*.c) + + +if(ASSERTIONS STREQUAL "auto") + # Do nada - use optimization settings to determine the assertion level +elseif(ASSERTIONS STREQUAL "disabled") + set(SDL_DEFAULT_ASSERT_LEVEL 0) +elseif(ASSERTIONS STREQUAL "release") + set(SDL_DEFAULT_ASSERT_LEVEL 1) +elseif(ASSERTIONS STREQUAL "enabled") + set(SDL_DEFAULT_ASSERT_LEVEL 2) +elseif(ASSERTIONS STREQUAL "paranoid") + set(SDL_DEFAULT_ASSERT_LEVEL 3) +else() + message_error("unknown assertion level") +endif() +set(HAVE_ASSERTIONS ${ASSERTIONS}) + +# Compiler option evaluation +if(USE_GCC OR USE_CLANG) + # Check for -Wall first, so later things can override pieces of it. 
+ check_c_compiler_flag(-Wall HAVE_GCC_WALL) + if(HAVE_GCC_WALL) + list(APPEND EXTRA_CFLAGS "-Wall") + if(HAIKU) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") + endif() + endif() + + check_c_compiler_flag(-Wdeclaration-after-statement HAVE_GCC_WDECLARATION_AFTER_STATEMENT) + if(HAVE_GCC_WDECLARATION_AFTER_STATEMENT) + check_c_compiler_flag(-Werror=declaration-after-statement HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) + if(HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) + list(APPEND EXTRA_CFLAGS "-Werror=declaration-after-statement") + endif() + list(APPEND EXTRA_CFLAGS "-Wdeclaration-after-statement") + endif() + + if(DEPENDENCY_TRACKING) + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 3 + #error Dependency tracking requires GCC 3.0 or newer + #endif + int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) + endif() + + if(GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + void *x, *y, *z; + __sync_lock_test_and_set(&a, 4); + __sync_lock_test_and_set(&x, y); + __sync_fetch_and_add(&a, 1); + __sync_bool_compare_and_swap(&a, 5, 10); + __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) + if(NOT HAVE_GCC_ATOMICS) + check_c_source_compiles("int main(int argc, char **argv) { + int a; + __sync_lock_test_and_set(&a, 1); + __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) + endif() + endif() + + set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") + check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" + HAVE_GCC_PREFERRED_STACK_BOUNDARY) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror") + check_c_source_compiles(" + #if !defined(__GNUC__) || __GNUC__ < 4 + #error SDL only uses visibility attributes in GCC 4 or newer + #endif + int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) + if(HAVE_GCC_FVISIBILITY) + list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + + check_c_compiler_flag(-Wshadow HAVE_GCC_WSHADOW) + if(HAVE_GCC_WSHADOW) + list(APPEND EXTRA_CFLAGS "-Wshadow") + endif() + + if(APPLE) + list(APPEND EXTRA_LDFLAGS "-Wl,-undefined,error") + else() + set(CMAKE_REQUIRED_FLAGS "-Wl,--no-undefined") + check_c_compiler_flag("" HAVE_NO_UNDEFINED) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_NO_UNDEFINED) + list(APPEND EXTRA_LDFLAGS "-Wl,--no-undefined") + endif() + endif() +endif() + +if(ASSEMBLY) + if(USE_GCC OR USE_CLANG) + set(SDL_ASSEMBLY_ROUTINES 1) + # TODO: Those all seem to be quite GCC specific - needs to be + # reworked for better compiler support + set(HAVE_ASSEMBLY TRUE) + if(MMX) + set(CMAKE_REQUIRED_FLAGS "-mmmx") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __MMX__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_MMX) + if(HAVE_MMX) + list(APPEND EXTRA_CFLAGS "-mmmx") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(3DNOW) + set(CMAKE_REQUIRED_FLAGS "-m3dnow") + check_c_source_compiles(" + #include + #ifndef __3dNOW__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { + void *p = 0; + _m_prefetch(p); + }" HAVE_3DNOW) + if(HAVE_3DNOW) + list(APPEND EXTRA_CFLAGS "-m3dnow") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE) + 
set(CMAKE_REQUIRED_FLAGS "-msse") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE) + if(HAVE_SSE) + list(APPEND EXTRA_CFLAGS "-msse") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE2) + set(CMAKE_REQUIRED_FLAGS "-msse2") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE2__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE2) + if(HAVE_SSE2) + list(APPEND EXTRA_CFLAGS "-msse2") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SSE3) + set(CMAKE_REQUIRED_FLAGS "-msse3") + check_c_source_compiles(" + #ifdef __MINGW32__ + #include <_mingw.h> + #ifdef __MINGW64_VERSION_MAJOR + #include + #else + #include + #endif + #else + #include + #endif + #ifndef __SSE3__ + #error Assembler CPP flag not enabled + #endif + int main(int argc, char **argv) { }" HAVE_SSE3) + if(HAVE_SSE3) + list(APPEND EXTRA_CFLAGS "-msse3") + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(NOT SSEMATH) + if(SSE OR SSE2 OR SSE3) + if(USE_GCC) + check_c_compiler_flag(-mfpmath=387 HAVE_FP_387) + if(HAVE_FP_387) + list(APPEND EXTRA_CFLAGS "-mfpmath=387") + endif() + endif() + set(HAVE_SSEMATH TRUE) + endif() + endif() + + check_include_file("immintrin.h" HAVE_IMMINTRIN_H) + + if(ALTIVEC) + set(CMAKE_REQUIRED_FLAGS "-maltivec") + check_c_source_compiles(" + #include + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) + check_c_source_compiles(" + vector unsigned int vzero() { + return vec_splat_u32(0); + } + int main(int argc, char **argv) { }" HAVE_ALTIVEC) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set + list(APPEND EXTRA_CFLAGS "-maltivec") + set(SDL_ALTIVEC_BLITTERS 1) + if(HAVE_ALTIVEC_H_HDR) + set(HAVE_ALTIVEC_H 1) + endif() + endif() + endif() + elseif(MSVC_VERSION GREATER 1500) + # TODO: SDL_cpuinfo.h needs to support the user's configuration wish + # for MSVC - right now it is always activated + if(NOT ARCH_64) + set(HAVE_MMX TRUE) + set(HAVE_3DNOW TRUE) + endif() + set(HAVE_SSE TRUE) + set(HAVE_SSE2 TRUE) + set(HAVE_SSE3 TRUE) + set(SDL_ASSEMBLY_ROUTINES 1) + endif() +# TODO: +#else() +# if(USE_GCC OR USE_CLANG) +# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-sse3" "-mno-mmx") +# endif() +endif() + +# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define +# anything. +if(LIBC) + if(WINDOWS AND NOT MINGW) + set(HAVE_LIBC TRUE) + foreach(_HEADER stdio.h string.h wchar.h ctype.h math.h limits.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." 
"_" _HAVE_H ${_UPPER}) + set(${_HAVE_H} 1) + endforeach() + set(HAVE_SIGNAL_H 1) + foreach(_FN + malloc calloc realloc free qsort abs memset memcpy memmove memcmp + wcslen wcscmp + strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp + _stricmp _strnicmp sscanf + acos acosf asin asinf atan atanf atan2 atan2f ceil ceilf + copysign copysignf cos cosf fabs fabsf floor floorf fmod fmodf + log logf log10 log10f pow powf scalbn scalbnf sin sinf sqrt sqrtf tan tanf) + string(TOUPPER ${_FN} _UPPER) + set(HAVE_${_UPPER} 1) + endforeach() + if(NOT CYGWIN AND NOT MINGW) + set(HAVE_ALLOCA 1) + endif() + set(HAVE_M_PI 1) + add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI + set(STDC_HEADERS 1) + else() + set(HAVE_LIBC TRUE) + check_include_file(sys/types.h HAVE_SYS_TYPES_H) + foreach(_HEADER + stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h limits.h + strings.h wchar.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h libunwind.h) + string(TOUPPER "HAVE_${_HEADER}" _UPPER) + string(REPLACE "." "_" _HAVE_H ${_UPPER}) + check_include_file("${_HEADER}" ${_HAVE_H}) + endforeach() + + check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) + check_type_size("size_t" SIZEOF_SIZE_T) + check_symbol_exists(M_PI math.h HAVE_M_PI) + # TODO: refine the mprotect check + check_c_source_compiles("#include + #include + int main() { }" HAVE_MPROTECT) + foreach(_FN + strtod malloc calloc realloc free getenv setenv putenv unsetenv + qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat + _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa + _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull + atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp + vsscanf vsnprintf fopen64 fseeko fseeko64 sigaction setjmp + nanosleep sysconf sysctlbyname getauxval poll + ) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + + check_library_exists(m pow "" HAVE_LIBM) + if(HAVE_LIBM) + set(CMAKE_REQUIRED_LIBRARIES m) + foreach(_FN + atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin + sinf sqrt sqrtf tan tanf acos asin) + string(TOUPPER ${_FN} _UPPER) + set(_HAVEVAR "HAVE_${_UPPER}") + check_function_exists("${_FN}" ${_HAVEVAR}) + endforeach() + set(CMAKE_REQUIRED_LIBRARIES) + list(APPEND EXTRA_LIBS m) + endif() + + check_library_exists(iconv iconv_open "" HAVE_LIBICONV) + if(HAVE_LIBICONV) + list(APPEND EXTRA_LIBS iconv) + set(HAVE_ICONV 1) + endif() + + if(NOT APPLE) + check_include_file(alloca.h HAVE_ALLOCA_H) + check_function_exists(alloca HAVE_ALLOCA) + else() + set(HAVE_ALLOCA_H 1) + set(HAVE_ALLOCA 1) + endif() + + check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) + endif() +else() + if(WINDOWS) + set(HAVE_STDARG_H 1) + set(HAVE_STDDEF_H 1) + endif() +endif() + + +# Enable/disable various subsystems of the SDL library +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + if(NOT SDL_${_OPT}) + set(SDL_${_OPT}_DISABLED 1) + endif() +endforeach() +if(SDL_JOYSTICK) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) +endif() +if(SDL_HAPTIC) + if(NOT SDL_JOYSTICK) + # Haptic requires some private functions from the joystick subsystem. 
+ message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") + endif() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(SDL_POWER) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) +endif() +# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: +# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set +# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), +# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings + +# General SDL subsystem options, valid for all platforms +if(SDL_AUDIO) + # CheckDummyAudio/CheckDiskAudio - valid for all platforms + if(DUMMYAUDIO) + set(SDL_AUDIO_DRIVER_DUMMY 1) + file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) + set(HAVE_DUMMYAUDIO TRUE) + endif() + if(DISKAUDIO) + set(SDL_AUDIO_DRIVER_DISK 1) + file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) + set(HAVE_DISKAUDIO TRUE) + endif() +endif() + +if(SDL_DLOPEN) + # Relevant for Unix/Darwin only + if(UNIX OR APPLE) + CheckDLOPEN() + endif() +endif() + +if(SDL_VIDEO) + if(VIDEO_DUMMY) + set(SDL_VIDEO_DRIVER_DUMMY 1) + file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) + set(HAVE_VIDEO_DUMMY TRUE) + set(HAVE_SDL_VIDEO TRUE) + endif() +endif() + +if(ANDROID) + file(GLOB ANDROID_CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_CORE_SOURCES}) + + # SDL_spinlock.c Needs to be compiled in ARM mode. + # There seems to be no better way currently to set the ARM mode. 
+ # see: https://issuetracker.google.com/issues/62264618 + # Another option would be to set ARM mode to all compiled files + check_c_compiler_flag(-marm HAVE_ARM_MODE) + if(HAVE_ARM_MODE) + set_source_files_properties(${SDL2_SOURCE_DIR}/src/atomic/SDL_spinlock.c PROPERTIES COMPILE_FLAGS -marm) + endif() + + file(GLOB ANDROID_MAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/android/*.c) + set(SDLMAIN_SOURCES ${SDLMAIN_SOURCES} ${ANDROID_MAIN_SOURCES}) + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_ANDROID 1) + file(GLOB ANDROID_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_ANDROID 1) + file(GLOB ANDROID_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + if(SDL_HAPTIC) + set(SDL_HAPTIC_ANDROID 1) + file(GLOB ANDROID_HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + endif() + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_ANDROID 1) + file(GLOB ANDROID_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/android/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + if(SDL_LOADSO) + set(SDL_LOADSO_DLOPEN 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dlopen/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) + set(HAVE_SDL_LOADSO TRUE) + endif() + if(SDL_POWER) + set(SDL_POWER_ANDROID 1) + file(GLOB ANDROID_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_ANDROID 1) + file(GLOB ANDROID_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/android/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_VIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + # Core stuff + find_library(ANDROID_DL_LIBRARY dl) + find_library(ANDROID_LOG_LIBRARY log) + find_library(ANDROID_LIBRARY_LIBRARY android) + list(APPEND EXTRA_LIBS ${ANDROID_DL_LIBRARY} ${ANDROID_LOG_LIBRARY} ${ANDROID_LIBRARY_LIBRARY}) + add_definitions(-DGL_GLEXT_PROTOTYPES) + + #enable gles + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(HAVE_VIDEO_OPENGLES TRUE) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + + find_library(OpenGLES1_LIBRARY GLESv1_CM) + find_library(OpenGLES2_LIBRARY GLESv2) + list(APPEND EXTRA_LIBS ${OpenGLES1_LIBRARY} ${OpenGLES2_LIBRARY}) + endif() + + CHECK_C_SOURCE_COMPILES(" + #if defined(__ARM_ARCH) && __ARM_ARCH < 7 + #error Vulkan doesn't work on this configuration + #endif + int main() + { + return 0; + } + " VULKAN_PASSED_ANDROID_CHECKS) + if(NOT VULKAN_PASSED_ANDROID_CHECKS) + set(VIDEO_VULKAN OFF) + message(STATUS "Vulkan doesn't work on this configuration") + endif() + endif() + + CheckPTHREAD() + +endif() + +# Platform-specific options and settings +if(EMSCRIPTEN) + # Hide noisy warnings that intend to aid mostly during initial stages of porting a new + # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
+ add_definitions(-Wno-warn-absolute-paths) + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_EMSCRIPTEN 1) + file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_EMSCRIPTEN 1) + file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_EMSCRIPTEN 1) + file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + if(SDL_POWER) + set(SDL_POWER_EMSCRIPTEN 1) + file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + + if(CLOCK_GETTIME) + set(HAVE_CLOCK_GETTIME 1) + endif() + endif() + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_EMSCRIPTEN 1) + file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + #enable gles + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(HAVE_VIDEO_OPENGLES TRUE) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + endif() + endif() +elseif(UNIX AND NOT APPLE AND NOT ANDROID) + if(SDL_AUDIO) + if(SYSV5 OR SOLARIS OR HPUX) + set(SDL_AUDIO_DRIVER_SUNAUDIO 1) + file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(NETBSD) + set(SDL_AUDIO_DRIVER_NETBSD 1) + file(GLOB NETBSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/netbsd/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${NETBSD_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + elseif(AIX) + set(SDL_AUDIO_DRIVER_PAUDIO 1) + file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + endif() + CheckOSS() + CheckALSA() + CheckJACK() + CheckPulseAudio() + CheckESD() + CheckARTS() + CheckNAS() + CheckSNDIO() + CheckFusionSound() + CheckLibSampleRate() + endif() + + if(SDL_VIDEO) + # Need to check for Raspberry PI first and add platform specific compiler flags, otherwise the test for GLES fails! 
+    CheckRPI()
+    CheckX11()
+    CheckMir()
+    CheckDirectFB()
+    CheckOpenGLX11()
+    CheckOpenGLESX11()
+    CheckWayland()
+    CheckVivante()
+    CheckKMSDRM()
+  endif()
+
+  if(UNIX)
+    file(GLOB CORE_UNIX_SOURCES ${SDL2_SOURCE_DIR}/src/core/unix/*.c)
+    set(SOURCE_FILES ${SOURCE_FILES} ${CORE_UNIX_SOURCES})
+  endif()
+
+  if(LINUX)
+    check_c_source_compiles("
+        #include <linux/input.h>
+        #ifndef EVIOCGNAME
+        #error EVIOCGNAME() ioctl not available
+        #endif
+        int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS)
+
+    check_c_source_compiles("
+        #include <linux/kd.h>
+        #include <linux/keyboard.h>
+
+        int main(int argc, char **argv)
+        {
+            struct kbentry kbe;
+            kbe.kb_table = KG_CTRL;
+            ioctl(0, KDGKBENT, &kbe);
+        }" HAVE_INPUT_KD)
+
+    file(GLOB CORE_LINUX_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c)
+    set(SOURCE_FILES ${SOURCE_FILES} ${CORE_LINUX_SOURCES})
+
+    if(HAVE_INPUT_EVENTS)
+      set(SDL_INPUT_LINUXEV 1)
+    endif()
+
+    if(SDL_HAPTIC AND HAVE_INPUT_EVENTS)
+      set(SDL_HAPTIC_LINUX 1)
+      file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c)
+      set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES})
+      set(HAVE_SDL_HAPTIC TRUE)
+    endif()
+
+    if(HAVE_INPUT_KD)
+      set(SDL_INPUT_LINUXKD 1)
+    endif()
+
+    check_include_file("libudev.h" HAVE_LIBUDEV_H)
+
+    if(PKG_CONFIG_FOUND)
+      pkg_search_module(DBUS dbus-1 dbus)
+      if(DBUS_FOUND)
+        set(HAVE_DBUS_DBUS_H TRUE)
+        include_directories(${DBUS_INCLUDE_DIRS})
+        list(APPEND EXTRA_LIBS ${DBUS_LIBRARIES})
+      endif()
+
+      pkg_search_module(IBUS ibus-1.0 ibus)
+      if(IBUS_FOUND)
+        set(HAVE_IBUS_IBUS_H TRUE)
+        include_directories(${IBUS_INCLUDE_DIRS})
+        list(APPEND EXTRA_LIBS ${IBUS_LIBRARIES})
+      endif()
+    endif()
+
+    check_include_file("fcitx/frontend.h" HAVE_FCITX_FRONTEND_H)
+  endif()
+
+  if(INPUT_TSLIB)
+    check_c_source_compiles("
+        #include \"tslib.h\"
+        int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB)
+    if(HAVE_INPUT_TSLIB)
+      set(SDL_INPUT_TSLIB 1)
+      list(APPEND EXTRA_LIBS ts)
+    endif()
+  endif()
+
+  if(SDL_JOYSTICK)
+    CheckUSBHID()   # seems to be BSD specific - limit the test to BSD only?
+ if(LINUX AND NOT ANDROID) + set(SDL_JOYSTICK_LINUX 1) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + endif() + endif() + + CheckPTHREAD() + + if(CLOCK_GETTIME) + check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + list(APPEND EXTRA_LIBS rt) + set(HAVE_CLOCK_GETTIME 1) + else() + check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) + if(FOUND_CLOCK_GETTIME) + set(HAVE_CLOCK_GETTIME 1) + endif() + endif() + endif() + + check_include_file(linux/version.h HAVE_LINUX_VERSION_H) + if(HAVE_LINUX_VERSION_H) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") + endif() + + if(SDL_POWER) + if(LINUX) + set(SDL_POWER_LINUX 1) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + endif() + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_UNIX 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(RPATH) + set(SDL_RLD_FLAGS "") + if(BSDI OR FREEBSD OR LINUX OR NETBSD) + set(CMAKE_REQUIRED_FLAGS "-Wl,--enable-new-dtags") + check_c_compiler_flag("" HAVE_ENABLE_NEW_DTAGS) + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + if(HAVE_ENABLE_NEW_DTAGS) + set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir} -Wl,--enable-new-dtags") + else() + set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") + endif() + elseif(SOLARIS) + set(SDL_RLD_FLAGS "-R\${libdir}") + endif() + set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) + set(HAVE_RPATH TRUE) + endif() + +elseif(WINDOWS) + find_program(WINDRES windres) + + check_c_source_compiles(" + #include + int main(int argc, char **argv) { }" HAVE_WIN32_CC) + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(MSVC) + # Prevent codegen that would use the VC runtime libraries. + set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/GS-") + if(NOT ARCH_64) + set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/arch:SSE") + endif() + endif() + + # Check for DirectX + if(DIRECTX) + if(DEFINED MSVC_VERSION AND NOT ${MSVC_VERSION} LESS 1700) + set(USE_WINSDK_DIRECTX TRUE) + endif() + if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) + if("$ENV{DXSDK_DIR}" STREQUAL "") + message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") + endif() + set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") + endif() + + if(HAVE_WIN32_CC) + # xinput.h may need windows.h, but doesn't include it itself. 
+ check_c_source_compiles(" + #include + #include + int main(int argc, char **argv) { }" HAVE_XINPUT_H) + check_c_source_compiles(" + #include + #include + XINPUT_GAMEPAD_EX x1; + int main(int argc, char **argv) { }" HAVE_XINPUT_GAMEPAD_EX) + check_c_source_compiles(" + #include + #include + XINPUT_STATE_EX s1; + int main(int argc, char **argv) { }" HAVE_XINPUT_STATE_EX) + else() + check_include_file(xinput.h HAVE_XINPUT_H) + endif() + + check_include_file(d3d9.h HAVE_D3D_H) + check_include_file(d3d11_1.h HAVE_D3D11_H) + check_include_file(ddraw.h HAVE_DDRAW_H) + check_include_file(dsound.h HAVE_DSOUND_H) + check_include_file(dinput.h HAVE_DINPUT_H) + check_include_file(mmdeviceapi.h HAVE_MMDEVICEAPI_H) + check_include_file(audioclient.h HAVE_AUDIOCLIENT_H) + check_include_file(dxgi.h HAVE_DXGI_H) + if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H) + set(HAVE_DIRECTX TRUE) + if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) + # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks + link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) + include_directories($ENV{DXSDK_DIR}\\Include) + endif() + endif() + set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) + endif() + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_WINMM 1) + file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + + if(HAVE_DSOUND_H) + set(SDL_AUDIO_DRIVER_DSOUND 1) + file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) + endif() + + if(HAVE_AUDIOCLIENT_H AND HAVE_MMDEVICEAPI_H) + set(SDL_AUDIO_DRIVER_WASAPI 1) + file(GLOB WASAPI_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/wasapi/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WASAPI_AUDIO_SOURCES}) + endif() + endif() + + if(SDL_VIDEO) + # requires SDL_LOADSO on Windows (IME, DX, etc.) + if(NOT SDL_LOADSO) + message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") + endif() + set(SDL_VIDEO_DRIVER_WINDOWS 1) + file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) + + if(RENDER_D3D AND HAVE_D3D_H) + set(SDL_VIDEO_RENDER_D3D 1) + set(HAVE_RENDER_D3D TRUE) + endif() + if(RENDER_D3D AND HAVE_D3D11_H) + set(SDL_VIDEO_RENDER_D3D11 1) + set(HAVE_RENDER_D3D TRUE) + endif() + set(HAVE_SDL_VIDEO TRUE) + endif() + + if(SDL_THREADS) + set(SDL_THREAD_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c + ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c + ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) + set(HAVE_SDL_THREADS TRUE) + endif() + + if(SDL_POWER) + set(SDL_POWER_WINDOWS 1) + set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) + set(HAVE_SDL_POWER TRUE) + endif() + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_WINDOWS 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Libraries for Win32 native and MinGW + list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) + + # TODO: in configure.in the check for timers is set on + # cygwin | mingw32* - does this include mingw32CE? 
+ if(SDL_TIMERS) + set(SDL_TIMER_WINDOWS 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif() + + if(SDL_LOADSO) + set(SDL_LOADSO_WINDOWS 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) + set(HAVE_SDL_LOADSO TRUE) + endif() + + file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) + + if(SDL_VIDEO) + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_WGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + set(HAVE_VIDEO_OPENGLES TRUE) + endif() + endif() + + if(SDL_JOYSTICK) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/windows/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + if(HAVE_DINPUT_H) + set(SDL_JOYSTICK_DINPUT 1) + list(APPEND EXTRA_LIBS dinput8) + if(CMAKE_COMPILER_IS_MINGW) + list(APPEND EXTRA_LIBS dxerr8) + elseif (NOT USE_WINSDK_DIRECTX) + list(APPEND EXTRA_LIBS dxerr) + endif() + endif() + if(HAVE_XINPUT_H) + set(SDL_JOYSTICK_XINPUT 1) + endif() + if(NOT HAVE_DINPUT_H AND NOT HAVE_XINPUT_H) + set(SDL_JOYSTICK_WINMM 1) + endif() + set(HAVE_SDL_JOYSTICK TRUE) + + if(SDL_HAPTIC) + if(HAVE_DINPUT_H OR HAVE_XINPUT_H) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/windows/*.c) + if(HAVE_DINPUT_H) + set(SDL_HAPTIC_DINPUT 1) + endif() + if(HAVE_XINPUT_H) + set(SDL_HAPTIC_XINPUT 1) + endif() + else() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SDL_HAPTIC_DUMMY 1) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + endif() + endif() + + file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) + if(MINGW OR CYGWIN) + list(APPEND EXTRA_LIBS mingw32) + list(APPEND EXTRA_LDFLAGS "-mwindows") + set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") + list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") + endif() +elseif(APPLE) + # TODO: rework this all for proper MacOS X, iOS and Darwin support + + # We always need these libs on macOS at the moment. + # !!! FIXME: we need Carbon for some very old API calls in + # !!! FIXME: src/video/cocoa/SDL_cocoakeyboard.c, but we should figure out + # !!! FIXME: how to dump those. + if(NOT IOS) + set(SDL_FRAMEWORK_COCOA 1) + set(SDL_FRAMEWORK_CARBON 1) + endif() + + # Requires the darwin file implementation + if(SDL_FILE) + file(GLOB EXTRA_SOURCES ${SDL2_SOURCE_DIR}/src/file/cocoa/*.m) + set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) + # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. + set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) + set(HAVE_SDL_FILE TRUE) + # !!! FIXME: why is COREVIDEO inside this if() block? + set(SDL_FRAMEWORK_COREVIDEO 1) + else() + message_error("SDL_FILE must be enabled to build on MacOS X") + endif() + + if(SDL_AUDIO) + set(SDL_AUDIO_DRIVER_COREAUDIO 1) + file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.m) + # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. 
+ set_source_files_properties(${AUDIO_SOURCES} PROPERTIES LANGUAGE C) + set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) + set(HAVE_SDL_AUDIO TRUE) + set(SDL_FRAMEWORK_COREAUDIO 1) + set(SDL_FRAMEWORK_AUDIOTOOLBOX 1) + endif() + + if(SDL_JOYSTICK) + set(SDL_JOYSTICK_IOKIT 1) + if (IOS) + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/iphoneos/*.m ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) + else() + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + set(HAVE_SDL_JOYSTICK TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + endif() + + if(SDL_HAPTIC) + set(SDL_HAPTIC_IOKIT 1) + if (IOS) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SDL_HAPTIC_DUMMY 1) + else() + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) + set(HAVE_SDL_HAPTIC TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + set(SDL_FRAMEWORK_FF 1) + if(NOT SDL_JOYSTICK) + message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") + endif() + endif() + + if(SDL_POWER) + set(SDL_POWER_MACOSX 1) + if (IOS) + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/uikit/*.m) + else() + file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) + endif() + set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) + set(HAVE_SDL_POWER TRUE) + set(SDL_FRAMEWORK_IOKIT 1) + endif() + + if(SDL_TIMERS) + set(SDL_TIMER_UNIX 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(SDL_FILESYSTEM) + set(SDL_FILESYSTEM_COCOA 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) + # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. + set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + endif() + + # Actually load the frameworks at the end so we don't duplicate include. + if(SDL_FRAMEWORK_COREVIDEO) + find_library(COREVIDEO CoreVideo) + list(APPEND EXTRA_LIBS ${COREVIDEO}) + endif() + if(SDL_FRAMEWORK_COCOA) + find_library(COCOA_LIBRARY Cocoa) + list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) + endif() + if(SDL_FRAMEWORK_IOKIT) + find_library(IOKIT IOKit) + list(APPEND EXTRA_LIBS ${IOKIT}) + endif() + if(SDL_FRAMEWORK_FF) + find_library(FORCEFEEDBACK ForceFeedback) + list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) + endif() + if(SDL_FRAMEWORK_CARBON) + find_library(CARBON_LIBRARY Carbon) + list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) + endif() + if(SDL_FRAMEWORK_COREAUDIO) + find_library(COREAUDIO CoreAudio) + list(APPEND EXTRA_LIBS ${COREAUDIO}) + endif() + if(SDL_FRAMEWORK_AUDIOTOOLBOX) + find_library(AUDIOTOOLBOX AudioToolbox) + list(APPEND EXTRA_LIBS ${AUDIOTOOLBOX}) + endif() + + # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
+ if(SDL_VIDEO) + if (IOS) + set(SDL_VIDEO_DRIVER_UIKIT 1) + file(GLOB UIKITVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/uikit/*.m) + set(SOURCE_FILES ${SOURCE_FILES} ${UIKITVIDEO_SOURCES}) + else() + CheckCOCOA() + if(VIDEO_OPENGL) + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_CGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + + if(VIDEO_OPENGLES) + set(SDL_VIDEO_OPENGL_EGL 1) + set(SDL_VIDEO_OPENGL_ES2 1) + set(SDL_VIDEO_RENDER_OGL_ES2 1) + set(HAVE_VIDEO_OPENGLES TRUE) + endif() + endif() + endif() + + CheckPTHREAD() +elseif(HAIKU) + if(SDL_VIDEO) + set(SDL_VIDEO_DRIVER_HAIKU 1) + file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) + set(HAVE_SDL_VIDEO TRUE) + + set(SDL_FILESYSTEM_HAIKU 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) + set(HAVE_SDL_FILESYSTEM TRUE) + + if(SDL_TIMERS) + set(SDL_TIMER_HAIKU 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) + set(HAVE_SDL_TIMERS TRUE) + endif(SDL_TIMERS) + + if(VIDEO_OPENGL) + # TODO: Use FIND_PACKAGE(OpenGL) instead + set(SDL_VIDEO_OPENGL 1) + set(SDL_VIDEO_OPENGL_BGL 1) + set(SDL_VIDEO_RENDER_OGL 1) + list(APPEND EXTRA_LIBS GL) + set(HAVE_VIDEO_OPENGL TRUE) + endif() + endif() + + CheckPTHREAD() +endif() + +if(VIDEO_VULKAN) + set(SDL_VIDEO_VULKAN 1) +endif() + +# Dummies +# configure.in does it differently: +# if not have X +# if enable_X { SDL_X_DISABLED = 1 } +# [add dummy sources] +# so it always adds a dummy, without checking, if it was actually requested. +# This leads to missing internal references on building, since the +# src/X/*.c does not get included. 
+if(NOT HAVE_SDL_JOYSTICK) + set(SDL_JOYSTICK_DISABLED 1) + if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX + + file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) + endif() +endif() +if(NOT HAVE_SDL_HAPTIC) + set(SDL_HAPTIC_DISABLED 1) + file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) +endif() +if(NOT HAVE_SDL_LOADSO) + set(SDL_LOADSO_DISABLED 1) + file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) +endif() +if(NOT HAVE_SDL_FILESYSTEM) + set(SDL_FILESYSTEM_DISABLED 1) + file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) +endif() + +# We always need to have threads and timers around +if(NOT HAVE_SDL_THREADS) + set(SDL_THREADS_DISABLED 1) + file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) +endif() +if(NOT HAVE_SDL_TIMERS) + set(SDL_TIMERS_DISABLED 1) + file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) + set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) +endif() + +if(NOT SDLMAIN_SOURCES) + file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) +endif() + +# Append the -MMD -MT flags +# if(DEPENDENCY_TRACKING) +# if(COMPILER_IS_GNUCC) +# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") +# endif() +# endif() + +configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" + "${SDL2_BINARY_DIR}/include/SDL_config.h") + +# Prepare the flags and remove duplicates +if(EXTRA_LDFLAGS) + list(REMOVE_DUPLICATES EXTRA_LDFLAGS) +endif() +if(EXTRA_LIBS) + list(REMOVE_DUPLICATES EXTRA_LIBS) +endif() +if(EXTRA_CFLAGS) + list(REMOVE_DUPLICATES EXTRA_CFLAGS) +endif() +listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) +set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) + +# Compat helpers for the configuration files +if(NOT WINDOWS OR CYGWIN) + # TODO: we need a Windows script, too + execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) + + set(prefix ${CMAKE_INSTALL_PREFIX}) + set(exec_prefix "\${prefix}") + set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") + set(bindir "\${exec_prefix}/bin") + set(includedir "\${prefix}/include") + if(SDL_STATIC) + set(ENABLE_STATIC_TRUE "") + set(ENABLE_STATIC_FALSE "#") + else() + set(ENABLE_STATIC_TRUE "#") + set(ENABLE_STATIC_FALSE "") + endif() + if(SDL_SHARED) + set(ENABLE_SHARED_TRUE "") + set(ENABLE_SHARED_FALSE "#") + else() + set(ENABLE_SHARED_TRUE "#") + set(ENABLE_SHARED_FALSE "") + endif() + + # Clean up the different lists + listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") + set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) + list(REMOVE_DUPLICATES SDL_STATIC_LIBS) + listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) + set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) + listtostr(SDL_LIBS _SDL_LIBS) + set(SDL_LIBS ${_SDL_LIBS}) + + # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") + # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") + + configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" + "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config") + configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" + "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) + configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" + "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) +endif() + +##### Info output ##### +message(STATUS "") +message(STATUS "SDL2 was 
configured with the following options:") +message(STATUS "") +message(STATUS "Platform: ${CMAKE_SYSTEM}") +message(STATUS "64-bit: ${ARCH_64}") +message(STATUS "Compiler: ${CMAKE_C_COMPILER}") +message(STATUS "") +message(STATUS "Subsystems:") +foreach(_SUB ${SDL_SUBSYSTEMS}) + string(TOUPPER ${_SUB} _OPT) + message_bool_option(${_SUB} SDL_${_OPT}) +endforeach() +message(STATUS "") +message(STATUS "Options:") +list(SORT ALLOPTIONS) +foreach(_OPT ${ALLOPTIONS}) + # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters + # Get the padding + string(LENGTH ${_OPT} _OPTLEN) + math(EXPR _PADLEN "23 - ${_OPTLEN}") + string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) + message_tested_option(${_OPT} ${_PADDING}) +endforeach() +message(STATUS "") +message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") +message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") +message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") +message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") +message(STATUS "") +message(STATUS " Build Shared Library: ${SDL_SHARED}") +message(STATUS " Build Static Library: ${SDL_STATIC}") +if(SDL_STATIC) + message(STATUS " Build Static Library with Position Independent Code: ${SDL_STATIC_PIC}") +endif() +message(STATUS "") +if(UNIX) + message(STATUS "If something was not detected, although the libraries") + message(STATUS "were installed, then make sure you have set the") + message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") + message(STATUS "") +endif() + +# Ensure that the extra cflags are used at compile time +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") + +# Always build SDLmain +add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) +target_include_directories(SDL2main PUBLIC $) +set(_INSTALL_LIBS "SDL2main") +if (NOT ANDROID) + set_target_properties(SDL2main PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) +endif() + +if(SDL_SHARED) + add_library(SDL2 SHARED ${SOURCE_FILES} ${VERSION_SOURCES}) + if(APPLE) + set_target_properties(SDL2 PROPERTIES MACOSX_RPATH 1) + elseif(UNIX AND NOT ANDROID) + set_target_properties(SDL2 PROPERTIES + VERSION ${LT_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2-${LT_RELEASE}") + else() + set_target_properties(SDL2 PROPERTIES + VERSION ${SDL_VERSION} + SOVERSION ${LT_REVISION} + OUTPUT_NAME "SDL2") + endif() + if(MSVC AND NOT LIBC) + # Don't try to link with the default set of libraries. + set_target_properties(SDL2 PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") + set_target_properties(SDL2 PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") + set_target_properties(SDL2 PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") + endif() + set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) + target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) + target_include_directories(SDL2 PUBLIC $) + if (NOT ANDROID) + set_target_properties(SDL2 PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) + endif() +endif() + +if(SDL_STATIC) + set (BUILD_SHARED_LIBS FALSE) + add_library(SDL2-static STATIC ${SOURCE_FILES}) + if (NOT SDL_SHARED OR NOT WIN32) + set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") + # Note: Apparently, OUTPUT_NAME must really be unique; even when + # CMAKE_IMPORT_LIBRARY_SUFFIX or the like are given. Otherwise + # the static build may race with the import lib and one will get + # clobbered, when the suffix is realized via subsequent rename. 
+ endif() + set_target_properties(SDL2-static PROPERTIES POSITION_INDEPENDENT_CODE ${SDL_STATIC_PIC}) + if(MSVC AND NOT LIBC) + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") + set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") + endif() + # TODO: Win32 platforms keep the same suffix .lib for import and static + # libraries - do we need to consider this? + set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) + target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) + target_include_directories(SDL2-static PUBLIC $) + if (NOT ANDROID) + set_target_properties(SDL2-static PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) + endif() +endif() + +##### Tests ##### + +if(SDL_TEST) + file(GLOB TEST_SOURCES ${SDL2_SOURCE_DIR}/src/test/*.c) + add_library(SDL2_test STATIC ${TEST_SOURCES}) + + add_subdirectory(test) +endif() + +##### Installation targets ##### +install(TARGETS ${_INSTALL_LIBS} EXPORT SDL2Targets + LIBRARY DESTINATION "lib${LIB_SUFFIX}" + ARCHIVE DESTINATION "lib${LIB_SUFFIX}" + RUNTIME DESTINATION bin) + +##### Export files ##### +if (APPLE) + set(PKG_PREFIX "SDL2.framework/Resources") +elseif (WINDOWS) + set(PKG_PREFIX "cmake") +else () + set(PKG_PREFIX "lib/cmake/SDL2") +endif () + +include(CMakePackageConfigHelpers) +write_basic_package_version_file("${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake" + VERSION ${SDL_VERSION} + COMPATIBILITY AnyNewerVersion +) + +install(EXPORT SDL2Targets + FILE SDL2Targets.cmake + NAMESPACE SDL2:: + DESTINATION ${PKG_PREFIX} +) +install( + FILES + ${CMAKE_CURRENT_SOURCE_DIR}/SDL2Config.cmake + ${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake + DESTINATION ${PKG_PREFIX} + COMPONENT Devel +) + +file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) +file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) +foreach(_FNAME ${BIN_INCLUDE_FILES}) + get_filename_component(_INCNAME ${_FNAME} NAME) + list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) +endforeach() +list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) +install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) + +if(NOT (WINDOWS OR CYGWIN)) + if(SDL_SHARED) + if (APPLE) + set(SOEXT "dylib") + else() + set(SOEXT "so") + endif() + if(NOT ANDROID) + install(CODE " + execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink + \"libSDL2-2.0.${SOEXT}\" \"libSDL2.${SOEXT}\")") + install(FILES ${SDL2_BINARY_DIR}/libSDL2.${SOEXT} DESTINATION "lib${LIB_SUFFIX}") + endif() + endif() + if(FREEBSD) + # FreeBSD uses ${PREFIX}/libdata/pkgconfig + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") + else() + install(FILES ${SDL2_BINARY_DIR}/sdl2.pc + DESTINATION "lib${LIB_SUFFIX}/pkgconfig") + endif() + install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) + # TODO: what about the .spec file? Is it only needed for RPM creation? 
+ install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "${CMAKE_INSTALL_FULL_DATAROOTDIR}/aclocal") +endif() + +##### Uninstall target ##### + +if(NOT TARGET uninstall) + configure_file( + "${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in" + "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" + IMMEDIATE @ONLY) + + add_custom_target(uninstall + COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) +endif() + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh new file mode 100644 index 0000000..6ad62d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +#pattern=*.py CMakeLists.txt +pattern="utils.cmake common.yml CMakeLists.txt *.py" +other_dir=$1 + +for i in $(ls $pattern); do + if [ -f $other_dir/$i ]; then + diff $i $other_dir/$i > /dev/null || meld $i $other_dir/$i + fi +done + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py new file mode 100644 index 0000000..aaad57e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py @@ -0,0 +1,1508 @@ +import os +import sys +import utils +import logging +import traceback +import datetime +import hash_version +import copy +import fnmatch + + +class InvalidPlatform(Exception): + def __init__(self, plat): + self._plat = plat + def __str__(self): + return "Invalid platform detected: %s" % self._plat + + +class DontExistsFile(Exception): + def __init__(self, source_filename): + self._source_filename = source_filename + def __str__(self): + return 'Dont exists file %s' % self._source_filename + + +class FailPrepare(Exception): + def __init__(self, node): + self._node = node + def __str__(self): + return 'Failing preparing package: %s' % self._node.get_package_name() + + +class AmbiguationLibs(Exception): + def __init__(self, kind, package, build_mode): + self._kind = kind + self._package = package + self._build_mode = build_mode + def __str__(self): + return "Ambiguation in %s in %s. Mode: %s. 
Candidates:" % (self._kind, self._package, self._build_mode) + + +class NotFoundInDataset(Exception): + def __init__(self, msg): + self._msg = msg + def __str__(self): + return "%s" % self._msg + + +class FailThirdParty(Exception): + def __init__(self, msg): + self._msg = msg + def __str__(self): + return "%s" % self._msg + + +class Found(Exception): + pass + + +def prepare_cmakefiles(cmake_files): + if not os.path.isdir(cmake_files): + logging.error('Invalid cmake files: {}'.format(camkefiles)) + + +def get_identifier(mode): + env = os.environ.copy() + cmaki_pwd = env['CMAKI_PWD'] + if utils.is_windows(): + script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.exe') + else: + script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.sh') + if not os.path.isfile(script_identifier): + raise Exception("there is no {} script".format(script_identifier)) + env['CMAKI_INFO'] = mode + return list(utils.get_stdout(script_identifier, env=env))[0] + + +def search_fuzzy(data, fuzzy_key, fallback='default'): + for key in data: + if fnmatch.fnmatch(fuzzy_key, key): + return data[key] + else: + if fallback in data: + return data[fallback] + else: + logging.error("not found 'default' platform or %s" % fuzzy_key) + raise Exception("not found '{}'".format(fuzzy_key)) + + +if 'MODE' not in os.environ: + logging.warning('Using Debug by default. For explicit use, define environment var MODE') + os.environ['MODE'] = 'Debug' + +if 'CMAKI_INSTALL' not in os.environ: + logging.warning('Using CMAKI_INSTALL by default. For explicit use, define environment var CMAKI_INSTALL') + os.environ['CMAKI_INSTALL'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier', 'bin') + +if 'CMAKI_PWD' not in os.environ: + logging.warning('Using CMAKI_PWD by default. For explicit use, define environment var CMAKI_PWD') + os.environ['CMAKI_PWD'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier') + + +# +# INMUTABLE GLOBALS +# + +CMAKELIB_URL='https://github.com/makiolo/cmaki.git' +prefered = {} +prefered['Debug'] = ['Debug', 'RelWithDebInfo', 'Release'] +prefered['RelWithDebInfo'] = ['RelWithDebInfo', 'Release', 'Debug'] +prefered['Release'] = ['Release', 'RelWithDebInfo', 'Debug'] +magic_invalid_file = '__not_found__' +exceptions_fail_group = (OSError, IOError, ) +exceptions_fail_program = (KeyboardInterrupt, ) +uncompress_strip_default = '.' +uncompress_prefix_default = '.' 
+priority_default = 50 +build_unittests_foldername = 'unittest' +# detect platform +platform = get_identifier('ALL') +arch = get_identifier('ARCH') +operative_system = get_identifier('OS') +somask_id = operative_system[0] +archs = {platform: arch} +platforms = [platform] +logging.info('Detecting platform from script like: {} / {}'.format(platform, arch)) + +alias_priority_name = { 10: 'minimal', + 20: 'tools', + 30: 'third_party' } +alias_priority_name_inverse = {v: k for k, v in alias_priority_name.items()} + + +def is_valid(package_name, mask): + return (mask.find(somask_id) != -1) and (package_name != 'dummy') + + +def is_blacklisted(blacklist_file, no_blacklist, package_name): + blacklisted = False + if os.path.exists(blacklist_file): + with open(blacklist_file, 'rt') as f: + for line in f.readlines(): + if line.strip() == package_name: + blacklisted = True + break + # --no-blacklist can annular effect of blacklist + if blacklisted and (package_name in no_blacklist): + blacklisted = False + return blacklisted + + +class ThirdParty: + def __init__(self, user_parameters, name, parameters): + self.user_parameters = user_parameters + self.name = name + self.parameters = parameters + self.depends = [] + self.exceptions = [] + self.interrupted = False + self.ret = 0 # Initial return code + self.fail_stage = "" + self.blacklisted = is_blacklisted(self.user_parameters.blacklist, self.user_parameters.no_blacklist, self.get_package_name()) + self.published_invalidation = False + + + def __hash__(self): + return hash((self.get_package_name(), self.get_priority(), self.get_mask())) + + + def __eq__(self, other): + return (self.get_package_name() == other.get_package_name()) and (self.get_priority() == other.get_priority()) and (self.get_mask() == other.get_mask()) + + + def __ne__(self, other): + return not self.__eq__(other) + + + def __repr__(self): + return "%s (%s)" % (self.get_package_name(), self.get_mask()) + + + def __str__(self): + return "%s (%s)" % (self.get_package_name(), self.get_mask()) + + + def get_uncompress_strip(self, pos = 0): + try: + if isinstance(self.parameters['uncompress_strip'], list): + return self.parameters['uncompress_strip'][pos] + else: + return self.parameters['uncompress_strip'] + except KeyError: + # default value + return uncompress_strip_default + + + def get_uncompress_prefix(self, pos = 0): + try: + if isinstance(self.parameters['uncompress_prefix'], list): + return self.parameters['uncompress_prefix'][pos] + else: + return self.parameters['uncompress_prefix'] + except KeyError: + # default value + return uncompress_prefix_default + + + def get_uncompress(self, pos = 0): + try: + if self.parameters['uncompress'] is not None: + if isinstance(self.parameters['uncompress'], list): + return self.parameters['uncompress'][pos].find(somask_id) != -1 + else: + return self.parameters['uncompress'].find(somask_id) != -1 + else: + return False + except KeyError: + # default value + return True + + + def get_depends_raw(self): + return self.depends + + + def get_depends(self): + try: + return self.parameters['depends'] + except KeyError: + # default value + return None + + + def get_generate_custom_script(self, source_dir): + path_build = self.get_path_custom_script(source_dir, name='.build') + build_content = self.get_build_script_content() + if build_content is not None: + with open(path_build, 'wt') as f: + f.write(build_content) + + + def get_path_custom_script(self, source_folder, name = 'build'): + if utils.is_windows(): + path_build = os.path.join(source_folder, 
name + '.cmd') + else: + path_build = os.path.join(source_folder, name + '.sh') + return path_build + + + def has_custom_script(self, source_folder): + script_custom = os.path.exists( self.get_path_custom_script(source_folder) ) + return (self.get_build_script_content() is not None) or script_custom + + + def get_build_script_content(self): + try: + if not utils.is_windows(): + return self.parameters['build'] + else: + return self.parameters['build_windows'] + except KeyError: + # default value + return None + + + def get_source(self): + try: + source = self.parameters['source'] + if source is not None: + if not isinstance(source, list): + return [source] + else: + return source + else: + return [] + except KeyError: + # default value + return [] + + + def get_source_filename(self, position=0): + try: + return self.parameters['source_filename'] + except KeyError: + # default value + source = self.get_source()[position] + filename = source.split('/')[-1] + return filename + + + def get_sources_all(self, position=0): + try: + return self.parameters['sources_all'] + except KeyError: + return False + + + def get_before_copy(self): + try: + return self.parameters['before_copy'] + except KeyError: + # default value + return [] + + + def get_short_path(self): + try: + return self.parameters['short_path'] + except KeyError: + # default value + return False + + + def has_library(self, platform_info): + package = self.get_package_name() + return (('static' in platform_info) and (package != 'dummy')) or (('dynamic' in platform_info) and (package != 'dummy')) + + + def needs(self, node): + if node.is_valid(): + self.depends.append(node) + + + def get_package_name(self): + return self.name + + + def get_package_name_norm(self): + package = self.get_package_name() + for c in '-\\/:*?"<>|': + package = package.replace(c, '_') + return package + + + def get_package_name_norm_upper(self): + package_norm = self.get_package_name_norm() + return package_norm.upper() + + + def set_version(self, newversion): + self.parameters['version'] = newversion + + + def get_version(self): + try: + version = self.parameters['version'] + if version is None: + return '0.0.0.0' + else: + return version + except KeyError: + if self.get_package_name() != 'dummy': + raise Exception('[%s] Version is a mandatory field.' 
% self.get_package_name()) + + + def get_version_manager(self): + try: + version = self.get_version() + if version == '0.0.0.0': + return self.parameters['version_manager'] + else: + # si tiene version -> no usar renombrado git + return None + except KeyError: + return None + + + def get_cmake_target(self): + try: + return self.parameters['cmake_target'] + except KeyError: + return 'install' + + + def get_post_install(self): + try: + return self.parameters['post_install'] + except KeyError: + return [] + + + def get_priority(self): + try: + return int(self.parameters['priority']) + except KeyError: + return priority_default + + + def is_packing(self): + try: + return self.parameters['packing'] + except KeyError: + # default value + return True + + + def get_branch(self): + try: + return self.parameters['branch'] + except KeyError: + # default value + return None + + + def get_build_modes(self): + build_modes = [] + try: + if 'MODE' in os.environ and (os.environ['MODE'] != 'UNDEFINED'): + build_modes.append(os.environ['MODE']) + else: + mode = self.parameters['mode'] + if mode.find('d') != -1: + build_modes.append('Debug') + if mode.find('i') != -1: + build_modes.append('RelWithDebInfo') + if mode.find('r') != -1: + build_modes.append('Release') + except KeyError: + # no mode provided + build_modes.append('Debug') + build_modes.append('RelWithDebInfo') + build_modes.append('Release') + return build_modes + + + def get_mask(self): + try: + return self.parameters['mask'] + except KeyError: + return somask_id + + + def is_valid(self): + if self.blacklisted: + if not self.published_invalidation: + logging.debug('%s is not built because is blacklisted in %s' % (self.get_package_name(), os.path.basename(self.user_parameters.blacklist))) + self.published_invalidation = True + return False + return is_valid(self.get_package_name(), self.get_mask()) + + + def resolver(self, resolved, seen): + seen.append(self) + for edge in self.depends: + if edge not in resolved: + if edge in seen: + raise Exception('Circular reference detected: %s and %s' % (self.get_package_name(), edge.name)) + edge.resolver(resolved, seen) + if self.is_valid(): + resolved.append(self) + seen.remove(self) + + + def get_targets(self): + try: + return self.parameters['targets'] + except KeyError: + # default value + return [] + + + def get_exclude_from_all(self): + try: + return self.parameters['exclude_from_all'] + except KeyError: + # default value + return False + + + def get_exclude_from_clean(self): + try: + return self.parameters['exclude_from_clean'] + except KeyError: + # default value + return False + + + def get_unittest(self): + try: + return self.parameters['unittest'] + except KeyError: + # default value + return None + + + def get_cmake_prefix(self): + try: + cmake_prefix = self.parameters['cmake_prefix'] + if cmake_prefix.endswith('CMakeLists.txt'): + return os.path.dirname(cmake_prefix) + return cmake_prefix + except KeyError: + # default value + return "." 
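# --- Editor's note: illustrative sketch, not part of the patch --------------
# ThirdParty.resolver() above is a depth-first dependency resolution with
# cycle detection: `seen` tracks the current DFS path, dependencies are pushed
# into `resolved` before the node that needs them, and a node is only appended
# if is_valid() holds. A minimal standalone version of the same idea, using a
# hypothetical Node class:

class Node:
    def __init__(self, name, depends=None):
        self.name = name
        self.depends = depends or []

    def resolve(self, resolved, seen):
        seen.append(self)
        for dep in self.depends:
            if dep not in resolved:
                if dep in seen:
                    raise Exception('Circular reference detected: %s and %s' % (self.name, dep.name))
                dep.resolve(resolved, seen)
        resolved.append(self)
        seen.remove(self)

# usage: c depends on b, b depends on a -> resolution order is a, b, c
a = Node('a')
b = Node('b', [a])
c = Node('c', [b])
order = []
c.resolve(order, [])
assert [n.name for n in order] == ['a', 'b', 'c']
# -----------------------------------------------------------------------------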
+ + + def get_generator_targets(self, plat, _, compiler_cpp, ext_sta, ext_dyn): + + package = self.get_package_name_norm() + + for targets in self.get_targets(): + + for target_name in targets: + + platform_info = None + platform_extra = None + + target_info = targets[target_name] + if 'info' in target_info: + outputinfo = search_fuzzy(target_info['info'], plat) + if outputinfo is not None: + platform_info = copy.deepcopy( outputinfo ) + + if 'extra' in target_info: + outputinfo_extra = search_fuzzy(target_info['extra'], plat) + if outputinfo_extra is not None: + platform_extra = copy.deepcopy( outputinfo_extra ) + + if (platform_info is not None) and (platform_extra is not None): + platform_info = utils.smart_merge(platform_info, platform_extra) + + # variables for use in "info" and "extra" + platform_info = utils.apply_replaces_vars(platform_info, { + 'TARGET': target_name, + 'TARGET_UPPER': target_name.upper(), + 'PACKAGE': package, + 'PACKAGE_UPPER': package.upper(), + 'PLATFORM': plat, + 'COMPILER': os.path.basename(compiler_cpp), + 'EXT_DYN': ext_dyn, + 'EXT_STA': ext_sta, + 'ARCH': archs[plat], + }) + + if platform_info is None: + logging.error('No platform info in package %s, platform %s' % (package, plat)) + logging.error("%s" % targets) + sys.exit(1) + + yield (target_name, platform_info) + + + def have_any_in_target(self, plat, key, compiler_replace_maps): + any_static = False + for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): + for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): + if key in platform_info: + any_static = True + return any_static + + + def get_generate_find_package(self): + try: + return self.parameters['generate_find_package'] + except KeyError: + # default value + return True + + + def compiler_iterator(self, plat, compiler_replace_maps): + + plat_parms = search_fuzzy(self.parameters['platforms'], plat) + try: + generator = plat_parms['generator'] + except KeyError: + generator = None + + try: + compilers = plat_parms['compiler'] + except KeyError: + compilers = None + + # resolve map + compiler_replace_resolved = {} + for var, value in compiler_replace_maps.items(): + new_value = value + new_value = new_value.replace('$PLATFORM', plat) + compiler_replace_resolved[var] = new_value + compiler_replace_resolved['$ARCH'] = archs[plat] + compiler_replace_resolved['${ARCH}'] = archs[plat] + + # get compiler info + compiler = get_identifier('COMPILER') + + ext_dyn = plat_parms['ext_dyn'] + ext_sta = plat_parms['ext_sta'] + if compilers is None: + compilers = [('%s, %s' % (compiler, compiler))] + + for compiler in compilers: + compilers_tuple = compiler.split(',') + assert(len(compilers_tuple) == 2) + compiler_c = compilers_tuple[0].strip() + compiler_cpp = compilers_tuple[1].strip() + + compiler_c = utils.apply_replaces(compiler_c, compiler_replace_resolved) + compiler_cpp = utils.apply_replaces(compiler_cpp, compiler_replace_resolved) + + env_new = {} + env_modified = os.environ.copy() + + for env_iter in [env_modified, env_new]: + + env_iter['COMPILER'] = str(compiler) + env_iter['PLATFORM'] = str(plat) + env_iter['PACKAGE'] = str(self.get_package_name()) + env_iter['VERSION'] = str(self.get_version()) + env_iter['ARCH'] = str(archs[plat]) + + try: + environment = plat_parms['environment'] + + try: + environment_remove = environment['remove'] + for key, values in environment_remove.items(): + try: + oldpath = env_iter[key] + except KeyError: + 
oldpath = '' + uniq_values = set() + for v in values: + v = utils.apply_replaces(v, compiler_replace_resolved) + uniq_values.add(v) + for v in uniq_values: + oldpath = oldpath.replace(v, '') + env_iter[key] = oldpath + except KeyError: + pass + + # insert front with seprator = ":" + try: + environment_push_front = environment['push_front'] + for key, values in environment_push_front.items(): + try: + oldpath = env_iter[key] + except KeyError: + oldpath = '' + uniq_values = set() + for v in values: + v = utils.apply_replaces(v, compiler_replace_resolved) + uniq_values.add(v) + for v in uniq_values: + if len(oldpath) == 0: + separator = '' + else: + # -L / -I / -R use space + if v.startswith('-'): + separator = ' ' + else: + separator = ':' + oldpath = str('%s%s%s' % (v, separator, oldpath)) + env_iter[key] = oldpath + except KeyError: + pass + + # insert back with separator " " + try: + environment_flags = environment['flags'] + for key, values in environment_flags.items(): + try: + oldpath = env_iter[key] + except KeyError: + oldpath = '' + uniq_values = set() + for v in values: + v = utils.apply_replaces(v, compiler_replace_resolved) + uniq_values.add(v) + for v in uniq_values: + if len(oldpath) == 0: + separator = '' + else: + separator = ' ' + oldpath = str('%s%s%s' % (oldpath, separator, v)) + env_iter[key] = oldpath + except KeyError: + pass + + # insert new environment variables + try: + environment_assign = environment['assign'] + for key, value in environment_assign.items(): + value = utils.apply_replaces(value, compiler_replace_resolved) + env_iter[key] = value + except KeyError: + pass + + except KeyError: + pass + + yield (compiler_c, compiler_cpp, generator, ext_sta, ext_dyn, env_modified, env_new) + + + def remove_cmake3p(self, cmake3p_dir): + package_cmake3p = os.path.join(cmake3p_dir, self.get_base_folder()) + logging.debug('Removing cmake3p %s' % package_cmake3p) + if os.path.exists(package_cmake3p): + utils.tryremove_dir(package_cmake3p) + for dep in self.get_depends_raw(): + dep.remove_cmake3p(cmake3p_dir) + + + def get_base_folder(self): + package = self.get_package_name() + version = self.get_version() + return '%s-%s' % (package, version) + + + def get_workspace(self, plat): + package = self.get_package_name() + version = self.get_version() + return '%s-%s-%s' % (package, version, plat) + + + def get_build_directory(self, plat, build_mode): + package = self.get_package_name() + version = self.get_version() + if not self.get_short_path(): + return '.build_%s-%s-%s_%s' % (package, version, plat, build_mode) + else: + return '.bs_%s%s%s%s' % (package[:3], version[-1:], plat, build_mode) + + def get_binary_workspace(self, plat): + install_directory = os.path.join(self.user_parameters.prefix, self.get_workspace(plat)) + utils.trymkdir(install_directory) + return install_directory + + + def get_install_directory(self, plat): + install_directory = os.path.join(self.get_binary_workspace(plat), self.get_base_folder(), plat) + return install_directory + + + def get_download_directory(self): + package = self.get_package_name() + return '.download_%s' % package + + + def get_original_directory(self): + package = self.get_package_name() + return '.download_original_%s' % package + + + def apply_replace_maps(self, compiler_replace_maps): + package = self.get_package_name() + package_norm = self.get_package_name_norm() + to_package = os.path.abspath(package) + utils.trymkdir(to_package) + with utils.working_directory(to_package): + basedir = os.path.abspath('..') + 
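# --- Editor's note: illustrative sketch, not part of the patch --------------
# The environment block handled in compiler_iterator() above supports four
# operations: 'remove' strips values, 'push_front' prepends (separator ':' for
# path-like values, ' ' when the value starts with '-'), 'flags' appends with a
# space, and 'assign' overwrites. A condensed version of that merge logic on a
# plain dict; the variable names and values below are hypothetical:

def push_front(env, key, value):
    old = env.get(key, '')
    if not old:
        sep = ''
    elif value.startswith('-'):
        sep = ' '
    else:
        sep = ':'
    env[key] = '%s%s%s' % (value, sep, old)

def append_flag(env, key, value):
    old = env.get(key, '')
    env[key] = value if not old else '%s %s' % (old, value)

env = {'PATH': '/usr/bin', 'CXXFLAGS': '-O2'}
push_front(env, 'PATH', '/opt/toolchain/bin')  # PATH -> '/opt/toolchain/bin:/usr/bin'
append_flag(env, 'CXXFLAGS', '-fPIC')          # CXXFLAGS -> '-O2 -fPIC'
env['CC'] = 'gcc'                              # 'assign' simply overwrites
# -----------------------------------------------------------------------------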
compiler_replace_maps['$%s_BASE' % package_norm] = os.path.join(basedir, self.get_workspace('$PLATFORM'), self.get_base_folder()) + + + def generate_scripts_headers(self, compiler_replace_maps): + package = self.get_package_name() + package_norm = self.get_package_name_norm() + version = self.get_version() + to_package = os.path.abspath(package) + utils.trymkdir(to_package) + with utils.working_directory(to_package): + basedir = self.user_parameters.prefix + rootdir = self.user_parameters.rootdir + + # generate find.cmake + build_directory = self.get_build_directory(r"${CMAKI_PLATFORM}", r"${GLOBAL_BUILD_MODE}") + with open('find.cmake', 'wt') as f: + f.write("SET(%s_VERSION %s CACHE STRING \"Last version compiled ${PACKAGE}\" FORCE)\n" % (package_norm, version)) + f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}/include\" %s_INCLUDE)\n" % (basedir, package, version, package, version, package_norm)) + f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}\" %s_LIBDIR)\n" % (basedir, package, version, package, version, package_norm)) + f.write("file(TO_NATIVE_PATH \"%s/%s\" %s_BUILD)\n" % (rootdir, build_directory, package_norm)) + f.write("SET(%s_INCLUDE ${%s_INCLUDE} CACHE STRING \"Include dir %s\" FORCE)\n" % (package_norm, package_norm, package)) + f.write("SET(%s_LIBDIR ${%s_LIBDIR} CACHE STRING \"Libs dir %s\" FORCE)\n" % (package_norm, package_norm, package)) + f.write("SET(%s_BUILD ${%s_BUILD} CACHE STRING \"Build dir %s\" FORCE)\n" % (package_norm, package_norm, package)) + + # genereate find.script / cmd + if utils.is_windows(): + build_directory = self.get_build_directory("%PLATFORM%", "%BUILD_MODE%") + with open('find.cmd', 'wt') as f: + f.write("set %s_VERSION=%s\n" % (package_norm, version)) + f.write("set %s_HOME=%s\%s-%s-%%PLATFORM%%\%s-%s\%%PLATFORM%%\n" % (package_norm, basedir, package, version, package, version)) + f.write("set %s_BASE=%s\%s-%s-%%PLATFORM%%\%s-%s\n" % (package_norm, basedir, package, version, package, version)) + f.write("set SELFHOME=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\%%PLATFORM%%\n" % (basedir)) + f.write("set SELFBASE=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\n" % (basedir)) + f.write("set %s_BUILD=%s\%s\n" % (package_norm, rootdir, build_directory)) + f.write(r"md %SELFHOME%") + f.write("\n") + else: + build_directory = self.get_build_directory("${PLATFORM}", "${BUILD_MODE}") + with open('find.script', 'wt') as f: + f.write("#!/bin/bash\n") + f.write("%s_VERSION=%s\n" % (package_norm, version)) + f.write("%s_HOME=%s/%s-%s-$PLATFORM/%s-%s/$PLATFORM\n" % (package_norm, basedir, package, version, package, version)) + f.write("%s_BASE=%s/%s-%s-$PLATFORM/%s-%s\n" % (package_norm, basedir, package, version, package, version)) + f.write("SELFHOME=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION/$PLATFORM\n" % (basedir)) + f.write("SELFBASE=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION\n" % (basedir)) + f.write("%s_BUILD=%s/%s\n" % (package_norm, rootdir, build_directory)) + f.write("mkdir -p $SELFHOME\n") + + + def remove_cmakefiles(self): + utils.tryremove('CMakeCache.txt') + utils.tryremove('cmake_install.cmake') + utils.tryremove('install_manifest.txt') + utils.tryremove_dir('CMakeFiles') + + + def remove_scripts_headers(self): + package = self.get_package_name() + to_package = os.path.abspath(package) + utils.trymkdir(to_package) + with utils.working_directory(to_package): + utils.tryremove('find.cmake') + utils.tryremove('find.script') + 
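# --- Editor's note: illustrative sketch, not part of the patch --------------
# generate_scripts_headers() above writes a find.cmake plus a find.script (or
# find.cmd on Windows) next to each package so later build steps can locate it.
# The snippet below reproduces the Unix find.script template for a hypothetical
# package 'foo' 1.2.0 installed under a made-up prefix '/opt/artifacts':

def sketch_find_script(package, version, basedir):
    # for a simple name like 'foo', get_package_name_norm() returns the name unchanged
    norm = package
    return '\n'.join([
        '#!/bin/bash',
        '%s_VERSION=%s' % (norm, version),
        '%s_HOME=%s/%s-%s-$PLATFORM/%s-%s/$PLATFORM' % (norm, basedir, package, version, package, version),
        '%s_BASE=%s/%s-%s-$PLATFORM/%s-%s' % (norm, basedir, package, version, package, version),
        'SELFHOME=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION/$PLATFORM' % basedir,
        'mkdir -p $SELFHOME',
    ])

print(sketch_find_script('foo', '1.2.0', '/opt/artifacts'))
# -----------------------------------------------------------------------------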
utils.tryremove('find.cmd')
+            utils.tryremove('.build.sh')
+            utils.tryremove('.build.cmd')
+            utils.tryremove_dir_empty(to_package)
+
+
+    def generate_3rdpartyversion(self, output_dir):
+        package = self.get_package_name()
+        package_norm_upper = self.get_package_name_norm_upper()
+        version = self.get_version()
+        packing = self.is_packing()
+        if not packing:
+            logging.debug("package %s, don't need 3rdpartyversion" % package)
+            return
+        thirdparty_path = os.path.join(output_dir, '3rdpartyversions')
+        utils.trymkdir(thirdparty_path)
+        with utils.working_directory(thirdparty_path):
+            with open('%s.cmake' % package, 'wt') as f:
+                f.write('SET(%s_REQUIRED_VERSION %s EXACT)\n' % (package_norm_upper, version))
+
+
+    def _smart_uncompress(self, position, package_file_abs, uncompress_directory, destiny_directory, compiler_replace_maps):
+        uncompress = self.get_uncompress(position)
+        uncompress_strip = self.get_uncompress_strip(position)
+        uncompress_prefix = self.get_uncompress_prefix(position)
+        if uncompress:
+            if (uncompress_strip == uncompress_strip_default) and (uncompress_prefix == uncompress_prefix_default):
+                # fast case (no intermediate folder needed)
+                ok = utils.extract_file(package_file_abs, destiny_directory, self.get_first_environment(compiler_replace_maps))
+            else:
+                source_with_strip = os.path.join(uncompress_directory, uncompress_strip)
+                destiny_with_prefix = os.path.join(destiny_directory, uncompress_prefix)
+                ok = utils.extract_file(package_file_abs, uncompress_directory, self.get_first_environment(compiler_replace_maps))
+                utils.move_folder_recursive(source_with_strip, destiny_with_prefix)
+                utils.tryremove_dir(source_with_strip)
+            if not ok:
+                raise Exception('Invalid uncompressed package %s - %s' % (self.get_package_name(), package_file_abs))
+
+
+    def _prepare_third_party(self, position, url, build_directory, compiler_replace_maps):
+        package = self.get_package_name()
+        source_filename = self.get_source_filename(position)
+        uncompress_strip = self.get_uncompress_strip(position)
+        uncompress_prefix = self.get_uncompress_prefix(position)
+        uncompress = self.get_uncompress(position)
+        uncompress_directory = self.get_download_directory()
+        utils.trymkdir(uncompress_directory)
+
+        logging.debug('source_filename = %s' % source_filename)
+        logging.debug('uncompress_strip = %s' % uncompress_strip)
+        logging.debug('uncompress_prefix = %s' % uncompress_prefix)
+        logging.debug('uncompress = %s' % uncompress)
+
+        # resolve url vars
+        url = url.replace('$NPP_SERVER', os.environ['NPP_SERVER'])
+
+        # files in svn
+        if(url.startswith('svn://')):
+            # strip is not implemented with svn://
+            utils.tryremove_dir( build_directory )
+            logging.info('Download from svn: %s' % url)
+            self.safe_system( 'svn co %s %s' % (url, build_directory), compiler_replace_maps )
+            # utils.tryremove_dir( os.path.join(build_directory, '.svn') )
+
+        elif(url.endswith('.git') or (url.find('github') != -1) or (url.find('bitbucket') != -1)) and not ( url.endswith('.zip') or url.endswith('.tar.gz') or url.endswith('.tar.bz2') or url.endswith('.tgz') or url.endswith('.py') ):
+            # strip is not implemented with git://
+            utils.tryremove_dir( build_directory )
+            logging.info('Download from git: %s' % url)
+            branch = self.get_branch()
+            extra_cmd = ''
+            if branch is not None:
+                logging.info('cloning to branch %s' % branch)
+                extra_cmd = '--branch %s' % branch
+            self.safe_system('git clone %s --depth=200 %s %s' % (extra_cmd, url, build_directory), compiler_replace_maps)
+            # self.safe_system('git clone %s %s' % (url, build_directory),
compiler_replace_maps) + with utils.working_directory(build_directory): + # self.safe_system('git checkout {}'.format(extra_cmd), compiler_replace_maps) + self.safe_system('git submodule init', compiler_replace_maps) + self.safe_system('git submodule update', compiler_replace_maps) + # depends_file = self.user_parameters.depends + # if depends_file is not None: + # with utils.working_directory(build_directory): + # # leer el fichero de dependencias + # if os.path.exists(depends_file): + # data = utils.deserialize(depends_file) + # else: + # data = {} + # + # # obedecer, si trae algo util + # if package in data: + # logging.debug('data package version is %s' % data[package]) + # try: + # git_version = hash_version.to_git_version(build_directory, data[package]) + # logging.debug('data package in git version is %s' % git_version) + # logging.debug('updating to revision %s' % git_version) + # self.safe_system('git reset --hard %s' % git_version, compiler_replace_maps) + # except AssertionError: + # logging.info('using HEAD') + # + # # actualizar y reescribir + # revision = hash_version.get_last_version(build_directory) + # assert(len(revision) > 0) + # data[package] = revision + # utils.serialize(data, depends_file) + # else: + # logging.warning('not found depends file, using newest changeset') + + # file in http + elif ( url.startswith('http://') + or url.startswith('https://') + or url.endswith('.zip') + or url.endswith('.tar.gz') + or url.endswith('.tar.bz2') + or url.endswith('.tgz') + or url.endswith('.py') ): + + logging.info('Download from url: %s' % url) + # download to source_filename + package_file_abs = os.path.join(uncompress_directory, source_filename) + utils.download_from_url(url, package_file_abs) + if os.path.isfile(package_file_abs): + + # uncompress in download folder for after generate a patch with all changes + if not os.path.isdir( self.get_original_directory() ): + utils.trymkdir( self.get_original_directory() ) + logging.debug('preparing original uncompress') + # uncompress in original + self._smart_uncompress(position, package_file_abs, uncompress_directory, self.get_original_directory(), compiler_replace_maps) + else: + logging.debug('skipping original uncompress (already exists)') + + # uncompress in intermediate build directory + self._smart_uncompress(position, package_file_abs, uncompress_directory, build_directory, compiler_replace_maps) + + else: + raise DontExistsFile(source_filename) + + else: + raise Exception('Invalid source: %s - %s' % (package, url)) + + + def prepare_third_party(self, build_directory, compiler_replace_maps): + utils.trymkdir(build_directory) + package = self.get_package_name() + version = self.get_version() + sources_all = self.get_sources_all() + exceptions = [] + i = 0 + for source_url in self.get_source(): + if (source_url is None) or (len(source_url) <= 0) or (source_url == 'skip'): + logging.warning('[%s %s] Skipping preparation ...' % (package, version)) + else: + logging.warning('[%s %s] trying prepare from %s ...' 
% (package, version, source_url)) + try: + self._prepare_third_party(i, source_url, build_directory, compiler_replace_maps) + if not sources_all: + # sources_all = false ---> any source + # sources_all = Trie ----> all source + break + except exceptions_fail_group + exceptions_fail_program: + raise + except: + exceptions.append(sys.exc_info()) + i += 1 + if len(exceptions) > 0: + i = 0 + for exc_type, exc_value, exc_traceback in exceptions: + print ("---- Exception #%d / %d ----------" % (i+1, len(exceptions))) + traceback.print_exception(exc_type, exc_value, exc_traceback) + print ("----------------------------------") + i += 1 + raise FailPrepare(self) + + + def get_prefered_build_mode(self, prefered_build_mode_list): + build_modes = self.get_build_modes() + assert(len(prefered_build_mode_list) > 0) + prefered_build_mode = prefered_build_mode_list[0] + while (prefered_build_mode not in build_modes) and (len(prefered_build_mode_list)>0): + prefered_build_mode_list.pop(0) + if len(prefered_build_mode_list) > 0: + prefered_build_mode = prefered_build_mode_list[0] + return prefered_build_mode + + + def generate_cmake_condition(self, platforms, compiler_replace_maps): + target_uniques = set() + condition = '' + i = 0 + for plat in platforms: + for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): + for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): + package_lower = package.lower() + if (package_lower not in target_uniques) and (package_lower != 'dummy'): + target_uniques.add(package_lower) + if self.has_library(platform_info): + if i == 0: + condition += '(NOT TARGET %s)' % package_lower + else: + condition += ' OR (NOT TARGET %s)' % package_lower + i += 1 + return condition + + + def _search_library(self, rootdir, special_pattern): + ''' + 3 cases: + string + pattern as special string + list of strings + ''' + logging.debug('-- searching in {} with pattern: {}'.format(rootdir, special_pattern)) + + if special_pattern is None: + logging.debug('Failed searching lib in %s' % rootdir) + return False, None + + package = self.get_package_name() + if isinstance(special_pattern, list): + utils.verbose(self.user_parameters, 'Searching list %s' % special_pattern) + valid_ff = None + for ff in special_pattern: + valid, valid_ff = self._search_library(rootdir, utils.get_norm_path(ff)) + if valid: + break + return valid, valid_ff + + elif special_pattern.startswith('/') and special_pattern.endswith('/'): + pattern = special_pattern[1:-1] + utils.verbose(self.user_parameters, 'Searching rootdir %s, pattern %s' % (rootdir, pattern)) + files_found = utils.rec_glob(rootdir, pattern) + utils.verbose(self.user_parameters, 'Candidates %s' % files_found) + if len(files_found) == 1: + relfile = os.path.relpath(files_found[0], rootdir) + return True, utils.get_norm_path(relfile) + elif len(files_found) == 0: + msg = 'No library found in %s with pattern %s' % (rootdir, pattern) + logging.debug(msg) + return False, None + else: + msg = "Ambiguation in %s" % (package) + logging.debug(msg) + return False, None + else: + pathfull = os.path.join(rootdir, special_pattern) + utils.verbose(self.user_parameters, 'Checking file %s' % pathfull) + if os.path.exists(pathfull): + return True, utils.get_norm_path(special_pattern) + else: + return False, None + + + def search_library(self, workbase, dataset, kind, rootdir=None): + ''' + can throw exception + ''' + build_mode = 
self.get_prefered_build_mode(prefered[os.environ['MODE']])
+        if rootdir is None:
+            rootdir = workbase
+        utils.verbose(self.user_parameters, 'Searching rootdir %s' % (rootdir))
+        if (build_mode.lower() in dataset) and (kind in dataset[build_mode.lower()]):
+            special_pattern = dataset[build_mode.lower()][kind]
+            valid, valid_ff = self._search_library(rootdir, special_pattern)
+            if valid:
+                return valid_ff
+            else:
+                package = self.get_package_name()
+                raise AmbiguationLibs(kind, package, build_mode)
+        else:
+            raise NotFoundInDataset("Not found in dataset, searching %s - %s" % (build_mode.lower(), kind))
+
+
+    def search_library_noexcept(self, workbase, dataset, kind):
+        try:
+            rootdir = os.path.abspath(workbase)
+            finalpath = self.search_library(workbase, dataset, kind, rootdir)
+            utils.superverbose(self.user_parameters, '[01] path: %s' % finalpath)
+            return finalpath
+        except AmbiguationLibs:
+            finalpath = '%s.%s' % (magic_invalid_file, kind)
+            utils.superverbose(self.user_parameters, '[02] path: %s' % finalpath)
+            return finalpath
+        except NotFoundInDataset:
+            finalpath = '%s.%s' % (magic_invalid_file, kind)
+            utils.superverbose(self.user_parameters, '[03] path: %s' % finalpath)
+            return finalpath
+
+
+    def check_parts_exists(self, workbase, package, target, dataset, kindlibs, build_modes=None):
+        '''
+        Checks that every part of the target exists; returns True if all parts exist, False otherwise.
+
+        workbase: base installation directory
+        package: package name
+        target: target name
+        dataset: structure that holds the search strategies
+                 {"debug": {"part1": ["*.dll", "*d.dll"]}, "release": {"part1": ["*_release.dll"]}}
+        kindlibs: tuple of parts to verify; each entry is (kind, mandatory)
+        build_modes: restrict the search to certain build modes
+        '''
+
+        all_ok = True
+        if build_modes is None:
+            build_modes = self.get_build_modes()
+        for build_mode in build_modes:
+            for kind, must in kindlibs:
+                try:
+                    part_fullpath = os.path.join(workbase, self.search_library_noexcept(workbase, dataset, kind))
+                    if not os.path.exists(part_fullpath):
+                        if must:
+                            logging.error("[%s] Could not find %s in %s. Mode: %s. Path: %s. Dataset: %s" % (package, kind, target, build_mode, part_fullpath, dataset))
+                            all_ok = False
+                        else:
+                            msg = "[%s] Could not find %s in %s. Mode: %s.
Path: %s" % (package, kind, target, build_mode, part_fullpath) + if build_mode != 'Release': + logging.warning(msg) + else: + logging.debug(msg) + except NotFoundInDataset as e: + if must: + logging.error("[ERROR] [NOT FOUND] [%s] %s" % (package, e)) + all_ok = False + return all_ok + + + def is_invalid_lib(self, libpath): + return (libpath is None) or (utils.get_filename_no_ext(os.path.basename(libpath)) == magic_invalid_file) + + + def generate_cmakefiles(self, platforms, folder_output, compiler_replace_maps): + errors = 0 + packing = self.is_packing() + if not packing: + logging.warning("package: %s don't need generate cmakefiles" % self.get_package_name()) + return errors + oldcwd = os.getcwd() + utils.trymkdir(folder_output) + with utils.working_directory(folder_output): + package = self.get_package_name() + package_lower = package.lower() + package_upper = package.upper() + with open('%s-config.cmake' % package_lower, 'wt') as f: + f.write('''CMAKE_POLICY(PUSH) +CMAKE_POLICY(VERSION 3.0) +cmake_minimum_required(VERSION 3.0) +cmake_policy(SET CMP0011 NEW) + ''') + + condition = self.generate_cmake_condition(platforms, compiler_replace_maps) + if len(condition) > 0: + f.write('\nif(%s)\n' % condition) + + f.write('''\ninclude(${CMAKI_PATH}/facts/facts.cmake) +cmaki_download_package() +file(TO_NATIVE_PATH "${_DIR}" %s_HOME) +file(TO_NATIVE_PATH "${_DIR}/${CMAKI_PLATFORM}" %s_PREFIX) +set(%s_HOME "${%s_HOME}" PARENT_SCOPE) +set(%s_PREFIX "${%s_PREFIX}" PARENT_SCOPE) +include(${_MY_DIR}/${CMAKI_PLATFORM}.cmake) + ''' % (package_upper, package_upper, package_upper, package_upper, package_upper, package_upper)) + + if len(condition) > 0: + f.write('\nendif()\n') + + f.write('\nCMAKE_POLICY(POP)') + + with open('%s-config-version.cmake' % package_lower, 'wt') as f: + f.write('''\ +cmake_minimum_required(VERSION 3.0) +cmake_policy(SET CMP0011 NEW) +include(${CMAKI_PATH}/facts/facts.cmake) +cmaki_package_version_check() + ''') + + for plat in platforms: + + workspace = self.get_workspace(plat) + base_folder = self.get_base_folder() + + for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): + + with open('%s.cmake' % (plat), 'wt') as f: + + install_3rdparty_dependencies = True + + includes_set = [] + definitions_set = [] + system_depends_set = [] + depends_set = set() + + for target, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): + + target_lower = target.lower() + target_upper = target.upper() + + if self.has_library(platform_info) and (target != 'dummy'): + f.write('if(NOT TARGET %s)\n\n' % target_lower) + + try: + add_3rdparty_dependencies = platform_info['add_3rdparty_dependencies'] + except KeyError: + add_3rdparty_dependencies = True + + try: + lib_provided = platform_info['lib_provided'] + except KeyError: + lib_provided = True + + if 'include' in platform_info: + include = platform_info['include'] + for d in include: + includes_set.append(d) + + # rename to definitions + if 'definitions' in platform_info: + definitions = platform_info['definitions'] + if definitions is not None: + for d in definitions: + definitions_set.append(d) + + if 'system_depends' in platform_info: + system_depends = platform_info['system_depends'] + if system_depends is not None: + for sd in system_depends: + system_depends_set.append(sd) + + if 'targets_paths' in self.parameters: + targets_paths = self.parameters['targets_paths'] + if targets_paths is not None: + for key, value in 
targets_paths.items(): + f.write('file(TO_NATIVE_PATH "%s" %s)\n' % (value, key)) + + # work_base = os.path.join(oldcwd, workspace, base_folder, plat) + work_base = self.get_install_directory(plat) + + if ('executable' in platform_info) and (target != 'dummy'): + # a target in mode executable, dont need install + install_3rdparty_dependencies = False + + if 'use_run_with_libs' in platform_info: + if utils.is_windows(): + f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.cmd" %s_LAUNCHER)\n' % target_upper) + else: + f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.sh" %s_LAUNCHER)\n' % target_upper) + + executable = platform_info['executable'] + if not self.check_parts_exists(work_base, package, target, executable, [('bin', True)], build_modes=['Release']): + errors += 1 + release_bin = self.search_library_noexcept(work_base, executable, 'bin') + + for suffix in ['', '_EXECUTABLE']: + if 'use_run_with_libs' in platform_info: + f.write('set(%s%s "${%s_LAUNCHER}" "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, target_upper, plat, utils.get_norm_path(release_bin, native=False))) + else: + f.write('set(%s%s "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, plat, utils.get_norm_path(release_bin, native=False))) + f.write('file(TO_NATIVE_PATH "${%s%s}" %s%s)\n' % (target_upper, suffix, target_upper, suffix)) + f.write('\n') + + if ('dynamic' in platform_info) and (target != 'dummy'): + + dynamic = platform_info['dynamic'] + + # add depend + if add_3rdparty_dependencies: + f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) + + if utils.is_windows(): + if not self.check_parts_exists(work_base, package, target, dynamic, [('dll', True), ('lib', lib_provided), ('pdb', False)]): + errors += 1 + + debug_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + release_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + relwithdebinfo_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + minsizerel_dll = self.search_library_noexcept(work_base, dynamic, 'dll') + + debug_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + release_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + relwithdebinfo_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + minsizerel_lib = self.search_library_noexcept(work_base, dynamic, 'lib') + + try: + relwithdebinfo_pdb = self.search_library(work_base, dynamic, 'pdb') + except Exception as e: + logging.debug('exception searching lib: %s' % e) + relwithdebinfo_pdb = None + + try: + debug_pdb = self.search_library(work_base, dynamic, 'pdb') + except Exception as e: + logging.debug('exception searching lib: %s' % e) + debug_pdb = None + + f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) + f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) + f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) + + # dll + f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_dll, native=False))) + f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_dll, native=False))) + f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_dll, native=False))) + f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_dll, native=False))) + f.write('\n') + + # lib + if not self.is_invalid_lib(debug_lib): + 
f.write('\tIMPORTED_IMPLIB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) + if not self.is_invalid_lib(release_lib): + f.write('\tIMPORTED_IMPLIB_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) + if not self.is_invalid_lib(relwithdebinfo_lib): + f.write('\tIMPORTED_IMPLIB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) + if not self.is_invalid_lib(minsizerel_lib): + f.write('\tIMPORTED_IMPLIB_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) + f.write('\n') + + # pdb + if not self.is_invalid_lib(debug_pdb): + f.write('\tIMPORTED_PDB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_pdb, native=False))) + + if not self.is_invalid_lib(relwithdebinfo_pdb): + f.write('\tIMPORTED_PDB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_pdb, native=False))) + + f.write(')\n') + else: + + if not self.check_parts_exists(work_base, package, target, dynamic, [('so', True)]): + errors += 1 + + debug_so = self.search_library_noexcept(work_base, dynamic, 'so') + release_so = self.search_library_noexcept(work_base, dynamic, 'so') + relwithdebinfo_so = self.search_library_noexcept(work_base, dynamic, 'so') + minsizerel_so = self.search_library_noexcept(work_base, dynamic, 'so') + + try: + debug_so_full = os.path.join(oldcwd, work_base, debug_so) + debug_soname = utils.get_soname(debug_so_full, env=env_modified) + logging.debug('detected soname in debug library: {}'.format(debug_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + debug_soname = None + + try: + release_so_full = os.path.join(oldcwd, work_base, release_so) + release_soname = utils.get_soname(release_so_full, env=env_modified) + logging.debug('detected soname in release library: {}'.format(release_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + release_soname = None + + try: + relwithdebinfo_so_full = os.path.join(oldcwd, work_base, relwithdebinfo_so) + relwithdebinfo_soname = utils.get_soname(relwithdebinfo_so_full, env=env_modified) + logging.debug('detected soname in relwithdebinfo library: {}'.format(relwithdebinfo_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + relwithdebinfo_soname = None + + try: + minsizerel_so_full = os.path.join(oldcwd, work_base, minsizerel_so) + minsizerel_soname = utils.get_soname(minsizerel_so_full, env=env_modified) + logging.debug('detected soname in minsizerel library: {}'.format(minsizerel_soname)) + except Exception as e: + logging.debug('exception searching lib: %s' % e) + minsizerel_soname = None + + f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) + f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) + f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) + + # so + f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_so, native=False))) + f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_so, native=False))) + f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_so, native=False))) + f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_so, native=False))) + f.write('\n') + + # soname + if (debug_soname is not None) and 
os.path.exists( os.path.join(os.path.dirname(debug_so_full), debug_soname) ): + f.write('\tIMPORTED_SONAME_DEBUG "%s"\n' % utils.get_norm_path(debug_soname, native=False)) + + if (release_soname is not None) and os.path.exists( os.path.join(os.path.dirname(release_so_full), release_soname) ): + f.write('\tIMPORTED_SONAME_RELEASE "%s"\n' % utils.get_norm_path(release_soname, native=False)) + + if (relwithdebinfo_soname is not None) and os.path.exists( os.path.join(os.path.dirname(relwithdebinfo_so_full), relwithdebinfo_soname) ): + f.write('\tIMPORTED_SONAME_RELWITHDEBINFO "%s"\n' % utils.get_norm_path(relwithdebinfo_soname, native=False)) + + if (minsizerel_soname is not None) and os.path.exists( os.path.join(os.path.dirname(minsizerel_so_full), minsizerel_soname) ): + f.write('\tIMPORTED_SONAME_MINSIZEREL "%s"\n' % utils.get_norm_path(minsizerel_soname, native=False)) + + f.write(')\n') + + if ('static' in platform_info) and (target != 'dummy'): + + static = platform_info['static'] + + if not self.check_parts_exists(work_base, package, target, static, [('lib', True)]): + errors += 1 + + debug_lib = self.search_library_noexcept(work_base, static, 'lib') + release_lib = self.search_library_noexcept(work_base, static, 'lib') + relwithdebinfo_lib = self.search_library_noexcept(work_base, static, 'lib') + minsizerel_lib = self.search_library_noexcept(work_base, static, 'lib') + + if add_3rdparty_dependencies: + # register target + f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) + + f.write('ADD_LIBRARY(%s STATIC IMPORTED)\n' % target_lower) + f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) + f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) + + # lib + f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) + f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) + f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) + f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) + + f.write(')\n') + + if install_3rdparty_dependencies and (target != 'dummy'): + f.write('cmaki_install_3rdparty(%s)\n' % target_lower) + f.write('\n') + + if self.has_library(platform_info) and (target != 'dummy'): + f.write('endif()\n\n') + + # print includes + if len(includes_set) > 0: + for d in list(set(includes_set)): + f.write('list(APPEND %s_INCLUDE_DIRS ${_DIR}/%s)\n' % (package_upper, d)) + + f.write('\n') + + if len(definitions_set) > 0: + for d in list(set(definitions_set)): + f.write('add_definitions(%s)\n' % d) + f.write('\n') + + if len(system_depends_set) > 0: + f.write('# begin system depends\n') + for sd in list(set(system_depends_set)): + f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, sd)) + f.write('# end system depends\n') + + # if self.get_generate_find_package(): + # f.write('# Depends of %s (%s)\n' % (self.get_package_name(), self.get_version())) + # for dep in self.get_depends_raw(): + # package_name = dep.get_package_name() + # if package_name not in depends_set: + # if dep.have_any_in_target(plat, 'dynamic', compiler_replace_maps): + # f.write('cmaki_find_package(%s)\n' % (package_name)) + # else: + # f.write('# cmaki_find_package(%s) # static package\n' % (package_name)) + # depends_set.add(package_name) + # 
f.write('\n') + + logging.info('----------------------------------------------------') + if self.user_parameters.fast: + logging.debug('skipping for because is in fast mode: "generate_cmakefiles"') + break + + return errors + + + def show_environment_vars(self, env_modified): + package = self.get_package_name() + logging.debug('------- begin print environment variables for compile %s ---------' % package) + for key, value in sorted(env_modified.items()): + logging.debug("%s=%s" % (key, value)) + logging.debug('------- end print environment variables for compile %s -----------' % package) + + + def get_first_environment(self, compiler_replace_maps): + for plat in platforms: + for _, _, _, _, _, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): + return env_modified + return os.environ.copy() + + + def safe_system(self, cmd, compiler_replace_maps): + return utils.safe_system(cmd, env=self.get_first_environment(compiler_replace_maps)) + + + def remove_packages(self): + # remove packages before + for plat in platforms: + prefix_package = os.path.join(self.user_parameters.prefix, '%s.tar.gz' % self.get_workspace(plat)) + prefix_package_cmake = os.path.join(self.user_parameters.prefix, '%s-cmakelib-%s.tar.gz' % (self.get_base_folder(), sys.platform)) + prefix_folder_cmake = os.path.join(self.user_parameters.third_party_dir, self.get_base_folder()) + logging.info("preremoving package %s" % prefix_package) + logging.info("preremoving package cmakefiles %s" % prefix_package_cmake) + logging.info("preremoving folder cmakefiles %s" % prefix_folder_cmake) + utils.tryremove(prefix_package) + utils.tryremove(prefix_package_cmake) + utils.tryremove_dir(prefix_folder_cmake) + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt new file mode 100644 index 0000000..a7a3475 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt @@ -0,0 +1,30 @@ +PROJECT(UNITEST_CMAKI_GENERATOR_${CMAKI_PLATFORM}_${CMAKE_BUILD_TYPE} CXX) +cmake_minimum_required(VERSION 3.0) + +include(cmaki) + +get_filename_component(BASEDIR "${CMAKE_CURRENT_LIST_FILE}" PATH) +set(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_BINARY_DIR}) +set(EXECUTABLE_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") +set(LIBRARY_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") + +foreach(PACKAGE_ITER ${FIND_PACKAGES}) + string(TOUPPER ${PACKAGE_ITER} PACKAGE_UPPER) + string(REGEX REPLACE "-" "_" PACKAGE_UPPER ${PACKAGE_UPPER}) + include("${DEPENDS_PATH}/3rdpartyversions/${PACKAGE_ITER}.cmake") + message("find_package in test: ${PACKAGE_UPPER}, version: ${${PACKAGE_UPPER}_REQUIRED_VERSION}") + cmaki_find_package(${PACKAGE_ITER} ${${PACKAGE_UPPER}_REQUIRED_VERSION}) +endforeach() +message("include dirs: ${CMAKI_INCLUDE_DIRS}") +message("libs to link in test: ${CMAKI_LIBRARIES}") + +foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) + include_directories(${INCLUDE_DIR}) +endforeach() +add_executable(test_${CMAKI_PLATFORM} ${UNITTEST_PATH}) +target_link_libraries(test_${CMAKI_PLATFORM} ${CMAKI_LIBRARIES}) +install(TARGETS test_${CMAKI_PLATFORM} DESTINATION "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") + +enable_testing() +add_test(NAME test_cmake_${CMAKI_PLATFORM} COMMAND test_${CMAKI_PLATFORM} WORKING_DIRECTORY "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") + diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py
new file mode 100644
index 0000000..034813c
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py
@@ -0,0 +1,35 @@
+import os
+import logging
+import utils
+from third_party import platforms
+
+
+def upload(node, parameters, compiler_replace_maps):
+
+    if parameters.server is None:
+        logging.warning('parameter --server is mandatory for upload, skipping upload')
+    else:
+        # pack tar.gz binaries
+        for plat in platforms:
+            prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % node.get_workspace(plat))
+            if not os.path.isfile(prefix_package):
+                logging.error('error: does not exist: {}'.format(prefix_package))
+                return False
+            command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package)
+            node.ret += abs(utils.safe_system(command))
+
+        if node.ret != 0:
+            return False
+
+        # pack cmakefiles
+        if not parameters.no_packing_cmakefiles:
+            for plat in platforms:
+                base_folder = node.get_base_folder()
+                prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (base_folder, plat))
+                if not os.path.isfile(prefix_package_cmake):
+                    logging.error('error: does not exist: {}'.format(prefix_package_cmake))
+                    return False
+                command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package_cmake)
+                node.ret += abs(utils.safe_system(command))
+
+    return True
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py
new file mode 100644
index 0000000..1d57c34
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py
@@ -0,0 +1,48 @@
+import os
+import sys
+import logging
+# import urllib2
+import argparse
+import logging
+# import poster
+import requests
+
+logger = logging.getLogger(__name__)
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--url', required=True, dest='url', help='url')
+    parser.add_argument('--filename', required=True, dest='filename', help='filename')
+    parser.add_argument('--field', dest='field', help='field name', default='uploaded')
+    parameters = parser.parse_args()
+
+    if not os.path.exists(parameters.filename):
+        logging.error('does not exist: %s' % parameters.filename)
+        sys.exit(1)
+
+    with open(parameters.filename, 'rb') as f:
+        try:
+            response = requests.post(parameters.url, files={parameters.field: f})
+            if response.status_code == 200:
+                sys.exit(0)
+            else:
+                logger.error('Error uploading file {} to {}'.format(parameters.filename, parameters.url))
+                sys.exit(1)
+        except Exception as e:
+            logger.error('Exception uploading file {} to {}: {}'.format(parameters.filename, parameters.url, e))
+            sys.exit(1)
+
+    # # Register the streaming http handlers with urllib2
+    # poster.streaminghttp.register_openers()
+    #
+    # with open(parameters.filename, "rb") as f:
+    #     datagen, headers = poster.encode.multipart_encode({parameters.field: f})
+    #     # Create the Request object
+    #     request = urllib2.Request(parameters.url, datagen, headers)
+    #     # Actually do the request, and get the response
+    #     handler = urllib2.urlopen(request)
+    #     logging.info( handler.read() )
+    #     if handler.getcode() == 200:
+    #
sys.exit(0) + # else: + # sys.exit(1) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py new file mode 100644 index 0000000..767d218 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py @@ -0,0 +1,531 @@ +import os +import re +import sys +import shutil +import logging +import glob +import subprocess +import tarfile +import zipfile +import time +import contextlib +import hashlib +import yaml +import json +import errno +import multiprocessing +import fnmatch +from requests import get # to make GET request +from distutils.spawn import find_executable +try: + import bz2 + python_has_bz2 = True +except ImportError: + logging.debug('python module bz2 built-in is not available') + python_has_bz2 = False + + +class NotFoundProgram(Exception): + def __init__(self, msg): + self._msg = msg + def __repr__(self): + return "%s" % self._msg + + +def is_windows(): + return sys.platform.startswith("win") + + +def smart_merge(dict1, dict2): + assert(dict1 is not None) + assert(dict2 is not None) + for key, value in dict2.items(): + if isinstance(value, dict): + try: + dict1[key].update(value) + except KeyError: + dict1[key] = value + elif isinstance(value, list): + try: + dict1[key] += value + except KeyError: + dict1[key] = value + else: + dict1[key] = value + return dict1 + + +def apply_replaces(element, dictionary): + if isinstance(element, dict): + new = {} + for k,v in element.items(): + new[k] = apply_replaces(v, dictionary) + return new + elif isinstance(element, list): + new = [] + for e in element: + new.append( apply_replaces(e, dictionary) ) + return new + elif isinstance(element, bool): + return element + elif element is not None: + new_element = str(element) + for k,v in dictionary.items(): + # find in original, not in replaced + if str(element).find(k) != -1: + new_element = new_element.replace(k, v) + return new_element + else: + return None + + +def apply_replaces_vars(element, dictionary): + newdict = {} + for k,v in dictionary.items(): + newdict['$%s' % k] = v + newdict['${%s}' % k] = v + return apply_replaces(element, newdict) + + +def tryremove(filename): + try: + logging.debug('Removing file %s' % (filename)) + os.remove(filename) + except OSError: + pass + + +def _tryremove_dir(directory): + i = 0 + tries = 3 + while os.path.isdir(directory): + try: + shutil.rmtree(directory) + if not os.path.exists(directory): + i = tries + 1 + except OSError: + logging.debug('Fail removing %s. 
Retry %d/%d' % (directory, i + 1, tries)) + if i < tries: + time.sleep(1) + else: + raise Exception("Fail removing %s" % os.path.abspath(directory)) + finally: + i += 1 + + +def tryremove_dir(source): + logging.debug('Removing directory %s' % (source)) + if sys.platform.startswith('win'): + if os.path.isdir(source) and safe_system('rd /s /q %s' % source) != 0: + raise Exception('Fail removing %s' % source) + else: + _tryremove_dir(source) + + +def tryremove_dir_empty(source): + try: + os.rmdir(source) + except OSError as ex: + if ex.errno != errno.ENOTEMPTY: + logging.debug('Removing empty directory %s' % (source)) + + +def download_from_url(url, file_name): + with open(file_name, "wb") as file: + response = get(url) + file.write(response.content) + + +def setup_logging(level, logname): + format_console_log = '%(asctime)s %(levelname)-7s %(message)s' + format_date = '%H-%M:%S' + dirlog = os.path.dirname(logname) + if dirlog != '': + trymkdir(dirlog) + logger = logging.getLogger() + logger.setLevel(logging.DEBUG) + if(len(logging.root.handlers) == 1): + logging.root.removeHandler( logging.root.handlers[0] ) + handler = logging.StreamHandler() + handler.setLevel(level) + handler.setFormatter(logging.Formatter(format_console_log, format_date)) + logger.addHandler(handler) + handler2 = logging.FileHandler(logname) + handler2.setLevel(logging.DEBUG) + handler2.setFormatter(logging.Formatter(format_console_log, format_date)) + logger.addHandler(handler2) + + +def prompt_yes_no(default = False): + # raw_input returns the empty string for "enter" + yes = set(['yes','y', 'ye', '']) + no = set(['no','n']) + + choice = raw_input().lower() + if choice in yes: + return True + elif choice in no: + return False + else: + sys.stdout.write("Please respond with 'yes' or 'no'") + return default + + +def show_element(element, deep = 0): + if isinstance(element, dict): + for k,v in element.items(): + logging.info("%s<%s>" % ('\t'*deep, k)) + show_element(v, deep + 1) + elif isinstance(element, list): + for e in element: + show_element(e, deep + 1) + else: + logging.info('%s%s' % ('\t'*deep, element)) + + + +def rec_glob(rootdir, pattern): + + # logging.info('---> {} [START]'.format(rootdir)) + result = [] + for root, dirs, files in os.walk(rootdir): + # logging.info('---> {}'.format(root)) + for file in files: + # logging.info('---> {}'.format(file)) + if fnmatch.fnmatch(file, pattern): + # logging.info('---> {} [MATCH]'.format(file)) + result.append(os.path.join(root, file)) + return result + + +def trymkdir(directory): + if not os.path.exists( directory ): + os.makedirs( directory ) + + +def move_folder_recursive(source, destiny): + if not os.path.exists(source): + raise Exception('Error in move_folder_recursive: source not exists: %s' % source) + logging.debug('move recursive from {} to {}'.format(source, destiny)) + for archive in os.listdir(source): + # ignore some stuff + if archive.startswith('.git') or archive.startswith('.svn'): + continue + archive2 = os.path.join(source, archive) + destiny2 = os.path.join(destiny, archive) + if(os.path.isdir(archive2)): + move_folder_recursive(archive2, destiny2) + else: + if os.path.isfile(destiny2): + logging.debug('Replacing file %s' % destiny2) + tryremove(destiny2) + # try create destiny directory + trymkdir( os.path.dirname(destiny2) ) + # move file + shutil.move(archive2, destiny2) + + +def copy_folder_recursive(source, destiny): + if not os.path.exists(source): + raise Exception('Error in copy_folder_recursive: source not exists: %s' % source) + for archive 
in os.listdir(source): + # ignore some stuff + if archive.startswith('.git') or archive.startswith('.svn'): + continue + archive2 = os.path.join(source, archive) + destiny2 = os.path.join(destiny, archive) + if(os.path.isdir(archive2)): + copy_folder_recursive(archive2, destiny2) + else: + if os.path.isfile(destiny2): + logging.debug('Replacing file %s' % destiny2) + tryremove(destiny2) + # try create destiny directory + trymkdir( os.path.dirname(destiny2) ) + # copy file (and stat) + shutil.copy2(archive2, destiny2) + + +def extract_file(path, to_directory, environment): + + # convert to absolute + logging.debug('Extract file %s' % path) + path = os.path.abspath(path) + + if path.endswith('.zip'): + opener, mode = zipfile.ZipFile, 'r' + # elif path.endswith('.tar.gz') or path.endswith('.tgz'): + # opener, mode = tarfile.open, 'r:gz' + elif path.endswith('.tar.gz') or path.endswith('.tgz'): + # python have problems with big .tar.gz in linux -_- + if is_windows(): + with working_directory(to_directory): + logging.debug('Using cmake -E tar for package: %s' % path) + ret = safe_system('cmake -E tar zxvf %s' % path, env=environment) + ok = (ret == 0) + # be careful, early return + return ok + else: + with working_directory(to_directory): + logging.debug('Using system tar for package: %s' % path) + ret = safe_system('tar zxvf %s' % path, env=environment) + ok = (ret == 0) + # be careful, early return + return ok + elif path.endswith('.tar.bz2') or path.endswith('.tbz'): + # python have problems with big .tar.bz2 in windows + if is_windows(): + with working_directory(to_directory): + logging.debug('Using cmake -E tar for package: %s' % path) + ret = safe_system('cmake -E tar xvf %s' % path, env=environment) + ok = (ret == 0) + # be careful, early return + return ok + else: + if python_has_bz2: + opener, mode = tarfile.open, 'r:bz2' + else: + logging.warning('Not using python-bz2 module for uncompress: %s in %s' % (path, to_directory)) + with working_directory(to_directory): + logging.debug('Using bunzip2 and tar for package: %s' % path) + ret = safe_system('bunzip2 -c %s | tar xvf -' % path, env=environment) + ok = (ret == 0) + + # be careful, early return + return ok + elif path.endswith('.tar.xz'): + # needd "xz" + with working_directory(to_directory): + ret = safe_system('tar xpvf %s' % path, env=environment) + ok = (ret == 0) + return ok + else: + raise ValueError("Could not extract `%s` as no appropriate extractor is found" % path) + + # create directory if not exists + trymkdir(to_directory) + with working_directory(to_directory): + file = opener(path, mode) + try: + file.extractall() + finally: + file.close() + return True + + +# Copy Paste from run_tests (handler.py) +def detect_ncpus(): + return multiprocessing.cpu_count() + + +def get_norm_path(pathfile, native=True): + if native and is_windows(): + return pathfile.replace('/', '\\') + else: + return pathfile.replace('\\', '/') + + +def get_filename_no_ext(filename): + return os.path.splitext(filename)[0] + + +def get_soname(libfile, env=os.environ.copy()): + + if is_windows(): + logging.error('get_soname is not supported in windows') + return + + cmd = ['objdump', "-p", libfile] + for line in get_stdout(cmd, env, 'objdump'): + if line.find('SONAME') != -1: + return line.split()[1] + raise Exception('No soname detected in %s' % libfile) + + +def get_needed(libfile, env=os.environ.copy()): + + if is_windows(): + logging.error('get_needed is not supported in windows') + return + + cmd = ['objdump', "-p", libfile] + for line in 
get_stdout(cmd, env, 'objdump'): + if line.find('NEEDED') != -1: + yield line.split()[1] + + +def get_real_home(): + if sys.platform.startswith("sun"): + # problems launching subshell in solaris + return os.environ['HOME'] + elif sys.platform.startswith("linux"): + cmd = "REAL_HOME=$(cd $HOME && pwd -P) && echo $REAL_HOME" + for line in get_stdout(cmd): + return line + return os.environ['HOME'] + else: + return os.path.expanduser('~') + + +@contextlib.contextmanager +def working_directory(path): + prev_cwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(prev_cwd) + + +def walklevel(some_dir, level=1): + ''' + os.walk() with max level + ''' + some_dir = some_dir.rstrip(os.path.sep) + if not os.path.isdir(some_dir): + logging.error('%s is not folder' % some_dir) + sys.exit(1) + + num_sep = some_dir.count(os.path.sep) + for root, dirs, files in os.walk(some_dir): + yield root, dirs, files + num_sep_this = root.count(os.path.sep) + if num_sep + level <= num_sep_this: + del dirs[:] + + +def get_revision_svn(repo, path_svn='svn', env=os.environ.copy()): + ''' + This command need svn in PATH + ''' + if os.path.exists(repo): + with working_directory(repo): + env_copy = env.copy() + svn_bin = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'bin')) + svn_lib = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'lib')) + env_copy['PATH'] = "%s:%s" % (svn_bin, env_copy['PATH']) + env_copy['LD_LIBRARY_PATH'] = "%s:%s" % (svn_lib, env_copy['LD_LIBRARY_PATH']) + cmd = "%s info" % path_svn + p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines=True, env=env_copy) + data, err = p.communicate() + + # clean stdout + data = [line.strip() for line in data.split('\n') if line.strip()] + + for line in data: + separator = 'Last Changed Rev: ' + if line.startswith(separator): + return int(line[len(separator):]) + else: + separator = 'Revisi.n del .ltimo cambio: ' + if re.match(separator, line) is not None: + return int(line[len(separator):]) + return -1 + + +def verbose(parameters, msg): + if parameters.verbose > 0: + logging.info(msg) + + +def superverbose(parameters, msg): + if parameters.verbose > 1: + logging.info(msg) + + +def hyperverbose(parameters, msg): + if parameters.verbose > 2: + logging.info(msg) + + +def md5sum(filename, blocksize=65536): + hash = hashlib.md5() + with open(filename, "rb") as f: + for block in iter(lambda: f.read(blocksize), b""): + hash.update(block) + return hash.hexdigest() + + +def serialize(pythonDict, fileName): + serialize_json(pythonDict, fileName) + + +def deserialize(fileName): + return deserialize_json(fileName) + + +def serialize_yaml(pythonDict, fileName): + serialiedData = yaml.dump(pythonDict, default_flow_style=True) + with open(fileName, 'wt') as f: + f.write(serialiedData) + + +def deserialize_yaml(fileName): + with open(fileName, 'rt') as f: + stringData = f.read() + return yaml.load(stringData) + + +def serialize_json(pythonDict, fileName): + serialiedData = json.dumps(pythonDict) + with open(fileName, 'wt') as f: + f.write(serialiedData) + + +def deserialize_json(fileName): + with open(fileName, 'rt') as f: + stringData = f.read() + return json.loads(stringData) + + +def get_stdout(cmd, env=os.environ.copy(), program_required=None): + if isinstance(cmd, list): + cmd = ' '.join(cmd) + # logging.debug('launch cmd: %s' % cmd) + + # search executable + ok = True + if program_required is not None: + ok = find_executable(program_required, env['PATH']) + if ok: + p = 
subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) + data, err = p.communicate() + data = [line.strip() for line in data.split('\n') if line.strip()] + for line in data: + # logging.debug('[out cmd] %s' % line) + yield line + else: + raise NotFoundProgram('Not found program %s, for execute: %s' % (program_required, cmd)) + + +def safe_system(cmd, env=None): + if env is None: + env = os.environ.copy() + logging.debug("exec command: %s" % cmd) + + if 'CMAKI_PRINT' in env: + try: + return subprocess.call('{}'.format(cmd), env=env, shell=True) + except OSError as e: + logging.warning(str(e)) + return -1 + else: + p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) + data, err = p.communicate() + data = [line for line in data.split('\n')] + if p.returncode != 0: + logging.error("begin@output: %s" % cmd) + for line in data: + if p.returncode != 0: + logging.warning(line) + else: + logging.debug(line) + if p.returncode != 0: + logging.error("end@output: %s" % cmd) + return p.returncode + + +if __name__ == '__main__': + print(rec_glob('.', '*.yml')) + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml new file mode 100644 index 0000000..cf179bc --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml @@ -0,0 +1,12 @@ +language: c +services: docker +os: linux +env: + - IMAGE=linux-x64 + # - IMAGE=windows-x86 + - IMAGE=windows-x64 + # - IMAGE=linux-x86 + - IMAGE=android-arm +# - IMAGE=browser-asmjs +script: + - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/docker.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt new file mode 100644 index 0000000..5cd8b41 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt @@ -0,0 +1,6 @@ +project(cmaki_identifier_project CXX) +cmake_minimum_required(VERSION 3.0) +set(CMAKE_CXX_STANDARD 14) +include_directories(boostorg_predef/include) +enable_testing() +add_subdirectory(tests) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md new file mode 100644 index 0000000..e49baa2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md @@ -0,0 +1,19 @@ +# identify your platform + +gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_identifier.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_identifier) + +MSVC 2015: [![Build status](https://ci.appveyor.com/api/projects/status/tljl8xip6m8joi86?svg=true)](https://ci.appveyor.com/project/makiolo/cmaki-identifier) + +## travis: +- linux_64_glibc_2.19-gcc_4-debug +- linux_64_glibc_2.19-gcc_4-release +- linux_64_glibc_2.19-clang_3-debug +- linux_64_glibc_2.19-clang_3-release +- macos_64-clang_7-debug +- macos_64-clang_7-release + +## appveyor: +- windows_32-msvc_2015-debug +- windows_32-msvc_2015-release +- windows_64-msvc_2015-debug +- windows_64-msvc_2015-release diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh new file mode 100644 index 0000000..ebffa54 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# if [ $# -e 0 ]; then +# echo $0: [ERROR], usage: ./cmaki_emulator.sh +# exit 1 +# fi + +export DIRPROGRAM="$( cd "$( dirname "$1" )" >/dev/null && pwd )" +export BASENAMEPROGRAM=$(basename "$1") +export CMAKI_PWD="${CMAKI_PWD:-$(pwd)}" +export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" +export LD_LIBRARY_PATH=$(pwd):$LD_LIBRARY_PATH + +if [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x86" ]]; then + cd ${DIRPROGRAM} + wine ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x64" ]]; then + cd ${DIRPROGRAM} + wine ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/android-arm" ]]; then + cd ${DIRPROGRAM} + unset LD_LIBRARY_PATH + qemu-arm -L /usr/arm-linux-gnueabi ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv6" ]]; then + cd ${DIRPROGRAM} + qemu-arm ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv7" ]]; then + cd ${DIRPROGRAM} + qemu-arm ./$BASENAMEPROGRAM "${@:2}" +elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/browser-asmjs" ]]; then + cd ${DIRPROGRAM} + nodejs ./$BASENAMEPROGRAM "${@:2}" +else + $CMAKI_EMULATOR "$1" "${@:2}" +fi + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake new file mode 100644 index 0000000..7a50cc9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake @@ -0,0 +1,12 @@ +set(PLATFORM "") +set(dirscript ${CMAKE_CURRENT_LIST_DIR}) +IF(WIN32) + set(executable cmaki_identifier.exe) +else() + set(executable cmaki_identifier.sh) +endif() +execute_process(COMMAND ${dirscript}/${executable} + OUTPUT_VARIABLE PLATFORM + OUTPUT_STRIP_TRAILING_WHITESPACE) +MESSAGE("${PLATFORM}") + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh new file mode 100755 index 0000000..371107b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh @@ -0,0 +1,14 @@ +#!/bin/bash +export DIRSCRIPT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_PWD="${CMAKI_PWD:-$DIRSCRIPT}/.." 
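
The cmaki_emulator.sh above dispatches on DEFAULT_DOCKCROSS_IMAGE to decide how the freshly built binary is executed: Wine for the Windows cross images, qemu-arm for the ARM ones, nodejs for the asm.js build, and a plain (or user-supplied CMAKI_EMULATOR) invocation otherwise. A condensed sketch of that dispatch, not the shipped script, assuming the target binary sits in the current working directory:

```
#!/bin/bash
# Condensed sketch of the emulator dispatch (assumes the binary is in $PWD).
set -e
program="$1"; shift
case "${DEFAULT_DOCKCROSS_IMAGE:-}" in
    makiolo/windows-x86|makiolo/windows-x64)
        wine "./${program}" "$@" ;;                                # Windows binaries under Wine
    makiolo/android-arm)
        qemu-arm -L /usr/arm-linux-gnueabi "./${program}" "$@" ;;  # ARM with Android sysroot
    makiolo/linux-armv6|makiolo/linux-armv7)
        qemu-arm "./${program}" "$@" ;;                            # plain ARM user emulation
    makiolo/browser-asmjs)
        nodejs "./${program}" "$@" ;;                              # asm.js build under node
    *)
        ${CMAKI_EMULATOR:-} "./${program}" "$@" ;;                 # native run or custom emulator
esac
```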
+export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" + +if [ -f "cmaki_identifier.exe" ]; then + $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier.exe +else + $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier +fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do new file mode 100644 index 0000000..4452ece --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do @@ -0,0 +1,3 @@ +#!/bin/bash +function npm-do { (PATH=$(npm bin):$PATH; eval $@;) } +# set -x PATH ./node_modules/.bin $PATH diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json new file mode 100644 index 0000000..ecdd629 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json @@ -0,0 +1,30 @@ +{ + "name": "cmaki_identifier", + "version": "1.0.0", + "description": "identify your platform", + "scripts": { + "clean": "cmaki clean", + "setup": "cmaki setup", + "compile": "cmaki compile", + "install": "cmaki setup && cmaki compile", + "test": "cmaki test", + "upload": "cmaki upload" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/makiolo/cmaki_identifier.git" + }, + "keywords": [ + "c++", + "identifier" + ], + "author": "Ricardo Marmolejo García", + "license": "MIT", + "bugs": { + "url": "https://github.com/makiolo/cmaki_identifier/issues" + }, + "homepage": "https://github.com/makiolo/cmaki_identifier#readme", + "devDependencies": { + "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd new file mode 100644 index 0000000..36bd277 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd @@ -0,0 +1,7 @@ +@echo off +if exist "boostorg_predef" ( + rmdir /s /q boostorg_predef +) +git clone -q https://github.com/boostorg/predef.git boostorg_predef + +..\cmaki_scripts\setup.cmd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh new file mode 100644 index 0000000..4e1af5c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +if [ -d "boostorg_predef" ]; then + rm -Rf boostorg_predef +fi +git clone -q https://github.com/boostorg/predef.git boostorg_predef + +../cmaki_scripts/setup.sh diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt new file mode 100644 index 0000000..b806a9b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt @@ -0,0 +1,33 @@ +if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") +endif() + 
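
The package.json above wires the npm lifecycle scripts to cmaki subcommands, so the whole build is driven through npm. A rough manual equivalent, assuming the npm-mas-mas checkout layout shown in these paths:

```
cd cmaki_identifier
npm install     # package.json "install" -> cmaki setup && cmaki compile
npm test        # package.json "test"    -> cmaki test
# ...or call the underlying cmaki_scripts directly:
./node_modules/npm-mas-mas/cmaki_scripts/setup.sh
./node_modules/npm-mas-mas/cmaki_scripts/compile.sh
./node_modules/npm-mas-mas/cmaki_scripts/test.sh
```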
+add_executable(cmaki_identifier cmaki_identifier.cpp) + +install(TARGETS cmaki_identifier DESTINATION $ENV{CMAKI_INSTALL}) +install(FILES ../cmaki_identifier.cmake DESTINATION $ENV{CMAKI_INSTALL}) +install(PROGRAMS ../cmaki_identifier.sh DESTINATION $ENV{CMAKI_INSTALL}) +install(PROGRAMS ../cmaki_emulator.sh DESTINATION $ENV{CMAKI_INSTALL}) +add_test( + NAME all + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +add_test( + NAME os + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +add_test( + NAME arch + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +add_test( + NAME compiler + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +set_tests_properties(os PROPERTIES ENVIRONMENT "CMAKI_INFO=OS") +set_tests_properties(arch PROPERTIES ENVIRONMENT "CMAKI_INFO=ARCH") +set_tests_properties(compiler PROPERTIES ENVIRONMENT "CMAKI_INFO=COMPILER") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp new file mode 100644 index 0000000..6cb91e7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp @@ -0,0 +1,345 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef __EMSCRIPTEN__ +#include +#endif + +#define STR_HELPER(x) #x +#define STR(x) STR_HELPER(x) + +#ifdef _WIN32 + +// problems with variadic in windows +std::string get_environment(const char* varname, const char* default_) +{ + char* varname_str = getenv(varname); + std::string value_str; + if(varname_str == NULL) + value_str = default_; + else + value_str = varname_str; + return value_str; +} + +#else + +template +std::string get_environment(T default_) +{ + return default_; +} + +template +std::string get_environment(T varname, Args ... others) +{ + char* varname_str = getenv(varname); + std::string value_str; + if(varname_str == NULL) + value_str = get_environment(others...); + else + value_str = varname_str; + return value_str; +} + +#endif + +int main() +{ +#ifdef __EMSCRIPTEN__ + #define OPERATIVE_SYSTEM "javascript" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_WINDOWS + #define OPERATIVE_SYSTEM "windows" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_ANDROID + #define OPERATIVE_SYSTEM "android" + #define OPERATIVE_RESTRICTION "_api_" STR(__ANDROID_API__) +#elif BOOST_OS_LINUX + #define OPERATIVE_SYSTEM "linux" + #ifdef __GLIBC__ + #define OPERATIVE_RESTRICTION "_glibc_" STR(__GLIBC__) "." 
STR(__GLIBC_MINOR__) + #else + #define OPERATIVE_RESTRICTION "" + #endif +#elif BOOST_OS_MACOS + #define OPERATIVE_SYSTEM "macos" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_AIX + #define OPERATIVE_SYSTEM "aix" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_AMIGAOS + #define OPERATIVE_SYSTEM "amigaos" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_BEOS + #define OPERATIVE_SYSTEM "beos" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_BSD + #if BOOST_OS_BSD_DRAGONFLY + #define OPERATIVE_SYSTEM "dragonfly_bsd" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_FREE + #define OPERATIVE_SYSTEM "freebsd" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_BSDI + #define OPERATIVE_SYSTEM "bsdios" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_NET + #define OPERATIVE_SYSTEM "netbsd" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_OPEN + #define OPERATIVE_SYSTEM "openbsd" + #define OPERATIVE_RESTRICTION "" + #endif +#elif BOOST_OS_CYGWIN + #define OPERATIVE_SYSTEM "cygwin" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_HPUX + #define OPERATIVE_SYSTEM "hpux" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_IRIX + #define OPERATIVE_SYSTEM "irix" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_OS400 + #define OPERATIVE_SYSTEM "os400" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_QNX + #define OPERATIVE_SYSTEM "qnx" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_SOLARIS + #define OPERATIVE_SYSTEM "solaris" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_UNIX + #define OPERATIVE_SYSTEM "unix" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_SVR4 + #define OPERATIVE_SYSTEM "svr4" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_VMS + #define OPERATIVE_SYSTEM "vms" + #define OPERATIVE_RESTRICTION "" +#else + #define OPERATIVE_SYSTEM "unknown_so" + #define OPERATIVE_RESTRICTION "" +#endif + +#if BOOST_ARCH_X86 + #if BOOST_ARCH_X86_32 + #define ARCHITECTURE "32" + #elif BOOST_ARCH_X86_64 + #define ARCHITECTURE "64" + #else + #define ARCHITECTURE "unknown_arch" + #endif +#elif BOOST_ARCH_ARM + #define ARCHITECTURE "arm" +#elif BOOST_ARCH_ALPHA + #define ARCHITECTURE "alpha" +#elif BOOST_ARCH_BLACKFIN + #define ARCHITECTURE "blackfin" +#elif BOOST_ARCH_CONVEX + #define ARCHITECTURE "convex" +#elif BOOST_ARCH_IA64 + #define ARCHITECTURE "ia64" +#elif BOOST_ARCH_M68K + #define ARCHITECTURE "m68k" +#elif BOOST_ARCH_MIPS + #define ARCHITECTURE "mips" +#elif BOOST_ARCH_PARISK + #define ARCHITECTURE "parisk" +#elif BOOST_ARCH_PPC + #define ARCHITECTURE "ppc" +#elif BOOST_ARCH_PYRAMID + #define ARCHITECTURE "pyramid" +#elif BOOST_ARCH_RS6000 + #define ARCHITECTURE "rs6000" +#elif BOOST_ARCH_SPARC + #define ARCHITECTURE "sparc" +#elif BOOST_ARCH_SH + #define ARCHITECTURE "sh" +#elif BOOST_ARCH_SYS370 + #define ARCHITECTURE "sys370" +#elif BOOST_ARCH_SYS390 + #define ARCHITECTURE "sys390" +#elif BOOST_ARCH_Z + #define ARCHITECTURE "z" +#else + #define ARCHITECTURE "unknown_arch" +#endif + +#if BOOST_COMP_MSVC + #define COMPILER "msvc" + #if _MSC_VER == 1911 + #define COMPILER_RESTRICTION "_2017" + #elif _MSC_VER == 1910 + #define COMPILER_RESTRICTION "_2017" + #elif _MSC_VER == 1900 + #define COMPILER_RESTRICTION "_2015" + #elif _MSC_VER == 1800 + #define COMPILER_RESTRICTION "_2013" + #elif _MSC_VER == 1700 + #define COMPILER_RESTRICTION "_2012" + #elif _MSC_VER == 1600 + #define COMPILER_RESTRICTION "_2010" + #elif _MSC_VER == 1500 + #define COMPILER_RESTRICTION "_2008" + #elif _MSC_VER == 1400 + #define COMPILER_RESTRICTION "_2005" + #elif 
_MSC_VER == 1310 + #define COMPILER_RESTRICTION "_2003" + #else + #define COMPILER_RESTRICTION "_msc_ver_" STR(_MSC_VER) + #endif +#elif BOOST_COMP_GNUC + #define COMPILER "gcc" + #define COMPILER_RESTRICTION "_" STR(__GNUC__) +#elif BOOST_COMP_CLANG + #define COMPILER "clang" + #define COMPILER_RESTRICTION "_" STR(__clang_major__) +#elif BOOST_COMP_BORLAND + #define COMPILER "borland" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_COMO + #define COMPILER "comeau" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_DEC + #define COMPILER "dec" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_DIAB + #define COMPILER "diab" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_DMC + #define COMPILER "dmc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_SYSC + #define COMPILER "sysc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_EDG + #define COMPILER "edg" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_PATH + #define COMPILER "path" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_GCCXML + #define COMPILER "gccxml" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_GHS + #define COMPILER "ghs" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_HPACC + #define COMPILER "hpacc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_IAR + #define COMPILER "iar" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_IBM + #define COMPILER "ibm" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_INTEL + #define COMPILER "intel" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_KCC + #define COMPILER "kcc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_LLVM + #define COMPILER "llvm" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_HIGHC + #define COMPILER "highc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_MWERKS + #define COMPILER "mwerks" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_MRI + #define COMPILER "mri" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_MPW + #define COMPILER "mrw" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_PALM + #define COMPILER "palm" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_PGI + #define COMPILER "pgi" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_SGI + #define COMPILER "sgi" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_SUNPRO + #define COMPILER "sunpro" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_TENDRA + #define COMPILER "tendra" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_WATCOM + #define COMPILER "watcom" + #define COMPILER_RESTRICTION "" +#else + #define COMPILER "unknown_compiler" + #define COMPILER_RESTRICTION "" +#endif + + // structure (3 chunks joined with "-"): + // 1. platform (2 or 3 chunks joined with "_") + // 1.1. operative system (string but forbidden "_" and "-") + // 1.2. architecture (string but forbidden "_" and "-") + // 1.3. (optional) operative system restriction (is explanation and version joined with "_") + // 1.3.1. what is this restriction (string but forbidden "_" and "-") + // 1.3.2. version (1-4 chunks joined with ".") + // 2. compiler (1 or 2 chunks joined with "_") + // 2.1. compiler (string but forbidden "_" and "-") + // 2.2. (optional) compiler restriction (is version) + // 2.2.1. version (1-4 chunks joined with ".") + // 3. build mode (1 or 2 chunks joined with "_") + // 3.1. build_mode (string but forbidden "_" and "-") + // 3.2. 
(optional) build mode restrictions + + std::string build_mode = get_environment("MODE", "Debug"); + std::string cmaki_entropy = get_environment("CMAKI_ENTROPY", ""); + std::string cmaki_info = get_environment("CMAKI_INFO", "ALL"); + + std::transform(build_mode.begin(), build_mode.end(), build_mode.begin(), ::tolower); + std::transform(cmaki_entropy.begin(), cmaki_entropy.end(), cmaki_entropy.begin(), ::tolower); + + // TODO: mas consultas + // Arquitectura, sólo el numero: 32 o 64 + // Compilador: COMPILER + COMPILER_RESTRICTION + // Todo: OPERATIVE_SYSTEM + "_" + ARCHITECTURE + OPERATIVE_RESTRICTION + "-" + COMPILER + COMPILER_RESTRICTION + "-" + build_mode + cmaki_entropy + if(cmaki_info == "OS") + { + std::cout << OPERATIVE_SYSTEM + << std::endl; + } + else if(cmaki_info == "ARCH") + { + std::cout << ARCHITECTURE + << std::endl; + } + else if(cmaki_info == "COMPILER") + { + std::cout << COMPILER + << COMPILER_RESTRICTION + << std::endl; + } + else // if(cmaki_info == "ALL") + { + std::cout << OPERATIVE_SYSTEM + << "_" << ARCHITECTURE + << OPERATIVE_RESTRICTION + << "-" << COMPILER + << COMPILER_RESTRICTION + << "-" << build_mode; + if(cmaki_entropy.length() > 0) + { + std::cout << "-" << cmaki_entropy; + } + std::cout << std::endl; + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml new file mode 100644 index 0000000..44de95c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml @@ -0,0 +1,5 @@ +language: c +services: docker +os: linux +script: + - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE new file mode 100644 index 0000000..53546c1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
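
The identifier program above prints a platform triple joined with "-" (operating system plus architecture, compiler, build mode, with an optional entropy suffix), for example linux_64_glibc_2.19-gcc_4-debug from the README. A sketch of how a packaging script might split that triple, assuming cmaki_identifier has been installed into CMAKI_INSTALL:

```
#!/bin/bash
# Split the triple emitted by cmaki_identifier, e.g. "linux_64_glibc_2.19-gcc_4-debug".
set -e
IDENTIFIER="${CMAKI_INSTALL:-./bin}/cmaki_identifier"

platform="$(CMAKI_INFO=ALL "$IDENTIFIER")"
os_arch="${platform%%-*}"     # chunk 1: OS + arch (+ optional restriction)
rest="${platform#*-}"
compiler="${rest%%-*}"        # chunk 2: compiler (+ optional version)
mode="${rest#*-}"             # chunk 3: build mode (+ optional entropy)

echo "packaging artifact for ${os_arch} / ${compiler} / ${mode}"
```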
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md new file mode 100644 index 0000000..e227c42 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md @@ -0,0 +1,9 @@ +# cmaki_scripts +scripts for cmaki: compile, tests, upload .... + +# windows problems +``` +$ set PATH=%CD%\node_modules\cmaki_scripts;%PATH% +$ echo %PATHEXT% +.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC;.PY;.JS; +``` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd new file mode 100644 index 0000000..72202c8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd @@ -0,0 +1,15 @@ +@echo off +IF EXIST node_modules\cmaki ( + echo . +) else ( + md node_modules\cmaki + cd node_modules && git clone -q https://github.com/makiolo/cmaki.git && cd .. + cd node_modules/cmaki && rm -Rf .git && cd ..\.. +) +IF EXIST node_modules\cmaki_generator ( + echo . +) else ( + md node_modules\cmaki_generator + cd node_modules && git clone -q https://github.com/makiolo/cmaki_generator.git && cd .. + cd node_modules/cmaki_generator && rm -Rf .git && cd ..\.. +) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd new file mode 100644 index 0000000..0a2db63 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd @@ -0,0 +1,40 @@ +@echo off + +echo [0/3] preinstall +set PATH=%CMAKI_PWD%\node_modules\cmaki_scripts;%PATH% +env | sort + +powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/cmaki_depends.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" +if %errorlevel% neq 0 exit /b %errorlevel% + +if exist package.json ( + + echo [1/3] prepare + :: call ncu -u + npm cache clean --force + + echo [2/3] compile + npm install + if %errorlevel% neq 0 exit /b %errorlevel% + + echo [3/3] run tests + npm test + if %errorlevel% neq 0 exit /b %errorlevel% + +) else ( + + echo [1/3] prepare + if exist node_modules\cmaki_scripts (rmdir /s /q node_modules\cmaki_scripts) + powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/bootstrap.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" + if %errorlevel% neq 0 exit /b %errorlevel% + + echo [2/3] compile + call node_modules\cmaki_scripts\install.cmd + if %errorlevel% neq 0 exit /b %errorlevel% + + echo [3/3] run tests + call node_modules\cmaki_scripts\test.cmd + if %errorlevel% neq 0 exit /b %errorlevel% + +) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh new file mode 100644 index 0000000..066caae --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh @@ -0,0 +1,46 @@ +#!/bin/bash +set -e + +export 
NPP_CACHE="${NPP_CACHE:-FALSE}" + +env | sort + +if [[ -d "bin" ]]; then + rm -Rf bin +fi + +if [[ -d "artifacts" ]]; then + rm -Rf artifacts +fi + +if [[ -d "node_modules" ]]; then + rm -Rf node_modules +fi + +if [ -f "artifacts.json" ]; then + rm artifacts.json +fi + +if [ -f "package.json" ]; then + + echo [1/2] compile + npm install + + echo [2/2] run tests + npm test +else + echo [1/2] compile + ./node_modules/cmaki_scripts/setup.sh && ./node_modules/cmaki_scripts/compile.sh + + echo [2/2] run tests + ./node_modules/cmaki_scripts/test.sh +fi + +if [ -f "cmaki.yml" ]; then + echo [3/3] upload artifact + if [ -f "package.json" ]; then + npm run upload + else + ./node_modules/cmaki_scripts/upload.sh + fi +fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd new file mode 100644 index 0000000..5f83632 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd @@ -0,0 +1,3 @@ +@echo off +rd /s /q artifacts 2> NUL +rd /s /q coverage 2> NUL diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh new file mode 100755 index 0000000..b204603 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh @@ -0,0 +1,16 @@ +#!/bin/bash +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export CC="${CC:-gcc}" +export MODE="${MODE:-Debug}" +export COMPILER_BASENAME=$(basename ${CC}) + +if [ -d $COMPILER_BASENAME/$MODE ]; then + rm -Rf $COMPILER_BASENAME/$MODE +fi +if [ "$NPP_CACHE" == "FALSE" ]; then + rm -Rf artifacts 2> /dev/null +fi +rm -Rf coverage 2> /dev/null +rm -Rf gcc 2> /dev/null +rm -Rf clang 2> /dev/null + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd new file mode 100644 index 0000000..674bfd5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd @@ -0,0 +1,22 @@ +@ECHO OFF +SET DIRWORK=%~dp0 + +IF NOT EXIST "%NODE%" ( + IF DEFINED NODEHOME ( + IF EXIST "%NODEHOME%\node.exe" ( + SET NODE="%NODEHOME%\node.exe" + ) ELSE ( + ECHO Error: Missing node.exe from node home: "%NODEHOME%" + ) + ) ELSE ( + IF EXIST "C:\Program Files\nodejs\node.exe" ( + ECHO WARNING: Defaulting NODE configuration + SET NODE=C:\Program Files\nodejs\node.exe + SET NODEHOME=C:\Program Files\nodejs + ) ELSE ( + ECHO ERROR: NODE configuration unavailable! 
+ ) + ) +) + +"%NODE%" %DIRWORK%\cmaki.js %* diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js new file mode 100755 index 0000000..e204fd7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js @@ -0,0 +1,193 @@ +#!/usr/bin/env node + +var os = require('os') +var fs = require('fs'); +var path = require('path') +var shelljs = require('shelljs'); +var is_win = (os.platform() === 'win32'); + +if(!process.env.CMAKI_PWD) +{ + if (fs.existsSync(path.join("..", "..", "node_modules", "npm-mas-mas"))) { + shelljs.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); + process.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); + } else { + shelljs.env['CMAKI_PWD'] = path.join(process.cwd()); + process.env['CMAKI_PWD'] = path.join(process.cwd()); + } +} +else +{ + shelljs.env['CMAKI_PWD'] = process.env['CMAKI_PWD']; +} + +if(!process.env.CMAKI_INSTALL) +{ + shelljs.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); + process.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); +} +else +{ + shelljs.env['CMAKI_INSTALL'] = process.env['CMAKI_INSTALL']; +} + +if(!process.env.NPP_SERVER) +{ + shelljs.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' + process.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' +} +else +{ + shelljs.env['NPP_SERVER'] = process.env['NPP_SERVER']; +} + +if(!process.env.NPP_CACHE) +{ + shelljs.env['NPP_CACHE'] = 'TRUE' + process.env['NPP_CACHE'] = 'TRUE' +} +else +{ + shelljs.env['NPP_CACHE'] = process.env['NPP_CACHE']; +} + +if(is_win) +{ + cmaki_identifier = 'cmaki_identifier.cmd' +} +else +{ + cmaki_identifier = 'cmaki_identifier.sh' +} + + +// no check in cmaki_identifier for avoid recursion +if( process.cwd().replace(/\\/g, "/").search("/cmaki_identifier") == -1 ) +{ + if(!fs.existsSync( path.join( process.env['CMAKI_INSTALL'], cmaki_identifier) )) + { + dir_identifier = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_identifier'); + + backup1 = shelljs.env['CMAKI_PWD']; + backup2 = process.env['CMAKI_PWD']; + + shelljs.env['CMAKI_PWD'] = dir_identifier; + process.env['CMAKI_PWD'] = dir_identifier; + + shelljs.cd( dir_identifier ); + + if (shelljs.exec('npm install').code !== 0) { + shelljs.echo('Error detecting compiler (compiling cmaki_identifier ...)'); + shelljs.exit(1); + } + + shelljs.env['CMAKI_PWD'] = backup1; + process.env['CMAKI_PWD'] = backup2; + } +} + +if(!process.env.MODE) +{ + shelljs.env['MODE'] = 'Debug'; + process.env['MODE'] = 'Debug'; +} +else +{ + shelljs.env['MODE'] = process.env['MODE']; +} + +function trim(s) +{ + return ( s || '' ).replace( /^\s+|\s+$/g, '' ); +} + +var environment_vars = []; +next_is_environment_var = false; +process.argv.forEach(function(val, index, array) +{ + if(next_is_environment_var) + { + environment_vars.push(val); + } + next_is_environment_var = (val == '-e'); +}); +environment_vars.forEach(function(val, index, array) +{ + var chunks = val.split("="); + if( chunks.length == 2 ) + { + shelljs.env[chunks[0]] = chunks[1]; + process.env[chunks[0]] = chunks[1]; + } + else + { + console.log("Error in -e with value: " + val); + } +}); + +//////////////////////////////////////////////////////////////////////////////// +// change cwd +shelljs.cd( process.env['CMAKI_PWD'] ); +//////////////////////////////////////////////////////////////////////////////// + + +var 
dir_script; +var script = process.argv[2]; +if (is_win) +{ + if(fs.existsSync(path.join(process.cwd(), script+".cmd"))) + { + dir_script = process.cwd(); + } + else + { + dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); + } +} +else +{ + if(fs.existsSync(path.join(process.cwd(), script+".sh"))) + { + dir_script = process.cwd(); + } + else + { + dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); + } +} + +if (is_win) +{ + script_execute = path.join(dir_script, script+".cmd"); + exists = fs.existsSync(script_execute); + caller_execute = "cmd /c "; + script_execute = script_execute.replace(/\//g, "\\"); +} +else +{ + script_execute = path.join(dir_script, script+".sh"); + exists = fs.existsSync(script_execute); + caller_execute = "bash "; + script_execute = script_execute.replace(/\\/g, "/"); +} + +console.log("Execute: " + caller_execute + script_execute); + +if(exists) +{ + var child = shelljs.exec(caller_execute + script_execute, {async:true, silent:true}, function(err, stdout, stderr) { + process.exit(err); + }); + child.stdout.on('data', function(data) { + console.log(trim(data)); + }); + child.stderr.on('data', function(data) { + console.log(trim(data)); + }); +} +else +{ + console.log("[error] dont exits: " + script_execute); + process.exit(1); +} + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd new file mode 100644 index 0000000..2b6cea5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd @@ -0,0 +1,7 @@ +@echo off + +pip install pyyaml +if %errorlevel% neq 0 exit /b %errorlevel% + +pip install poster +if %errorlevel% neq 0 exit /b %errorlevel% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh new file mode 100644 index 0000000..e52dc93 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +if [[ "$OSTYPE" =~ ^linux ]]; then + curl -sL https://deb.nodesource.com/setup_8.x | sudo bash - + sudo apt install -y nodejs + sudo npm install -g npm + + # echo 'export PATH=$HOME/local/bin:$PATH' >> ~/.bashrc + # . ~/.bashrc + # mkdir ~/local + # mkdir ~/node-latest-install + # cd ~/node-latest-install + # curl http://nodejs.org/dist/node-latest.tar.gz | tar xz --strip-components=1 + # ./configure --prefix=~/local + # make install # ok, fine, this step probably takes more than 30 seconds... 
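
The cmaki.js launcher above resolves its single argument to a platform-specific script, preferring <name>.cmd or <name>.sh in the current directory and falling back to the bundled cmaki_scripts copy, then shells out with cmd /c or bash. A bash rendering of that lookup for the non-Windows branch (an illustration, not the shipped launcher):

```
#!/bin/bash
# Resolve <name>.sh the way cmaki.js does on non-Windows hosts.
name="$1"; shift
local_script="./${name}.sh"
bundled_script="${CMAKI_PWD:-$PWD}/node_modules/npm-mas-mas/cmaki_scripts/${name}.sh"

if [ -f "$local_script" ]; then
    script="$local_script"
elif [ -f "$bundled_script" ]; then
    script="$bundled_script"
else
    echo "[error] script not found: ${name}.sh" >&2
    exit 1
fi
exec bash "$script" "$@"
```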
+ # curl https://www.npmjs.org/install.sh | sh + # cd - + + sudo apt install -y lcov + sudo apt install -y cppcheck + sudo apt install -y libxaw7-dev # for OIS + sudo apt install -y libgl1-mesa-dev # flow glew + sudo apt install -y freeglut3 freeglut3-dev # for glu (needed for bullet2) + + # cmake 3.5 precompiled + DEPS_DIR=$(pwd)/deps + if [[ -d "$DEPS_DIR" ]]; then + rm -Rf $DEPS_DIR + fi + CMAKE_FILE=cmake-3.5.2-Linux-x86_64.tar.gz + CMAKE_URL=http://www.cmake.org/files/v3.5/${CMAKE_FILE} + wget ${CMAKE_URL} --quiet --no-check-certificate + mkdir -p cmake + tar -xzf ${CMAKE_FILE} -C cmake --strip-components 1 + mv cmake ${DEPS_DIR} + export PATH=${DEPS_DIR}/cmake/bin:${PATH} + cmake --version +else + /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" + brew update + brew doctor + export PATH="/usr/local/bin:$PATH" + brew install node + brew install cmake + brew install lcov + brew install cppcheck +fi +pip install --user pyyaml +pip install --user poster +pip install --user codecov diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd new file mode 100644 index 0000000..178869f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd @@ -0,0 +1,14 @@ +@echo off + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +echo running in mode %MODE% ... +cd %MODE% +cmake --build . --config %MODE% --target install +set lasterror=%errorlevel% +cd .. +exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh new file mode 100755 index 0000000..084a6ef --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh @@ -0,0 +1,16 @@ +#!/bin/bash +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_TARGET="${CMAKI_TARGET:-install}" +export COMPILER_BASENAME=$(basename ${CC}) + +echo "running in mode $MODE ... ($COMPILER_BASENAME)" +cd $COMPILER_BASENAME/$MODE + +# CORES=$(grep -c ^processor /proc/cpuinfo) +CORES=12 +cmake --build . --config $MODE --target $CMAKI_TARGET -- -j$CORES -k VERBOSE=1 || cmake --build . --config $MODE --target $CMAKI_TARGET -- -j1 VERBOSE=1 +code=$? +exit $code diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd new file mode 100644 index 0000000..ae010cb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd @@ -0,0 +1,28 @@ +@echo off + +if DEFINED COMPILER ( + echo Using COMPILER: %COMPILER% +) else ( + set COMPILER="Visual Studio" + echo Env var COMPILER is not defined. Using by default: %COMPILER% +) + +if DEFINED COMPILER_VERSION ( + echo Using COMPILER_VERSION: %COMPILER_VERSION% +) else ( + set COMPILER_VERSION=16 + echo Env var COMPILER_VERSION is not defined. Using by default: %COMPILER_VERSION% +) + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +if "%NPP_CI%" == "FALSE" ( + conan install . 
--build missing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% +) + +conan create . npm-mas-mas/testing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% -tf None + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh new file mode 100644 index 0000000..8e84f01 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -e + +export MODE="${MODE:-Debug}" +export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" + +if [ "$NPP_CI" == "FALSE" ]; then + conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION +fi + +conan create . npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh new file mode 100755 index 0000000..2b760f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh @@ -0,0 +1,22 @@ +#!/bin/bash +export IMAGE="${IMAGE:-linux-x64}" +export MODE="${MODE:-Debug}" +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export PACKAGE="${PACKAGE:-undefined}" + +docker run --rm makiolo/$IMAGE > ./dockcross-$IMAGE +sed -e "s#DEFAULT_DOCKCROSS_IMAGE=dockcross/$IMAGE#DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE#g" dockcross-$IMAGE > makiolo-$IMAGE +chmod +x ./makiolo-$IMAGE +if [ "$PACKAGE" == "undefined" ]; then + # CI + ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh | bash' +else + # build package + ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE -e PACKAGE=$PACKAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.sh | CMAKI_INSTALL=$(pwd)/bin bash' +fi +error=$? 
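
The docker.sh wrapper above downloads the dockcross helper for the chosen image, rebrands it to makiolo/$IMAGE, and then either runs the full CI or builds a single artifact inside the container, depending on whether PACKAGE is set. A usage sketch (the package name is only illustrative):

```
IMAGE=linux-x64 MODE=Debug ./docker.sh          # full CI run inside the cross image
IMAGE=windows-x64 PACKAGE=zlib ./docker.sh      # build one artifact instead of running CI
```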
+ +# clean container +docker rmi -f makiolo/$IMAGE + +exit $error diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd new file mode 100644 index 0000000..7b70325 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd @@ -0,0 +1,6 @@ +@echo off +git checkout -b tmp +git checkout master +git merge master +git pull + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh new file mode 100755 index 0000000..48c48f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e +git checkout -b tmp +git checkout master +git merge master +git pull + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh new file mode 100755 index 0000000..ec6e0f3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh @@ -0,0 +1,18 @@ +#!/bin/sh +PWD="`dirname \"$0\"`" + +cp -v $PWD/init/.travis.yml . +git add .travis.yml + +cp -v $PWD/init/appveyor.yml . +git add appveyor.yml + +cp -v $PWD/init/.clang-format . +git add .clang-format + +cp -v $PWD/init/.gitignore . +git add .gitignore + +cp -v $PWD/init/cmaki.yml . +git add cmaki.yml + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd new file mode 100644 index 0000000..3366ec8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd @@ -0,0 +1,30 @@ +@echo off + +:: IF DEFINED CMAKI_PWD ( +:: set CMAKI_PWD=%CMAKI_PWD% +:: ) else ( +:: set CMAKI_PWD=%CD% +:: ) +:: +:: IF DEFINED CMAKI_INSTALL ( +:: set CMAKI_INSTALL=%CMAKI_INSTALL% +:: ) else ( +:: set CMAKI_INSTALL=%CMAKI_PWD%/bin +:: ) + +IF DEFINED MODE ( + set MODE=%MODE% +) else ( + set MODE=Debug +) + +IF DEFINED YMLFILE ( + build --yaml=%YMLFILE% -d +) else ( + IF DEFINED PACKAGE ( + build %PACKAGE% -d + ) else ( + echo Error: must define env var YMLFILE or PACKAGE + ) +) + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh new file mode 100755 index 0000000..a0fd049 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +export MODE="${MODE:-Debug}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export PACKAGE="${PACKAGE:-undefined}" +export YMLFILE="${YMLFILE:-undefined}" + +if [ "$YMLFILE" == "undefined" ]; then + if [ "$PACKAGE" == "undefined" ]; then + echo Error: must define env var YMLFILE or PACKAGE + else + echo building $PACKAGE ... + ./build $PACKAGE --no-back-yaml --no-run-tests -d + fi +else + echo building from yaml file: ${YMLFILE} ... 
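
Both artifact builders above require one of YMLFILE or PACKAGE and print an error otherwise. A usage sketch, assuming the script is invoked from the directory that contains the ./build entry point and that the values shown exist:

```
YMLFILE=$PWD/cmaki.yml MODE=Release ./make_artifact.sh   # build from a recipe file
PACKAGE=zlib ./make_artifact.sh                          # build a named package
```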
+ ./build --yaml=${YMLFILE} --no-run-tests -d +fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd new file mode 100644 index 0000000..87c7d0c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd @@ -0,0 +1,3 @@ +@echo off +git push && npm publish + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh new file mode 100755 index 0000000..c74a96f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh @@ -0,0 +1,3 @@ +#!/bin/bash +git push && npm publish + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh new file mode 100755 index 0000000..97884f3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +MV="git mv" + +if [[ $3 == "run" ]]; +then + # do sed implace + run=" -i" +else + run="" +fi + +command="ag -w $1 -l --ignore artifacts --ignore node_modules --ignore gcc --ignore clang --ignore bin" +command_search_files="$command | grep -e $1.cpp$ -e $1.h$" +command_search_files_count="$command_search_files | xargs -I{} grep -h -e ^#include {} | grep -h $1 | wc -l" +count=$(eval $command_search_files_count) + +if [[ $count -gt 0 ]]; +then + echo "se renonbrara los siguientes ficheros (utilizando $MV):" + for file in $(eval $command_search_files); + do + destiny=$(echo $file | sed "s/\<$1\>/$2/g") + if [[ $3 == "run" ]]; + then + echo run: $MV $file $destiny + $MV $file $destiny + else + echo dry-run: $MV $file $destiny + fi + done +else + echo "No es necesario renombrar ficheros" +fi + +if [[ $3 == "run" ]]; +then + # echo run: "$command | xargs sed "s/\<$1\>/$2/g" $run" + echo replacing ... 
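
replace.sh above is a mass-rename helper: it uses ag to find a symbol, git mv to rename matching .cpp/.h files, and sed to rewrite the remaining references, running dry unless the third argument is "run". A usage sketch (ag must be installed; the class names are illustrative):

```
./replace.sh OldClassName NewClassName        # dry run: list the files and substitutions
./replace.sh OldClassName NewClassName run    # apply the git mv and in-place sed
```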
+else + echo replace in dry-run +fi +eval $command | xargs -I{} sed "s@\<$1\>@$2@g" $run {} + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd new file mode 100644 index 0000000..2acc40d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd @@ -0,0 +1,5 @@ +@echo off +call node_modules\cmaki\setup.cmd +call node_modules\cmaki\compile.cmd +call node_modules\cmaki\test.cmd + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh new file mode 100755 index 0000000..0e436b4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +ag -w --cpp $1 --ignore cmaki --ignore depends --ignore build --ignore cmaki_generator --ignore baul + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd new file mode 100644 index 0000000..8ac63c5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd @@ -0,0 +1,64 @@ +@echo off + +setlocal enableextensions + + +:: export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +:: export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" + +if DEFINED COMPILER ( + echo Using COMPILER: %COMPILER% +) else ( + set COMPILER="Visual Studio" + echo Env var COMPILER is not defined. Using by default: %COMPILER% +) + +if DEFINED COMPILER_VERSION ( + echo Using COMPILER_VERSION: %COMPILER_VERSION% +) else ( + set COMPILER_VERSION=16 + echo Env var COMPILER_VERSION is not defined. Using by default: %COMPILER_VERSION% +) + +if DEFINED GENERATOR ( + echo Using Visual Studio generator: %GENERATOR% +) else ( + set GENERATOR=Visual Studio 16 2019 + echo Env var GENERATOR is not defined. Using by default: %GENERATOR% +) + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +if "%Platform%" == "x86" ( + set ARCH=x86 +) else ( + set GENERATOR=%GENERATOR% Win64 + set ARCH=x86_64 +) + +echo running in mode %COMPILER% %COMPILER_VERSION% %ARCH% %MODE% ... 
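
One caveat about the defaults above: when GENERATOR is left at "Visual Studio 16 2019" and Platform is not x86, the script appends " Win64", a suffix that, as far as CMake's generator naming goes, only the pre-2019 generators accept; with VS 2019 the architecture is normally passed separately. A hedged sketch of the two invocation styles:

```
# VS 2019 generators select the architecture with -A:
cmake "$CMAKI_PWD" -G "Visual Studio 16 2019" -A x64 -DCMAKE_BUILD_TYPE=Debug
# pre-2019 style, which the " Win64" concatenation targets:
cmake "$CMAKI_PWD" -G "Visual Studio 15 2017 Win64" -DCMAKE_BUILD_TYPE=Debug
```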
+if exist %MODE% (rmdir /s /q %MODE%) +md %MODE% + +:: setup +cd %MODE% + +conan install %CMAKI_PWD% --build never -s build_type=%MODE% -s arch=%ARCH% -s arch_build=%ARCH% -s compiler=%COMPILER% -s compiler.version=%COMPILER_VERSION% + +IF DEFINED Configuration ( + IF DEFINED Platform ( + cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -G"%GENERATOR%" -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% + ) ELSE ( + cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% + ) +) ELSE ( + cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% +) + +set lasterror=%errorlevel% +cd %CMAKI_PWD% +exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh new file mode 100755 index 0000000..404e5a9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export COMPILER="${COMPILER:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" +export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" +export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export CMAKI_GENERATOR="${CMAKI_GENERATOR:-Unix Makefiles}" +export COVERAGE="${COVERAGE:-FALSE}" +export TESTS_VALGRIND="${TESTS_VALGRIND:-FALSE}" +export COMPILER_BASENAME=$(basename ${CC}) +export CMAKE_TOOLCHAIN_FILE="${CMAKE_TOOLCHAIN_FILE:-"no cross compile"}" +export BUILD_DIR="${BUILD_DIR:-${COMPILER_BASENAME}/${MODE}}" + +if [ "$CMAKE_TOOLCHAIN_FILE" == "no cross compile" ]; then + export CMAKE_TOOLCHAIN_FILE_FILEPATH="" +else + export CMAKE_TOOLCHAIN_FILE_FILEPATH=" -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" +fi + +echo "running in mode ${MODE} ... ($COMPILER_BASENAME) (${CC} / ${CXX})" + +# setup +if [ ! -d ${BUILD_DIR} ]; then + mkdir -p ${BUILD_DIR} +fi +echo BUILD_DIR=${BUILD_DIR} +cd ${BUILD_DIR} + +if [ -f "CMakeCache.txt" ]; then + rm CMakeCache.txt +fi + +export WITH_CONAN=0 +if [ -f "$CMAKI_PWD/conanfile.txt" ] || [ -f "$CMAKI_PWD/conanfile.py" ]; then + + if [ "$NPP_CI" == "FALSE" ]; then + conan install $CMAKI_PWD --build missing -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} + fi + + echo conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} + if ! conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION}; then + echo Error conan + exit 1 + fi + export WITH_CONAN=1 +fi + +cmake $CMAKI_PWD ${CMAKE_TOOLCHAIN_FILE_FILEPATH} -DCMAKE_MODULE_PATH=${CMAKI_PWD}/node_modules/npm-mas-mas/cmaki -DCMAKE_INSTALL_PREFIX=${CMAKI_INSTALL} -DCMAKE_BUILD_TYPE=${MODE} -DFIRST_ERROR=1 -G"${CMAKI_GENERATOR}" -DCMAKE_C_COMPILER="${CC}" -DCMAKE_CXX_COMPILER="${CXX}" -DNPP_CACHE=${NPP_CACHE} -DCOVERAGE=${COVERAGE} -DTESTS_VALGRIND=${TESTS_VALGRIND} -DWITH_CONAN=${WITH_CONAN} +code=$? 
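
In setup.sh above, COMPILER, COMPILER_LIBCXX and COMPILER_VERSION are all derived from the same "conan profile show | grep = | cut | tail -n1" pipeline, so they resolve to the value of the last key=value line rather than to three different settings. A sketch of what appears to be the intent, assuming conan 1.x key=value profile output:

```
# Read each setting from the default Conan profile explicitly (assumes conan 1.x output).
profile="$(conan profile show default)"
COMPILER="$(echo "$profile"         | grep '^compiler='         | cut -d'=' -f2)"
COMPILER_VERSION="$(echo "$profile" | grep '^compiler.version=' | cut -d'=' -f2)"
COMPILER_LIBCXX="$(echo "$profile"  | grep '^compiler.libcxx='  | cut -d'=' -f2)"
echo "conan profile: ${COMPILER} ${COMPILER_VERSION} (${COMPILER_LIBCXX})"
```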
+exit ${code} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd new file mode 100644 index 0000000..33ee4fa --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd @@ -0,0 +1,15 @@ +@echo off + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +echo running in mode %MODE% ... +cd %MODE% +ctest . --no-compress-output --output-on-failure -T Test -C %MODE% -V +set lasterror=%errorlevel% +cd .. + +if %lasterror% neq 0 exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh new file mode 100755 index 0000000..30ddf60 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh @@ -0,0 +1,52 @@ +#!/bin/bash +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export NOCODECOV="${NOCODECOV:-FALSE}" +export COVERAGE="${COVERAGE:-FALSE}" +export CPPCHECK="${CPPCHECK:-FALSE}" +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export COMPILER_BASENAME=$(basename ${CC}) + +echo "running in mode $MODE ... ($COMPILER_BASENAME)" +mkdir -p $COMPILER_BASENAME/$MODE +cd $COMPILER_BASENAME/$MODE + +# tests +ctest . --no-compress-output --output-on-failure -T Test -C $MODE -V +code=$? + +# posttests +if [ "$COVERAGE" == "TRUE" ]; then + if [[ "$CC" == "gcc" ]]; then + if [[ "$MODE" == "Debug" ]]; then + find ../.. -name "*.gcno" -o -name "*.gcda" + lcov -c -i -d ../.. -o coverage.base + # aggregate coverage + lcov -c -d ../.. -o coverage.run + # merge pre & run + lcov -d ../.. -a coverage.base -a coverage.run -o coverage.info + lcov -r coverage.info '/usr/*' -o coverage.info + lcov -r coverage.info 'tests/*' -o coverage.info + lcov -r coverage.info 'gtest/*' -o coverage.info + lcov -r coverage.info 'gmock/*' -o coverage.info + lcov -r coverage.info 'node_modules/*' -o coverage.info + # lcov -l coverage.info + genhtml --no-branch-coverage -o ../../coverage/ coverage.info + if [ "$NOCODECOV" == "FALSE" ]; then + bash <(curl -s https://codecov.io/bash) || echo "Codecov did not collect coverage reports" + fi + rm -f coverage.base coverage.run coverage.info + fi + fi +fi + +if [ "$CPPCHECK" == "TRUE" ]; then + if [[ "$CC" == "gcc" ]]; then + if [[ "$MODE" == "Debug" ]]; then + cppcheck -i ../../node_modules -i ../../$COMPILER_BASENAME --inconclusive --check-config --max-configs=10 --enable=all -UDEBUG --inline-suppr ../.. 
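
In test.sh above, coverage and static analysis are opt-in through environment variables and only run for gcc Debug builds; NOCODECOV=TRUE skips the codecov upload. A usage sketch, assuming setup.sh and compile.sh have already produced the gcc/Debug build tree:

```
CC=gcc MODE=Debug COVERAGE=TRUE NOCODECOV=TRUE ./node_modules/npm-mas-mas/cmaki_scripts/test.sh
CC=gcc MODE=Debug CPPCHECK=TRUE ./node_modules/npm-mas-mas/cmaki_scripts/test.sh   # static analysis only
```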
+ fi + fi +fi + +exit $code diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd new file mode 100644 index 0000000..74063e4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd @@ -0,0 +1,29 @@ +@echo off + +IF DEFINED CMAKI_PWD ( + set CMAKI_PWD=%CMAKI_PWD% +) else ( + set CMAKI_PWD=%CD% +) + +IF DEFINED CMAKI_INSTALL ( + set CMAKI_INSTALL=%CMAKI_INSTALL% +) else ( + set CMAKI_INSTALL=%CMAKI_PWD%/bin +) + +IF DEFINED MODE ( + set MODE=%MODE% +) else ( + set MODE=Debug +) + +set YMLFILE=%CMAKI_PWD%/cmaki.yml + +:: warning, TODO: detectar si hay cambios locales y avisar +git diff %CMAKI_PWD% + +cd %CMAKI_PWD%/node_modules/cmaki_generator +curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.cmd > __make_artifact.cmd +call __make_artifact.cmd +del __make_artifact.cmd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh new file mode 100755 index 0000000..a088a9e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh @@ -0,0 +1,12 @@ +#!/bin/bash -e + +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export YMLFILE=$CMAKI_PWD/cmaki.yml + +git diff $CMAKI_PWD +cd $CMAKI_PWD/node_modules/npm-mas-mas/cmaki_generator +../cmaki_scripts/make_artifact.sh + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd new file mode 100644 index 0000000..7d4bb06 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd @@ -0,0 +1,5 @@ +@echo off + +# upload package +conan upload '*' -r npm-mas-mas --all -c + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh new file mode 100644 index 0000000..f62d19d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -e + +# upload package +conan upload '*' -r npm-mas-mas --all -c + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml new file mode 100644 index 0000000..8c0ae81 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml @@ -0,0 +1,32 @@ +version: '3' +services: + linux64: + build: + context: . + dockerfile: ./docker/Dockerfile.linux-x64 + environment: + - NPP_SERVER=http://servfactor/cpp + command: make clean build + volumes: + - .:/work + + windows64: + build: + context: . + dockerfile: ./docker/Dockerfile.windows-x64 + environment: + - NPP_SERVER=http://servfactor/cpp + command: make clean build + volumes: + - .:/work + + android64: + build: + context: . 
+ dockerfile: ./docker/Dockerfile.android-arm64 + environment: + - NPP_SERVER=http://servfactor/cpp + command: make clean build + volumes: + - .:/work + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 new file mode 100644 index 0000000..e5b726a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 @@ -0,0 +1,9 @@ +FROM dockcross/android-arm64 +ENV PYTHONUNBUFFERED 1 +RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - +RUN apt install -y nodejs +RUN npm install -g npm +WORKDIR /work +ADD requirements.txt /work +RUN pip install -r requirements.txt + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 new file mode 100644 index 0000000..4a132bd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 @@ -0,0 +1,16 @@ +FROM dockcross/linux-x64 +ENV PYTHONUNBUFFERED 1 +RUN echo 'deb http://ftp.us.debian.org/debian testing main contrib non-free' > /etc/apt/sources.list.d/gcc.testing.list +RUN apt-get update +RUN apt-get install -y -t testing g++ +RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - +RUN apt install -y nodejs +RUN npm install -g npm +RUN apt install -y libgl1-mesa-dev +RUN apt install -y libx11-dev +RUN apt install -y python3-pip +WORKDIR /work +RUN pip3 install conan==1.6.1 +RUN pip3 install pyyaml==3.13 +RUN pip3 install requests==2.19.1 + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 new file mode 100644 index 0000000..d30d465 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 @@ -0,0 +1,9 @@ +FROM dockcross/windows-x64 +ENV PYTHONUNBUFFERED 1 +RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - +RUN apt install -y nodejs +RUN npm install -g npm +WORKDIR /work +ADD requirements.txt /work +RUN pip install -r requirements.txt + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh new file mode 100755 index 0000000..122cdaf --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh @@ -0,0 +1,21 @@ +#!//bin/bash + +export MODE="${MODE:-Debug}" +export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" + +if [ "$(uname)" == "Darwin" ]; then + # mac + export COMPILER=apple-clang COMPILER_VERSION=10.0 COMPILER_LIBCXX=libc++ +fi + +# compile 3rd parties +# conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION +# conan create . 
npm-mas-mas/testing --build $PACKAGE -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None +# conan upload $PACKAGE/*@npm-mas-mas/testing -r npm-mas-mas --all -c + +# compile only $PACKAGE +conan create . npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None +conan upload *@npm-mas-mas/testing -r npm-mas-mas --all -c + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json new file mode 100644 index 0000000..72449de --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json @@ -0,0 +1,29 @@ +{ + "name": "npm-mas-mas", + "version": "0.0.1", + "description": "npm extension for use packing system with C++", + "bin": { + "cmaki": "./cmaki_scripts/cmaki.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/makiolo/npm-mas-mas.git" + }, + "keywords": [ + "cmake", + "c++", + "artifacts" + ], + "author": "Ricardo Marmolejo García", + "license": "MIT", + "bugs": { + "url": "https://github.com/makiolo/npm-mas-mas/issues" + }, + "homepage": "https://github.com/makiolo/npm-mas-mas#readme", + "dependencies": { + "shelljs": ">=0.8.5" + } +} + + + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile new file mode 100644 index 0000000..883467b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile @@ -0,0 +1,15 @@ +FROM nimmis/apache-php5 + +MAINTAINER Ricardo Marmolejo García + +RUN echo "upload_max_filesize=800M" >> /etc/php5/apache2/php.ini +RUN echo "post_max_size=800M" >> /etc/php5/apache2/php.ini +RUN echo "max_input_time=300" >> /etc/php5/apache2/php.ini +RUN echo "max_execution_time=300" >> /etc/php5/apache2/php.ini +RUN echo "error_reporting = E_ALL" >> /etc/php5/apache2/php.ini +RUN echo "display_errors = On" >> /etc/php5/apache2/php.ini + +WORKDIR /var/www/html/cpp +RUN mkdir -p /var/www/html/packages +RUN chmod -R 777 /var/www/html/packages + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile new file mode 100644 index 0000000..efbcbeb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile @@ -0,0 +1,3 @@ +all: + docker-compose up -d --build + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md new file mode 100644 index 0000000..31554b7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md @@ -0,0 +1,4 @@ +need edit php.ini: + +upload_max_filesize = 500M +post_max_size = 500M diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md new file mode 100644 index 0000000..7f52707 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md @@ -0,0 +1,10 @@ +# servfactor +- default artifacts path is $(pwd)/packages 
(can use symbolic links) +- chmod o+w packages/ +- chmod o+w packages/stats.txt + +# php.ini +- upload_max_filesize=800M +- post_max_size=800M +- max_input_time=300 +- max_execution_time=300 diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml new file mode 100644 index 0000000..9d85ed5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml @@ -0,0 +1,11 @@ +version: '3' +services: + servfactor: + build: . + volumes: + - .:/var/www/html/cpp + - ./packages:/var/www/html/packages + ports: + - "8080:80" + restart: always + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php new file mode 100755 index 0000000..6f536d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php @@ -0,0 +1,58 @@ + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php new file mode 100755 index 0000000..6881558 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php @@ -0,0 +1,227 @@ += 1024 && $i < ( count( $types ) -1 ); $bytes /= 1024, $i++ ); + return( round( $bytes, 2 ) . " " . $types[$i] ); +} + +if(!$quiet_mode) +{ + +/* get disk space free (in bytes) */ +$df = disk_free_space($packages_dir); +/* and get disk space total (in bytes) */ +$dt = disk_total_space($packages_dir); +/* now we calculate the disk space used (in bytes) */ +$du = $dt - $df; +/* percentage of disk used - this will be used to also set the width % of the progress bar */ +$dp = sprintf('%.2f',($du / $dt) * 100); + +/* and we formate the size from bytes to MB, GB, etc. */ +$df = formatSize($df); +$du = formatSize($du); +$dt = formatSize($dt); + +?> + + + + +
+
% Disk Used
+
+
+ + + +
+
+format("c")); + } +} +arsort($arr); +$arr = array_keys($arr); + +if(!$quiet_mode) +{ + if($dp > 95) + { + for ($i = 1; $i <= 10; $i++) { + $last_file = array_pop($arr); + if(u::ends_with($last_file, "-cmake.tar.gz")) + { + $big_file = str_replace("-cmake.tar.gz", ".tar.gz", $last_file); + if(!unlink($dir . $last_file)) + { + echo "error removing ".$last_file."
"; + } + else + { + echo "removed ".$last_file."
"; + } + + if(!unlink($dir . $big_file)) + { + echo "error removing ".$dir.$big_file."
"; + } + else + { + echo "removed ".$dir.$big_file."
"; + } + break; + } + } + } +} + +foreach($arr as $file) +{ + // bug si el package tiene "-" + if(u::ends_with($file, "-cmake.tar.gz")) + { + // $substance = $file; + + preg_match('/([\w-]+)-([0-9\.]+)-([\w-\.]+)-cmake.tar.gz/', $file, $matches); + $package = $matches[1]; + $version = $matches[2]; + $platform = $matches[3]; + + // $substance = substr($substance, 0, strrpos($substance, "-")); + // $platform = substr($substance, strrpos($substance, "-")+1); + // $substance = substr($substance, 0, strrpos($substance, "-")); + // $version = substr($substance, strrpos($substance, "-")+1); + // $substance = substr($substance, 0, strrpos($substance, "-")); + // $package = $substance; + if(!isset($_REQUEST['platform']) || ($_REQUEST['platform'] == $platform)) + { + $hits_info = get_hits($data, $file); + $hits = $hits_info[0]; + $last_download = $hits_info[1]; + if($last_download === NULL) + { + if(!$quiet_mode) + { + $formatted = "never downloaded"; + } + else + { + $formatted = "---"; + } + } + else + { + if(!$quiet_mode) + { + $formatted = $last_download->format("d-m-Y H:i"); + } + else + { + $formatted = $last_download->format("c"); + } + } + if(!$quiet_mode) + { + echo "" . $package ." (" . $version . ") "; + if($hits > 0) + { + echo "$platform (".$hits." hits, last use: ".$formatted.")"; + } + else + { + echo "$platform (".$hits." hits)"; + } + echo "
"; + } + else + { + print $package.";".$version.";".$platform.";"."download.php?file=".$file.";".$hits.";".$formatted."\n"; + } + } + } +} + +?> + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md new file mode 100644 index 0000000..734fc3d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md @@ -0,0 +1,2 @@ +packages dir + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php new file mode 100644 index 0000000..1ab9900 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php @@ -0,0 +1,68 @@ +
"; + }; + $f = fopen($stats, 'r'); + $data = fread($f, filesize($stats)); + $data = unserialize($data); + fclose($f); + } + else + { + $data = array(); + } + + return $data; +} + +function inc_stats($data, $key) +{ + $key = basename($key); + + if(array_key_exists($key, $data)) + { + $data[$key][0] = $data[$key][0] + 1; + $data[$key][1] = new DateTime('NOW'); + } + else + { + $data[$key] = array(1, new DateTime('NOW')); + } + return $data; +} + +function get_hits($data, $key) +{ + $key = basename($key); + + if(array_key_exists($key, $data)) + { + return $data[$key]; + } + else + { + return array(0, NULL); + } +} + +function write_stats($data) +{ + global $stats; + $f = fopen($stats, 'w'); + $data = serialize($data); + fwrite($f, $data); + fclose($f); +} + +?> + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php new file mode 100644 index 0000000..f57bc22 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php @@ -0,0 +1,76 @@ +" . $artifacts . ""; + +if(!is_writable($artifacts)) +{ + echo "I don't have permission
"; + exit(1); +} + +$uploaded_file = $artifacts . "/" . basename($_FILES['uploaded']['name']); + +// if(isset($_FILES['uploaded']) && file_exists($uploaded_file)) +// { +// echo "file: ".$uploaded_file." already esxists!"; +// exit(1); +// } + +if (move_uploaded_file($_FILES['uploaded']['tmp_name'], $uploaded_file)) +{ + echo "El fichero es valido y se subio con exito: ". $uploaded_file .".\n"; +} +else +{ +?> +
+    Send this file: + +
+
+ + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php new file mode 100755 index 0000000..ac69f78 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php @@ -0,0 +1,2584 @@ + + * @link http://github.com/brandonwamboldt/utilphp/ Official Documentation + */ +class util +{ + /** + * A constant representing the number of seconds in a minute, for + * making code more verbose + * + * @var integer + */ + const SECONDS_IN_A_MINUTE = 60; + + /** + * A constant representing the number of seconds in an hour, for making + * code more verbose + * + * @var integer + */ + const SECONDS_IN_A_HOUR = 3600; + const SECONDS_IN_AN_HOUR = 3600; + + /** + * A constant representing the number of seconds in a day, for making + * code more verbose + * + * @var integer + */ + const SECONDS_IN_A_DAY = 86400; + + /** + * A constant representing the number of seconds in a week, for making + * code more verbose + * + * @var integer + */ + const SECONDS_IN_A_WEEK = 604800; + + /** + * A constant representing the number of seconds in a month (30 days), + * for making code more verbose + * + * @var integer + */ + const SECONDS_IN_A_MONTH = 2592000; + + /** + * A constant representing the number of seconds in a year (365 days), + * for making code more verbose + * + * @var integer + */ + const SECONDS_IN_A_YEAR = 31536000; + + /** + * URL constants as defined in the PHP Manual under "Constants usable with + * http_build_url()". + * + * @see http://us2.php.net/manual/en/http.constants.php#http.constants.url + */ + const HTTP_URL_REPLACE = 1; + const HTTP_URL_JOIN_PATH = 2; + const HTTP_URL_JOIN_QUERY = 4; + const HTTP_URL_STRIP_USER = 8; + const HTTP_URL_STRIP_PASS = 16; + const HTTP_URL_STRIP_AUTH = 32; + const HTTP_URL_STRIP_PORT = 64; + const HTTP_URL_STRIP_PATH = 128; + const HTTP_URL_STRIP_QUERY = 256; + const HTTP_URL_STRIP_FRAGMENT = 512; + const HTTP_URL_STRIP_ALL = 1024; + + /** + * A collapse icon, using in the dump_var function to allow collapsing + * an array or object + * + * @var string + */ + public static $icon_collapse = 
'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFNzFDNDQyNEMyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFNzFDNDQyM0MyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NDlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PuF4AWkAAAA2UExURU9t2DBStczM/1h16DNmzHiW7iNFrypMvrnD52yJ4ezs7Onp6ejo6P///+Tk5GSG7D9h5SRGq0Q2K74AAAA/SURBVHjaLMhZDsAgDANRY3ZISnP/y1ZWeV+jAeuRSky6cKL4ryDdSggP8UC7r6GvR1YHxjazPQDmVzI/AQYAnFQDdVSJ80EAAAAASUVORK5CYII='; + + /** + * A collapse icon, using in the dump_var function to allow collapsing + * an array or object + * + * @var string + */ + public static $icon_expand = 'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFQzZERTJDNEMyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFQzZERTJDM0MyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3MzlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PkmDvWIAAABIUExURU9t2MzM/3iW7ubm59/f5urq85mZzOvr6////9ra38zMzObm5rfB8FZz5myJ4SNFrypMvjBStTNmzOvr+mSG7OXl8T9h5SRGq/OfqCEAAABKSURBVHjaFMlbEoAwCEPRULXF2jdW9r9T4czcyUdA4XWB0IgdNSybxU9amMzHzDlPKKu7Fd1e6+wY195jW0ARYZECxPq5Gn8BBgCr0gQmxpjKAwAAAABJRU5ErkJggg=='; + + private static $hasArray = false; + + /** + * Map of special non-ASCII characters and suitable ASCII replacement + * characters. 
+ * + * Part of the URLify.php Project + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + public static $maps = array( + 'de' => array(/* German */ + 'Ä' => 'Ae', 'Ö' => 'Oe', 'Ü' => 'Ue', 'ä' => 'ae', 'ö' => 'oe', 'ü' => 'ue', 'ß' => 'ss', + 'ẞ' => 'SS' + ), + 'latin' => array( + 'À' => 'A', 'Á' => 'A', 'Â' => 'A', 'Ã' => 'A', 'Ä' => 'A', 'Å' => 'A','Ă' => 'A', 'Æ' => 'AE', 'Ç' => + 'C', 'È' => 'E', 'É' => 'E', 'Ê' => 'E', 'Ë' => 'E', 'Ì' => 'I', 'Í' => 'I', 'Î' => 'I', + 'Ï' => 'I', 'Ð' => 'D', 'Ñ' => 'N', 'Ò' => 'O', 'Ó' => 'O', 'Ô' => 'O', 'Õ' => 'O', 'Ö' => + 'O', 'Ő' => 'O', 'Ø' => 'O','Ș' => 'S','Ț' => 'T', 'Ù' => 'U', 'Ú' => 'U', 'Û' => 'U', 'Ü' => 'U', 'Ű' => 'U', + 'Ý' => 'Y', 'Þ' => 'TH', 'ß' => 'ss', 'à' => 'a', 'á' => 'a', 'â' => 'a', 'ã' => 'a', 'ä' => + 'a', 'å' => 'a', 'ă' => 'a', 'æ' => 'ae', 'ç' => 'c', 'è' => 'e', 'é' => 'e', 'ê' => 'e', 'ë' => 'e', + 'ì' => 'i', 'í' => 'i', 'î' => 'i', 'ï' => 'i', 'ð' => 'd', 'ñ' => 'n', 'ò' => 'o', 'ó' => + 'o', 'ô' => 'o', 'õ' => 'o', 'ö' => 'o', 'ő' => 'o', 'ø' => 'o', 'ș' => 's', 'ț' => 't', 'ù' => 'u', 'ú' => 'u', + 'û' => 'u', 'ü' => 'u', 'ű' => 'u', 'ý' => 'y', 'þ' => 'th', 'ÿ' => 'y' + ), + 'latin_symbols' => array( + '©' => '(c)', + '®' => '(r)' + ), + 'el' => array(/* Greek */ + 'α' => 'a', 'β' => 'b', 'γ' => 'g', 'δ' => 'd', 'ε' => 'e', 'ζ' => 'z', 'η' => 'h', 'θ' => '8', + 'ι' => 'i', 'κ' => 'k', 'λ' => 'l', 'μ' => 'm', 'ν' => 'n', 'ξ' => '3', 'ο' => 'o', 'π' => 'p', + 'ρ' => 'r', 'σ' => 's', 'τ' => 't', 'υ' => 'y', 'φ' => 'f', 'χ' => 'x', 'ψ' => 'ps', 'ω' => 'w', + 'ά' => 'a', 'έ' => 'e', 'ί' => 'i', 'ό' => 'o', 'ύ' => 'y', 'ή' => 'h', 'ώ' => 'w', 'ς' => 's', + 'ϊ' => 'i', 'ΰ' => 'y', 'ϋ' => 'y', 'ΐ' => 'i', + 'Α' => 'A', 'Β' => 'B', 'Γ' => 'G', 'Δ' => 'D', 'Ε' => 'E', 'Ζ' => 'Z', 'Η' => 'H', 'Θ' => '8', + 'Ι' => 'I', 'Κ' => 'K', 'Λ' => 'L', 'Μ' => 'M', 'Ν' => 'N', 'Ξ' => '3', 'Ο' => 'O', 'Π' => 'P', + 'Ρ' => 'R', 'Σ' => 'S', 'Τ' => 'T', 'Υ' => 'Y', 'Φ' => 'F', 'Χ' => 'X', 'Ψ' => 'PS', 'Ω' => 'W', + 'Ά' => 'A', 'Έ' => 'E', 'Ί' => 'I', 'Ό' => 'O', 'Ύ' => 'Y', 'Ή' => 'H', 'Ώ' => 'W', 'Ϊ' => 'I', + 'Ϋ' => 'Y' + ), + 'tr' => array(/* Turkish */ + 'ş' => 's', 'Ş' => 'S', 'ı' => 'i', 'İ' => 'I', 'ç' => 'c', 'Ç' => 'C', 'ü' => 'u', 'Ü' => 'U', + 'ö' => 'o', 'Ö' => 'O', 'ğ' => 'g', 'Ğ' => 'G' + ), + 'ru' => array(/* Russian */ + 'а' => 'a', 'б' => 'b', 'в' => 'v', 'г' => 'g', 'д' => 'd', 'е' => 'e', 'ё' => 'yo', 'ж' => 'zh', + 'з' => 'z', 'и' => 'i', 'й' => 'j', 'к' => 'k', 'л' => 'l', 'м' => 'm', 'н' => 'n', 'о' => 'o', + 'п' => 'p', 'р' => 'r', 'с' => 's', 'т' => 't', 'у' => 'u', 'ф' => 'f', 'х' => 'h', 'ц' => 'c', + 'ч' => 'ch', 'ш' => 'sh', 'щ' => 'sh', 'ъ' => '', 'ы' => 'y', 'ь' => '', 'э' => 'e', 'ю' => 'yu', + 'я' => 'ya', + 'А' => 'A', 'Б' => 'B', 'В' => 'V', 'Г' => 'G', 'Д' => 'D', 'Е' => 'E', 'Ё' => 'Yo', 'Ж' => 'Zh', + 'З' => 'Z', 'И' => 'I', 'Й' => 'J', 'К' => 'K', 'Л' => 'L', 'М' => 'M', 'Н' => 'N', 'О' => 'O', + 'П' => 'P', 'Р' => 'R', 'С' => 'S', 'Т' => 'T', 'У' => 'U', 'Ф' => 'F', 'Х' => 'H', 'Ц' => 'C', + 'Ч' => 'Ch', 'Ш' => 'Sh', 'Щ' => 'Sh', 'Ъ' => '', 'Ы' => 'Y', 'Ь' => '', 'Э' => 'E', 'Ю' => 'Yu', + 'Я' => 'Ya', + '№' => '' + ), + 'uk' => array(/* Ukrainian */ + 'Є' => 'Ye', 'І' => 'I', 'Ї' => 'Yi', 'Ґ' => 'G', 'є' => 'ye', 'і' => 'i', 'ї' => 'yi', 'ґ' => 'g' + ), + 'cs' => array(/* Czech */ + 'č' => 'c', 'ď' => 'd', 'ě' => 'e', 'ň' => 'n', 'ř' => 'r', 'š' => 's', 'ť' => 't', 'ů' => 'u', + 'ž' => 'z', 'Č' => 'C', 'Ď' => 'D', 'Ě' => 'E', 'Ň' => 'N', 'Ř' => 'R', 'Š' => 'S', 'Ť' => 'T', 
+ 'Ů' => 'U', 'Ž' => 'Z' + ), + 'pl' => array(/* Polish */ + 'ą' => 'a', 'ć' => 'c', 'ę' => 'e', 'ł' => 'l', 'ń' => 'n', 'ó' => 'o', 'ś' => 's', 'ź' => 'z', + 'ż' => 'z', 'Ą' => 'A', 'Ć' => 'C', 'Ę' => 'e', 'Ł' => 'L', 'Ń' => 'N', 'Ó' => 'O', 'Ś' => 'S', + 'Ź' => 'Z', 'Ż' => 'Z' + ), + 'ro' => array(/* Romanian */ + 'ă' => 'a', 'â' => 'a', 'î' => 'i', 'ș' => 's', 'ț' => 't', 'Ţ' => 'T', 'ţ' => 't' + ), + 'lv' => array(/* Latvian */ + 'ā' => 'a', 'č' => 'c', 'ē' => 'e', 'ģ' => 'g', 'ī' => 'i', 'ķ' => 'k', 'ļ' => 'l', 'ņ' => 'n', + 'š' => 's', 'ū' => 'u', 'ž' => 'z', 'Ā' => 'A', 'Č' => 'C', 'Ē' => 'E', 'Ģ' => 'G', 'Ī' => 'i', + 'Ķ' => 'k', 'Ļ' => 'L', 'Ņ' => 'N', 'Š' => 'S', 'Ū' => 'u', 'Ž' => 'Z' + ), + 'lt' => array(/* Lithuanian */ + 'ą' => 'a', 'č' => 'c', 'ę' => 'e', 'ė' => 'e', 'į' => 'i', 'š' => 's', 'ų' => 'u', 'ū' => 'u', 'ž' => 'z', + 'Ą' => 'A', 'Č' => 'C', 'Ę' => 'E', 'Ė' => 'E', 'Į' => 'I', 'Š' => 'S', 'Ų' => 'U', 'Ū' => 'U', 'Ž' => 'Z' + ), + 'vn' => array(/* Vietnamese */ + 'Á' => 'A', 'À' => 'A', 'Ả' => 'A', 'Ã' => 'A', 'Ạ' => 'A', 'Ă' => 'A', 'Ắ' => 'A', 'Ằ' => 'A', 'Ẳ' => 'A', 'Ẵ' => 'A', 'Ặ' => 'A', 'Â' => 'A', 'Ấ' => 'A', 'Ầ' => 'A', 'Ẩ' => 'A', 'Ẫ' => 'A', 'Ậ' => 'A', + 'á' => 'a', 'à' => 'a', 'ả' => 'a', 'ã' => 'a', 'ạ' => 'a', 'ă' => 'a', 'ắ' => 'a', 'ằ' => 'a', 'ẳ' => 'a', 'ẵ' => 'a', 'ặ' => 'a', 'â' => 'a', 'ấ' => 'a', 'ầ' => 'a', 'ẩ' => 'a', 'ẫ' => 'a', 'ậ' => 'a', + 'É' => 'E', 'È' => 'E', 'Ẻ' => 'E', 'Ẽ' => 'E', 'Ẹ' => 'E', 'Ê' => 'E', 'Ế' => 'E', 'Ề' => 'E', 'Ể' => 'E', 'Ễ' => 'E', 'Ệ' => 'E', + 'é' => 'e', 'è' => 'e', 'ẻ' => 'e', 'ẽ' => 'e', 'ẹ' => 'e', 'ê' => 'e', 'ế' => 'e', 'ề' => 'e', 'ể' => 'e', 'ễ' => 'e', 'ệ' => 'e', + 'Í' => 'I', 'Ì' => 'I', 'Ỉ' => 'I', 'Ĩ' => 'I', 'Ị' => 'I', 'í' => 'i', 'ì' => 'i', 'ỉ' => 'i', 'ĩ' => 'i', 'ị' => 'i', + 'Ó' => 'O', 'Ò' => 'O', 'Ỏ' => 'O', 'Õ' => 'O', 'Ọ' => 'O', 'Ô' => 'O', 'Ố' => 'O', 'Ồ' => 'O', 'Ổ' => 'O', 'Ỗ' => 'O', 'Ộ' => 'O', 'Ơ' => 'O', 'Ớ' => 'O', 'Ờ' => 'O', 'Ở' => 'O', 'Ỡ' => 'O', 'Ợ' => 'O', + 'ó' => 'o', 'ò' => 'o', 'ỏ' => 'o', 'õ' => 'o', 'ọ' => 'o', 'ô' => 'o', 'ố' => 'o', 'ồ' => 'o', 'ổ' => 'o', 'ỗ' => 'o', 'ộ' => 'o', 'ơ' => 'o', 'ớ' => 'o', 'ờ' => 'o', 'ở' => 'o', 'ỡ' => 'o', 'ợ' => 'o', + 'Ú' => 'U', 'Ù' => 'U', 'Ủ' => 'U', 'Ũ' => 'U', 'Ụ' => 'U', 'Ư' => 'U', 'Ứ' => 'U', 'Ừ' => 'U', 'Ử' => 'U', 'Ữ' => 'U', 'Ự' => 'U', + 'ú' => 'u', 'ù' => 'u', 'ủ' => 'u', 'ũ' => 'u', 'ụ' => 'u', 'ư' => 'u', 'ứ' => 'u', 'ừ' => 'u', 'ử' => 'u', 'ữ' => 'u', 'ự' => 'u', + 'Ý' => 'Y', 'Ỳ' => 'Y', 'Ỷ' => 'Y', 'Ỹ' => 'Y', 'Ỵ' => 'Y', 'ý' => 'y', 'ỳ' => 'y', 'ỷ' => 'y', 'ỹ' => 'y', 'ỵ' => 'y', + 'Đ' => 'D', 'đ' => 'd' + ), + 'ar' => array(/* Arabic */ + 'أ' => 'a', 'ب' => 'b', 'ت' => 't', 'ث' => 'th', 'ج' => 'g', 'ح' => 'h', 'خ' => 'kh', 'د' => 'd', + 'ذ' => 'th', 'ر' => 'r', 'ز' => 'z', 'س' => 's', 'ش' => 'sh', 'ص' => 's', 'ض' => 'd', 'ط' => 't', + 'ظ' => 'th', 'ع' => 'aa', 'غ' => 'gh', 'ف' => 'f', 'ق' => 'k', 'ك' => 'k', 'ل' => 'l', 'م' => 'm', + 'ن' => 'n', 'ه' => 'h', 'و' => 'o', 'ي' => 'y' + ), + 'sr' => array(/* Serbian */ + 'ђ' => 'dj', 'ј' => 'j', 'љ' => 'lj', 'њ' => 'nj', 'ћ' => 'c', 'џ' => 'dz', 'đ' => 'dj', + 'Ђ' => 'Dj', 'Ј' => 'j', 'Љ' => 'Lj', 'Њ' => 'Nj', 'Ћ' => 'C', 'Џ' => 'Dz', 'Đ' => 'Dj' + ), + 'az' => array(/* Azerbaijani */ + 'ç' => 'c', 'ə' => 'e', 'ğ' => 'g', 'ı' => 'i', 'ö' => 'o', 'ş' => 's', 'ü' => 'u', + 'Ç' => 'C', 'Ə' => 'E', 'Ğ' => 'G', 'İ' => 'I', 'Ö' => 'O', 'Ş' => 'S', 'Ü' => 'U' + ), + 'fi' => array(/* Finnish */ + 'ä' => 'a', + 'ö' => 'o' + ), + ); + + /** + * The character map for the 
designated language + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $map = array(); + + /** + * The character list as a string. + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $chars = ''; + + /** + * The character list as a regular expression. + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $regex = ''; + + /** + * The current language + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $language = ''; + + /** + * Initializes the character map. + * + * Part of the URLify.php Project + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static function initLanguageMap($language = '') + { + if (count(self::$map) > 0 && (($language == '') || ($language == self::$language))) { + return; + } + + // Is a specific map associated with $language? + if (isset(self::$maps[$language]) && is_array(self::$maps[$language])) { + // Move this map to end. This means it will have priority over others + $m = self::$maps[$language]; + unset(self::$maps[$language]); + self::$maps[$language] = $m; + } + + // Reset static vars + self::$language = $language; + self::$map = array(); + self::$chars = ''; + + foreach (self::$maps as $map) { + foreach ($map as $orig => $conv) { + self::$map[$orig] = $conv; + self::$chars .= $orig; + } + } + + self::$regex = '/[' . self::$chars . ']/u'; + } + + /** + * Remove the duplicates from an array. + * + * This is faster version than the builtin array_unique(). + * + * Notes on time requirements: + * array_unique -> O(n log n) + * array_flip -> O(n) + * + * http://stackoverflow.com/questions/8321620/array-unique-vs-array-flip + * http://php.net/manual/en/function.array-unique.php + * + * @param $array + * @return $array + */ + public static function fast_array_unique($array) + { + $array = array_keys(array_flip($array)); + + return $array; + } + + /** + * Access an array index, retrieving the value stored there if it + * exists or a default if it does not. This function allows you to + * concisely access an index which may or may not exist without + * raising a warning. 
+ * + * @param array $var Array value to access + * @param mixed $default Default value to return if the key is not + * present in the array + * @return mixed + */ + public static function array_get(&$var, $default = null) + { + if (isset($var)) { + return $var; + } + + return $default; + } + + /** + * Display a variable's contents using nice HTML formatting and will + * properly display the value of booleans as true or false + * + * @see recursiveVarDumpHelper() + * + * @param mixed $var The variable to dump + * @return string + */ + public static function var_dump($var, $return = false, $expandLevel = 1) + { + self::$hasArray = false; + $toggScript = 'var colToggle = function(toggID) {var img = document.getElementById(toggID);if (document.getElementById(toggID + "-collapsable").style.display == "none") {document.getElementById(toggID + "-collapsable").style.display = "inline";setImg(toggID, 0);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling;while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}} else {document.getElementById(toggID + "-collapsable").style.display = "none";setImg(toggID, 1);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling; while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}}};'; + $imgScript = 'var setImg = function(objID,imgID,addStyle) {var imgStore = ["data:image/png;base64,' . self::$icon_collapse . '", "data:image/png;base64,' . self::$icon_expand . '"];if (objID) {document.getElementById(objID).setAttribute("src", imgStore[imgID]);if (addStyle){document.getElementById(objID).setAttribute("style", "position:relative;left:-5px;top:-1px;cursor:pointer;");}}};'; + $jsCode = preg_replace('/ +/', ' ', ''); + $html = '
';
+        $done  = array();
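+        // $done collects values already rendered so self-referencing arrays/objects
+        // are reported as *RECURSION DETECTED* instead of recursing forever.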
+        $html .= self::recursiveVarDumpHelper($var, intval($expandLevel), 0, $done);
+        $html .= '</pre>';
+
+        if (self::$hasArray) {
+            $html = $jsCode . $html;
+        }
+
+        if (!$return) {
+            echo $html;
+        }
+
+        return $html;
+    }
+
+    /**
+     * Display a variable's contents using nice HTML formatting (Without
+     * the <pre> tag) and will properly display the values of variables
+     * like booleans and resources. Supports collapsable arrays and objects
+     * as well.
+     *
+     * @param  mixed $var The variable to dump
+     * @return string
+     */
+    protected static function recursiveVarDumpHelper($var, $expLevel, $depth = 0, $done = array())
+    {
+        $html = '';
+
+        if ($expLevel > 0) {
+            $expLevel--;
+            $setImg = 0;
+            $setStyle = 'display:inline;';
+        } elseif ($expLevel == 0) {
+            $setImg = 1;
+            $setStyle='display:none;';
+        } elseif ($expLevel < 0) {
+            $setImg = 0;
+            $setStyle = 'display:inline;';
+        }
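+        // $setImg selects the collapse (0) or expand (1) icon and $setStyle decides
+        // whether nested children start visible, based on how many levels the caller
+        // asked to pre-expand.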
+
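+        // Scalars, NULL and resources are rendered inline; arrays and objects recurse
+        // below and are wrapped in collapsible markup.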
+        if (is_bool($var)) {
+            $html .= 'bool(' . (($var) ? 'true' : 'false') . ')';
+        } elseif (is_int($var)) {
+            $html .= 'int(' . $var . ')';
+        } elseif (is_float($var)) {
+            $html .= 'float(' . $var . ')';
+        } elseif (is_string($var)) {
+            $html .= 'string(' . strlen($var) . ') "' . self::htmlentities($var) . '"';
+        } elseif (is_null($var)) {
+            $html .= 'NULL';
+        } elseif (is_resource($var)) {
+            $html .= 'resource("' . get_resource_type($var) . '") "' . $var . '"';
+        } elseif (is_array($var)) {
+            // Check for recursion
+            if ($depth > 0) {
+                foreach ($done as $prev) {
+                    if ($prev === $var) {
+                        $html .= 'array(' . count($var) . ') *RECURSION DETECTED*';
+                        return $html;
+                    }
+                }
+
+                // Keep track of variables we have already processed to detect recursion
+                $done[] = &$var;
+            }
+
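+            // Remember that collapsible output was produced; var_dump() only prepends
+            // the collapse/expand JavaScript when this flag is set.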
+            self::$hasArray = true;
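+            // Unique id that the toggle script uses to pair the icon with its collapsible block.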
+            $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000);
+
+            $html .= (!empty($var) ? ' ' : '') . 'array(' . count($var) . ')';
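+            // e.g. array('a', 'b') renders as "array(2)" plus a collapsible [ ... ] body,
+            // while an empty array is shown simply as "array(0)".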
+            if (!empty($var)) {
+                $html .= ' 
[
'; + + $indent = 4; + $longest_key = 0; + + foreach ($var as $key => $value) { + if (is_string($key)) { + $longest_key = max($longest_key, strlen($key) + 2); + } else { + $longest_key = max($longest_key, strlen($key)); + } + } + + foreach ($var as $key => $value) { + if (is_numeric($key)) { + $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); + } else { + $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); + } + + $html .= ' => '; + + $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); + + foreach ($value as $line => $val) { + if ($line != 0) { + $value[$line] = str_repeat(' ', $indent * 2) . $val; + } + } + + $html .= implode('
', $value) . '
'; + } + + $html .= ']
'; + } + } elseif (is_object($var)) { + // Check for recursion + foreach ($done as $prev) { + if ($prev === $var) { + $html .= 'object(' . get_class($var) . ') *RECURSION DETECTED*'; + return $html; + } + } + + // Keep track of variables we have already processed to detect recursion + $done[] = &$var; + + self::$hasArray=true; + $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000); + + $html .= ' object(' . get_class($var) . ')
[
'; + + $varArray = (array) $var; + + $indent = 4; + $longest_key = 0; + + foreach ($varArray as $key => $value) { + if (substr($key, 0, 2) == "\0*") { + unset($varArray[$key]); + $key = 'protected:' . substr($key, 3); + $varArray[$key] = $value; + } elseif (substr($key, 0, 1) == "\0") { + unset($varArray[$key]); + $key = 'private:' . substr($key, 1, strpos(substr($key, 1), "\0")) . ':' . substr($key, strpos(substr($key, 1), "\0") + 2); + $varArray[$key] = $value; + } + + if (is_string($key)) { + $longest_key = max($longest_key, strlen($key) + 2); + } else { + $longest_key = max($longest_key, strlen($key)); + } + } + + foreach ($varArray as $key => $value) { + if (is_numeric($key)) { + $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); + } else { + $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); + } + + $html .= ' => '; + + $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); + + foreach ($value as $line => $val) { + if ($line != 0) { + $value[$line] = str_repeat(' ', $indent * 2) . $val; + } + } + + $html .= implode('
', $value) . '
'; + } + + $html .= ']
'; + } + + return $html; + } + + /** + * Converts any accent characters to their equivalent normal characters + * and converts any other non-alphanumeric characters to dashes, then + * converts any sequence of two or more dashes to a single dash. This + * function generates slugs safe for use as URLs, and if you pass true + * as the second parameter, it will create strings safe for use as CSS + * classes or IDs. + * + * @param string $string A string to convert to a slug + * @param string $separator The string to separate words with + * @param boolean $css_mode Whether or not to generate strings safe for + * CSS classes/IDs (Default to false) + * @return string + */ + public static function slugify($string, $separator = '-', $css_mode = false) + { + // Compatibility with 1.0.* parameter ordering for semver + if ($separator === true || $separator === false) { + $css_mode = $separator; + $separator = '-'; + + // Raise deprecation error + trigger_error( + 'util::slugify() now takes $css_mode as the third parameter, please update your code', + E_USER_DEPRECATED + ); + } + + $slug = preg_replace('/([^a-z0-9]+)/', $separator, strtolower(self::remove_accents($string))); + + if ($css_mode) { + $digits = array('zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine'); + + if (is_numeric(substr($slug, 0, 1))) { + $slug = $digits[substr($slug, 0, 1)] . substr($slug, 1); + } + } + + return $slug; + } + + /** + * Checks to see if a string is utf8 encoded. + * + * NOTE: This function checks for 5-Byte sequences, UTF8 + * has Bytes Sequences with a maximum length of 4. + * + * Written by Tony Ferrara + * + * @param string $string The string to be checked + * @return boolean + */ + public static function seems_utf8($string) + { + if (function_exists('mb_check_encoding')) { + // If mbstring is available, this is significantly faster than + // using PHP regexps. + return mb_check_encoding($string, 'UTF-8'); + } + + // @codeCoverageIgnoreStart + return self::seemsUtf8Regex($string); + // @codeCoverageIgnoreEnd + } + + /** + * A non-Mbstring UTF-8 checker. + * + * @param $string + * @return bool + */ + protected static function seemsUtf8Regex($string) + { + // Obtained from http://stackoverflow.com/a/11709412/430062 with permission. + $regex = '/( + [\xC0-\xC1] # Invalid UTF-8 Bytes + | [\xF5-\xFF] # Invalid UTF-8 Bytes + | \xE0[\x80-\x9F] # Overlong encoding of prior code point + | \xF0[\x80-\x8F] # Overlong encoding of prior code point + | [\xC2-\xDF](?![\x80-\xBF]) # Invalid UTF-8 Sequence Start + | [\xE0-\xEF](?![\x80-\xBF]{2}) # Invalid UTF-8 Sequence Start + | [\xF0-\xF4](?![\x80-\xBF]{3}) # Invalid UTF-8 Sequence Start + | (?<=[\x0-\x7F\xF5-\xFF])[\x80-\xBF] # Invalid UTF-8 Sequence Middle + | (? + * + * @param string $brokenSerializedData + * @return string + */ + public static function fix_broken_serialization($brokenSerializedData) + { + $fixdSerializedData = preg_replace_callback('!s:(\d+):"(.*?)";!', function($matches) { + $snip = $matches[2]; + return 's:' . strlen($snip) . ':"' . $snip . '";'; + }, $brokenSerializedData); + + return $fixdSerializedData; + } + + /** + * Checks to see if the page is being server over SSL or not + * + * @return boolean + */ + public static function is_https() + { + return isset($_SERVER['HTTPS']) && !empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] != 'off'; + } + + /** + * Add or remove query arguments to the URL. 
+ * + * @param mixed $newKey Either newkey or an associative array + * @param mixed $newValue Either newvalue or oldquery or uri + * @param mixed $uri URI or URL to append the queru/queries to. + * @return string + */ + public static function add_query_arg($newKey, $newValue = null, $uri = null) + { + // Was an associative array of key => value pairs passed? + if (is_array($newKey)) { + $newParams = $newKey; + + // Was the URL passed as an argument? + if (!is_null($newValue)) { + $uri = $newValue; + } elseif (!is_null($uri)) { + $uri = $uri; + } else { + $uri = self::array_get($_SERVER['REQUEST_URI'], ''); + } + } else { + $newParams = array($newKey => $newValue); + + // Was the URL passed as an argument? + $uri = is_null($uri) ? self::array_get($_SERVER['REQUEST_URI'], '') : $uri; + } + + // Parse the URI into it's components + $puri = parse_url($uri); + + if (isset($puri['query'])) { + parse_str($puri['query'], $queryParams); + $queryParams = array_merge($queryParams, $newParams); + } elseif (isset($puri['path']) && strstr($puri['path'], '=') !== false) { + $puri['query'] = $puri['path']; + unset($puri['path']); + parse_str($puri['query'], $queryParams); + $queryParams = array_merge($queryParams, $newParams); + } else { + $queryParams = $newParams; + } + + // Strip out any query params that are set to false. + // Properly handle valueless parameters. + foreach ($queryParams as $param => $value) { + if ($value === false) { + unset($queryParams[$param]); + } elseif ($value === null) { + $queryParams[$param] = ''; + } + } + + // Re-construct the query string + $puri['query'] = http_build_query($queryParams); + + // Strip = from valueless parameters. + $puri['query'] = preg_replace('/=(?=&|$)/', '', $puri['query']); + + + // Re-construct the entire URL + $nuri = self::http_build_url($puri); + + // Make the URI consistent with our input + if ($nuri[0] === '/' && strstr($uri, '/') === false) { + $nuri = substr($nuri, 1); + } + + if ($nuri[0] === '?' && strstr($uri, '?') === false) { + $nuri = substr($nuri, 1); + } + + return rtrim($nuri, '?'); + } + + /** + * Removes an item or list from the query string. + * + * @param string|array $keys Query key or keys to remove. + * @param bool $uri When false uses the $_SERVER value + * @return string + */ + public static function remove_query_arg($keys, $uri = null) + { + if (is_array($keys)) { + return self::add_query_arg(array_combine($keys, array_fill(0, count($keys), false)), $uri); + } + + return self::add_query_arg(array($keys => false), $uri); + } + + /** + * Build a URL. + * + * The parts of the second URL will be merged into the first according to + * the flags argument. 
+ * + * @author Jake Smith + * @see https://github.com/jakeasmith/http_build_url/ + * + * @param mixed $url (part(s) of) an URL in form of a string or + * associative array like parse_url() returns + * @param mixed $parts same as the first argument + * @param int $flags a bitmask of binary or'ed HTTP_URL constants; + * HTTP_URL_REPLACE is the default + * @param array $new_url if set, it will be filled with the parts of the + * composed url like parse_url() would return + * @return string + */ + public static function http_build_url($url, $parts = array(), $flags = self::HTTP_URL_REPLACE, &$new_url = array()) + { + is_array($url) || $url = parse_url($url); + is_array($parts) || $parts = parse_url($parts); + + isset($url['query']) && is_string($url['query']) || $url['query'] = null; + isset($parts['query']) && is_string($parts['query']) || $parts['query'] = null; + + $keys = array('user', 'pass', 'port', 'path', 'query', 'fragment'); + + // HTTP_URL_STRIP_ALL and HTTP_URL_STRIP_AUTH cover several other flags. + if ($flags & self::HTTP_URL_STRIP_ALL) { + $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS + | self::HTTP_URL_STRIP_PORT | self::HTTP_URL_STRIP_PATH + | self::HTTP_URL_STRIP_QUERY | self::HTTP_URL_STRIP_FRAGMENT; + } elseif ($flags & self::HTTP_URL_STRIP_AUTH) { + $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS; + } + + // Schema and host are alwasy replaced + foreach (array('scheme', 'host') as $part) { + if (isset($parts[$part])) { + $url[$part] = $parts[$part]; + } + } + + if ($flags & self::HTTP_URL_REPLACE) { + foreach ($keys as $key) { + if (isset($parts[$key])) { + $url[$key] = $parts[$key]; + } + } + } else { + if (isset($parts['path']) && ($flags & self::HTTP_URL_JOIN_PATH)) { + if (isset($url['path']) && substr($parts['path'], 0, 1) !== '/') { + $url['path'] = rtrim( + str_replace(basename($url['path']), '', $url['path']), + '/' + ) . '/' . ltrim($parts['path'], '/'); + } else { + $url['path'] = $parts['path']; + } + } + + if (isset($parts['query']) && ($flags & self::HTTP_URL_JOIN_QUERY)) { + if (isset($url['query'])) { + parse_str($url['query'], $url_query); + parse_str($parts['query'], $parts_query); + + $url['query'] = http_build_query( + array_replace_recursive( + $url_query, + $parts_query + ) + ); + } else { + $url['query'] = $parts['query']; + } + } + } + + if (isset($url['path']) && substr($url['path'], 0, 1) !== '/') { + $url['path'] = '/' . $url['path']; + } + + foreach ($keys as $key) { + $strip = 'HTTP_URL_STRIP_' . strtoupper($key); + if ($flags & constant('utilphp\\util::' . $strip)) { + unset($url[$key]); + } + } + + $parsed_string = ''; + + if (isset($url['scheme'])) { + $parsed_string .= $url['scheme'] . '://'; + } + + if (isset($url['user'])) { + $parsed_string .= $url['user']; + + if (isset($url['pass'])) { + $parsed_string .= ':' . $url['pass']; + } + + $parsed_string .= '@'; + } + + if (isset($url['host'])) { + $parsed_string .= $url['host']; + } + + if (isset($url['port'])) { + $parsed_string .= ':' . $url['port']; + } + + if (!empty($url['path'])) { + $parsed_string .= $url['path']; + } else { + $parsed_string .= '/'; + } + + if (isset($url['query'])) { + $parsed_string .= '?' . $url['query']; + } + + if (isset($url['fragment'])) { + $parsed_string .= '#' . $url['fragment']; + } + + $new_url = $url; + + return $parsed_string; + } + + /** + * Converts many english words that equate to true or false to boolean. + * + * Supports 'y', 'n', 'yes', 'no' and a few other variations. 
+ * + * @param string $string The string to convert to boolean + * @param bool $default The value to return if we can't match any + * yes/no words + * @return boolean + */ + public static function str_to_bool($string, $default = false) + { + $yes_words = 'affirmative|all right|aye|indubitably|most assuredly|ok|of course|okay|sure thing|y|yes+|yea|yep|sure|yeah|true|t|on|1|oui|vrai'; + $no_words = 'no*|no way|nope|nah|na|never|absolutely not|by no means|negative|never ever|false|f|off|0|non|faux'; + + if (preg_match('/^(' . $yes_words . ')$/i', $string)) { + return true; + } elseif (preg_match('/^(' . $no_words . ')$/i', $string)) { + return false; + } + + return $default; + } + + /** + * Check if a string starts with the given string. + * + * @param string $string + * @param string $starts_with + * @return boolean + */ + public static function starts_with($string, $starts_with) + { + return strpos($string, $starts_with) === 0; + } + + /** + * Check if a string ends with the given string. + * + * @param string $string + * @param string $starts_with + * @return boolean + */ + public static function ends_with($string, $ends_with) + { + return substr($string, -strlen($ends_with)) === $ends_with; + } + + /** + * Check if a string contains another string. + * + * @param string $haystack + * @param string $needle + * @return boolean + */ + public static function str_contains($haystack, $needle) + { + return strpos($haystack, $needle) !== false; + } + + /** + * Check if a string contains another string. This version is case + * insensitive. + * + * @param string $haystack + * @param string $needle + * @return boolean + */ + public static function str_icontains($haystack, $needle) + { + return stripos($haystack, $needle) !== false; + } + + /** + * Return the file extension of the given filename. + * + * @param string $filename + * @return string + */ + public static function get_file_ext($filename) + { + return pathinfo($filename, PATHINFO_EXTENSION); + } + + /** + * Removes a directory (and its contents) recursively. + * + * Contributed by Askar (ARACOOL) + * + * @param string $dir The directory to be deleted recursively + * @param bool $traverseSymlinks Delete contents of symlinks recursively + * @return bool + * @throws \RuntimeException + */ + public static function rmdir($dir, $traverseSymlinks = false) + { + if (!file_exists($dir)) { + return true; + } elseif (!is_dir($dir)) { + throw new \RuntimeException('Given path is not a directory'); + } + + if (!is_link($dir) || $traverseSymlinks) { + foreach (scandir($dir) as $file) { + if ($file === '.' || $file === '..') { + continue; + } + + $currentPath = $dir . '/' . $file; + + if (is_dir($currentPath)) { + self::rmdir($currentPath, $traverseSymlinks); + } elseif (!unlink($currentPath)) { + // @codeCoverageIgnoreStart + throw new \RuntimeException('Unable to delete ' . $currentPath); + // @codeCoverageIgnoreEnd + } + } + } + + // Windows treats removing directory symlinks identically to removing directories. + if (is_link($dir) && !defined('PHP_WINDOWS_VERSION_MAJOR')) { + if (!unlink($dir)) { + // @codeCoverageIgnoreStart + throw new \RuntimeException('Unable to delete ' . $dir); + // @codeCoverageIgnoreEnd + } + } else { + if (!rmdir($dir)) { + // @codeCoverageIgnoreStart + throw new \RuntimeException('Unable to delete ' . $dir); + // @codeCoverageIgnoreEnd + } + } + + return true; + } + + /** + * Convert entities, while preserving already-encoded entities. 
+ * + * @param string $string The text to be converted + * @return string + */ + public static function htmlentities($string, $preserve_encoded_entities = false) + { + if ($preserve_encoded_entities) { + // @codeCoverageIgnoreStart + if (defined('HHVM_VERSION')) { + $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES); + } else { + $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES, self::mbInternalEncoding()); + } + // @codeCoverageIgnoreEnd + + $translation_table[chr(38)] = '&'; + return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); + } + + return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); + } + + /** + * Convert >, <, ', " and & to html entities, but preserves entities that + * are already encoded. + * + * @param string $string The text to be converted + * @return string + */ + public static function htmlspecialchars($string, $preserve_encoded_entities = false) + { + if ($preserve_encoded_entities) { + // @codeCoverageIgnoreStart + if (defined('HHVM_VERSION')) { + $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES); + } else { + $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES, self::mbInternalEncoding()); + } + // @codeCoverageIgnoreEnd + + $translation_table[chr(38)] = '&'; + + return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); + } + + return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); + } + + /** + * Transliterates characters to their ASCII equivalents. + * + * Part of the URLify.php Project + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + * + * @param string $text Text that might have not-ASCII characters + * @param string $language Specifies a priority for a specific language. + * @return string Filtered string with replaced "nice" characters + */ + public static function downcode($text, $language = '') + { + self::initLanguageMap($language); + + if (self::seems_utf8($text)) { + if (preg_match_all(self::$regex, $text, $matches)) { + for ($i = 0; $i < count($matches[0]); $i++) { + $char = $matches[0][$i]; + if (isset(self::$map[$char])) { + $text = str_replace($char, self::$map[$char], $text); + } + } + } + } else { + // Not a UTF-8 string so we assume its ISO-8859-1 + $search = "\x80\x83\x8a\x8e\x9a\x9e\x9f\xa2\xa5\xb5\xc0\xc1\xc2\xc3\xc4\xc5\xc7\xc8\xc9\xca\xcb\xcc\xcd"; + $search .= "\xce\xcf\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xe0\xe1\xe2\xe3\xe4\xe5\xe7\xe8\xe9"; + $search .= "\xea\xeb\xec\xed\xee\xef\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xff"; + $text = strtr($text, $search, 'EfSZszYcYuAAAAAACEEEEIIIINOOOOOOUUUUYaaaaaaceeeeiiiinoooooouuuuyy'); + + // These latin characters should be represented by two characters so + // we can't use strtr + $complexSearch = array("\x8c", "\x9c", "\xc6", "\xd0", "\xde", "\xdf", "\xe6", "\xf0", "\xfe"); + $complexReplace = array('OE', 'oe', 'AE', 'DH', 'TH', 'ss', 'ae', 'dh', 'th'); + $text = str_replace($complexSearch, $complexReplace, $text); + } + + return $text; + } + + /** + * Converts all accent characters to ASCII characters. + * + * If there are no accent characters, then the string given is just + * returned. + * + * @param string $string Text that might have accent characters + * @param string $language Specifies a priority for a specific language. 
+ * @return string Filtered string with replaced "nice" characters + */ + public static function remove_accents($string, $language = '') + { + if (!preg_match('/[\x80-\xff]/', $string)) { + return $string; + } + + return self::downcode($string, $language); + } + + /** + * Strip all witespaces from the given string. + * + * @param string $string The string to strip + * @return string + */ + public static function strip_space($string) + { + return preg_replace('/\s+/', '', $string); + } + + /** + * Sanitize a string by performing the following operation : + * - Remove accents + * - Lower the string + * - Remove punctuation characters + * - Strip whitespaces + * + * @param string $string the string to sanitize + * @return string + */ + public static function sanitize_string($string) + { + $string = self::remove_accents($string); + $string = strtolower($string); + $string = preg_replace('/[^a-zA-Z 0-9]+/', '', $string); + $string = self::strip_space($string); + + return $string; + } + + /** + * Pads a given string with zeroes on the left. + * + * @param int $number The number to pad + * @param int $length The total length of the desired string + * @return string + */ + public static function zero_pad($number, $length) + { + return str_pad($number, $length, '0', STR_PAD_LEFT); + } + + /** + * Converts a unix timestamp to a relative time string, such as "3 days ago" + * or "2 weeks ago". + * + * @param int $from The date to use as a starting point + * @param int $to The date to compare to, defaults to now + * @param string $suffix The string to add to the end, defaults to " ago" + * @return string + */ + public static function human_time_diff($from, $to = '', $as_text = false, $suffix = ' ago') + { + if ($to == '') { + $to = time(); + } + + $from = new \DateTime(date('Y-m-d H:i:s', $from)); + $to = new \DateTime(date('Y-m-d H:i:s', $to)); + $diff = $from->diff($to); + + if ($diff->y > 1) { + $text = $diff->y . ' years'; + } elseif ($diff->y == 1) { + $text = '1 year'; + } elseif ($diff->m > 1) { + $text = $diff->m . ' months'; + } elseif ($diff->m == 1) { + $text = '1 month'; + } elseif ($diff->d > 7) { + $text = ceil($diff->d / 7) . ' weeks'; + } elseif ($diff->d == 7) { + $text = '1 week'; + } elseif ($diff->d > 1) { + $text = $diff->d . ' days'; + } elseif ($diff->d == 1) { + $text = '1 day'; + } elseif ($diff->h > 1) { + $text = $diff->h . ' hours'; + } elseif ($diff->h == 1) { + $text = ' 1 hour'; + } elseif ($diff->i > 1) { + $text = $diff->i . ' minutes'; + } elseif ($diff->i == 1) { + $text = '1 minute'; + } elseif ($diff->s > 1) { + $text = $diff->s . ' seconds'; + } else { + $text = '1 second'; + } + + if ($as_text) { + $text = explode(' ', $text, 2); + $text = self::number_to_word($text[0]) . ' ' . $text[1]; + } + + return trim($text) . $suffix; + } + + /** + * Converts a number into the text equivalent. For example, 456 becomes four + * hundred and fifty-six. + * + * Part of the IntToWords Project. 
+ * + * @param int|float $number The number to convert into text + * @return string + */ + public static function number_to_word($number) + { + $number = (string) $number; + + if (strpos($number, '.') !== false) { + list($number, $decimal) = explode('.', $number); + } else { + $decimal = false; + } + + $output = ''; + + if ($number[0] == '-') { + $output = 'negative '; + $number = ltrim($number, '-'); + } elseif ($number[0] == '+') { + $output = 'positive '; + $number = ltrim($number, '+'); + } + + if ($number[0] == '0') { + $output .= 'zero'; + } else { + $length = 19; + $number = str_pad($number, 60, '0', STR_PAD_LEFT); + $group = rtrim(chunk_split($number, 3, ' '), ' '); + $groups = explode(' ', $group); + $groups2 = array(); + + foreach ($groups as $group) { + $group[1] = isset($group[1]) ? $group[1] : null; + $group[2] = isset($group[2]) ? $group[2] : null; + $groups2[] = self::numberToWordThreeDigits($group[0], $group[1], $group[2]); + } + + for ($z = 0; $z < count($groups2); $z++) { + if ($groups2[$z] != '') { + $output .= $groups2[$z] . self::numberToWordConvertGroup($length - $z); + $output .= ($z < $length && ! array_search('', array_slice($groups2, $z + 1, -1)) && $groups2[$length] != '' && $groups[$length][0] == '0' ? ' and ' : ', '); + } + } + + $output = rtrim($output, ', '); + } + + if ($decimal > 0) { + $output .= ' point'; + + for ($i = 0; $i < strlen($decimal); $i++) { + $output .= ' ' . self::numberToWordConvertDigit($decimal[$i]); + } + } + + return $output; + } + + protected static function numberToWordConvertGroup($index) + { + switch($index) { + case 11: + return ' decillion'; + case 10: + return ' nonillion'; + case 9: + return ' octillion'; + case 8: + return ' septillion'; + case 7: + return ' sextillion'; + case 6: + return ' quintrillion'; + case 5: + return ' quadrillion'; + case 4: + return ' trillion'; + case 3: + return ' billion'; + case 2: + return ' million'; + case 1: + return ' thousand'; + case 0: + return ''; + } + + return ''; + } + + protected static function numberToWordThreeDigits($digit1, $digit2, $digit3) + { + $output = ''; + + if ($digit1 == '0' && $digit2 == '0' && $digit3 == '0') { + return ''; + } + + if ($digit1 != '0') { + $output .= self::numberToWordConvertDigit($digit1) . 
' hundred'; + + if ($digit2 != '0' || $digit3 != '0') { + $output .= ' and '; + } + } + if ($digit2 != '0') { + $output .= self::numberToWordTwoDigits($digit2, $digit3); + } elseif ($digit3 != '0') { + $output .= self::numberToWordConvertDigit($digit3); + } + + return $output; + } + + protected static function numberToWordTwoDigits($digit1, $digit2) + { + if ($digit2 == '0') { + switch ($digit1) { + case '1': + return 'ten'; + case '2': + return 'twenty'; + case '3': + return 'thirty'; + case '4': + return 'forty'; + case '5': + return 'fifty'; + case '6': + return 'sixty'; + case '7': + return 'seventy'; + case '8': + return 'eighty'; + case '9': + return 'ninety'; + } + } elseif ($digit1 == '1') { + switch ($digit2) { + case '1': + return 'eleven'; + case '2': + return 'twelve'; + case '3': + return 'thirteen'; + case '4': + return 'fourteen'; + case '5': + return 'fifteen'; + case '6': + return 'sixteen'; + case '7': + return 'seventeen'; + case '8': + return 'eighteen'; + case '9': + return 'nineteen'; + } + } else { + $second_digit = self::numberToWordConvertDigit($digit2); + + switch ($digit1) { + case '2': + return "twenty-{$second_digit}"; + case '3': + return "thirty-{$second_digit}"; + case '4': + return "forty-{$second_digit}"; + case '5': + return "fifty-{$second_digit}"; + case '6': + return "sixty-{$second_digit}"; + case '7': + return "seventy-{$second_digit}"; + case '8': + return "eighty-{$second_digit}"; + case '9': + return "ninety-{$second_digit}"; + } + } + } + + /** + * @param $digit + * @return string + * @throws \LogicException + */ + protected static function numberToWordConvertDigit($digit) + { + switch ($digit) { + case '0': + return 'zero'; + case '1': + return 'one'; + case '2': + return 'two'; + case '3': + return 'three'; + case '4': + return 'four'; + case '5': + return 'five'; + case '6': + return 'six'; + case '7': + return 'seven'; + case '8': + return 'eight'; + case '9': + return 'nine'; + default: + throw new \LogicException('Not a number'); + } + } + + /** + * Calculates percentage of numerator and denominator. + * + * @param int|float $numerator + * @param int|float $denominator + * @param int $decimals + * @param string $dec_point + * @param string $thousands_sep + * @return int|float + */ + public static function calculate_percentage($numerator, $denominator, $decimals = 2, $dec_point = '.', $thousands_sep = ',') + { + return number_format(($numerator / $denominator) * 100, $decimals, $dec_point, $thousands_sep); + } + + /** + * Transmit UTF-8 content headers if the headers haven't already been sent. + * + * @param string $content_type The content type to send out + * @return boolean + */ + public static function utf8_headers($content_type = 'text/html') + { + // @codeCoverageIgnoreStart + if (!headers_sent()) { + header('Content-type: ' . $content_type . '; charset=utf-8'); + + return true; + } + + return false; + // @codeCoverageIgnoreEnd + } + + /** + * Transmit headers that force a browser to display the download file + * dialog. Cross browser compatible. Only fires if headers have not + * already been sent. + * + * @param string $filename The name of the filename to display to + * browsers + * @param string $content The content to output for the download. 
+ * If you don't specify this, just the + * headers will be sent + * @return boolean + */ + public static function force_download($filename, $content = false) + { + // @codeCoverageIgnoreStart + if (!headers_sent()) { + // Required for some browsers + if (ini_get('zlib.output_compression')) { + @ini_set('zlib.output_compression', 'Off'); + } + + header('Pragma: public'); + header('Expires: 0'); + header('Cache-Control: must-revalidate, post-check=0, pre-check=0'); + + // Required for certain browsers + header('Cache-Control: private', false); + + header('Content-Disposition: attachment; filename="' . basename(str_replace('"', '', $filename)) . '";'); + header('Content-Type: application/force-download'); + header('Content-Transfer-Encoding: binary'); + + if ($content) { + header('Content-Length: ' . strlen($content)); + } + + ob_clean(); + flush(); + + if ($content) { + echo $content; + } + + return true; + } + + return false; + // @codeCoverageIgnoreEnd + } + + /** + * Sets the headers to prevent caching for the different browsers. + * + * Different browsers support different nocache headers, so several + * headers must be sent so that all of them get the point that no + * caching should occur + * + * @return boolean + */ + public static function nocache_headers() + { + // @codeCoverageIgnoreStart + if (!headers_sent()) { + header('Expires: Wed, 11 Jan 1984 05:00:00 GMT'); + header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); + header('Cache-Control: no-cache, must-revalidate, max-age=0'); + header('Pragma: no-cache'); + + return true; + } + + return false; + // @codeCoverageIgnoreEnd + } + + /** + * Generates a string of random characters. + * + * @throws LengthException If $length is bigger than the available + * character pool and $no_duplicate_chars is + * enabled + * + * @param integer $length The length of the string to + * generate + * @param boolean $human_friendly Whether or not to make the + * string human friendly by + * removing characters that can be + * confused with other characters ( + * O and 0, l and 1, etc) + * @param boolean $include_symbols Whether or not to include + * symbols in the string. Can not + * be enabled if $human_friendly is + * true + * @param boolean $no_duplicate_chars Whether or not to only use + * characters once in the string. 
+ * @return string + */ + public static function random_string($length = 16, $human_friendly = true, $include_symbols = false, $no_duplicate_chars = false) + { + $nice_chars = 'ABCDEFGHJKLMNPQRSTUVWXYZabcdefhjkmnprstuvwxyz23456789'; + $all_an = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890'; + $symbols = '!@#$%^&*()~_-=+{}[]|:;<>,.?/"\'\\`'; + $string = ''; + + // Determine the pool of available characters based on the given parameters + if ($human_friendly) { + $pool = $nice_chars; + } else { + $pool = $all_an; + + if ($include_symbols) { + $pool .= $symbols; + } + } + + if (!$no_duplicate_chars) { + return substr(str_shuffle(str_repeat($pool, $length)), 0, $length); + } + + // Don't allow duplicate letters to be disabled if the length is + // longer than the available characters + if ($no_duplicate_chars && strlen($pool) < $length) { + throw new \LengthException('$length exceeds the size of the pool and $no_duplicate_chars is enabled'); + } + + // Convert the pool of characters into an array of characters and + // shuffle the array + $pool = str_split($pool); + $poolLength = count($pool); + $rand = mt_rand(0, $poolLength - 1); + + // Generate our string + for ($i = 0; $i < $length; $i++) { + $string .= $pool[$rand]; + + // Remove the character from the array to avoid duplicates + array_splice($pool, $rand, 1); + + // Generate a new number + if (($poolLength - 2 - $i) > 0) { + $rand = mt_rand(0, $poolLength - 2 - $i); + } else { + $rand = 0; + } + } + + return $string; + } + + /** + * Generate secure random string of given length + * If 'openssl_random_pseudo_bytes' is not available + * then generate random string using default function + * + * Part of the Laravel Project + * + * @param int $length length of string + * @return bool + */ + public static function secure_random_string($length = 16) + { + if (function_exists('openssl_random_pseudo_bytes')) { + $bytes = openssl_random_pseudo_bytes($length * 2); + + if ($bytes === false) { + throw new \LengthException('$length is not accurate, unable to generate random string'); + } + + return substr(str_replace(array('/', '+', '='), '', base64_encode($bytes)), 0, $length); + } + + // @codeCoverageIgnoreStart + return static::random_string($length); + // @codeCoverageIgnoreEnd + } + + /** + * Check if a given string matches a given pattern. + * + * Contributed by Abhimanyu Sharma + * + * @param string $pattern Parttern of string exptected + * @param string $string String that need to be matched + * @return bool + */ + public static function match_string($pattern, $string, $caseSensitive = true) + { + if ($pattern == $string) { + return true; + } + + // Preg flags + $flags = $caseSensitive ? '' : 'i'; + + // Escape any regex special characters + $pattern = preg_quote($pattern, '#'); + + // Unescape * which is our wildcard character and change it to .* + $pattern = str_replace('\*', '.*', $pattern); + + return (bool) preg_match('#^' . $pattern . '$#' . $flags, $string); + } + + /** + * Validate an email address. + * + * @param string $possible_email An email address to validate + * @return bool + */ + public static function validate_email($possible_email) + { + return (bool) filter_var($possible_email, FILTER_VALIDATE_EMAIL); + } + + /** + * Return the URL to a user's gravatar. 
+ * + * @param string $email The email of the user + * @param integer $size The size of the gravatar + * @return string + */ + public static function get_gravatar($email, $size = 32) + { + if (self::is_https()) { + $url = 'https://secure.gravatar.com/'; + } else { + $url = 'http://www.gravatar.com/'; + } + + $url .= 'avatar/' . md5($email) . '?s=' . (int) abs($size); + + return $url; + } + + /** + * Turns all of the links in a string into HTML links. + * + * Part of the LinkifyURL Project + * + * @param string $text The string to parse + * @return string + */ + public static function linkify($text) + { + $text = preg_replace('/'/', ''', $text); // IE does not handle ' entity! + $section_html_pattern = '%# Rev:20100913_0900 github.com/jmrware/LinkifyURL + # Section text into HTML tags and everything else. + ( # $1: Everything not HTML tag. + [^<]+(?:(?!... tag. + ]*> # opening tag. + [^<]*(?:(?! # closing tag. + ) # End $2: + %ix'; + + return preg_replace_callback($section_html_pattern, array(__CLASS__, 'linkifyCallback'), $text); + } + + /** + * Callback for the preg_replace in the linkify() method. + * + * Part of the LinkifyURL Project + * + * @param array $matches Matches from the preg_ function + * @return string + */ + protected static function linkifyRegex($text) + { + $url_pattern = '/# Rev:20100913_0900 github.com\/jmrware\/LinkifyURL + # Match http & ftp URL that is not already linkified. + # Alternative 1: URL delimited by (parentheses). + (\() # $1 "(" start delimiter. + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $2: URL. + (\)) # $3: ")" end delimiter. + | # Alternative 2: URL delimited by [square brackets]. + (\[) # $4: "[" start delimiter. + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $5: URL. + (\]) # $6: "]" end delimiter. + | # Alternative 3: URL delimited by {curly braces}. + (\{) # $7: "{" start delimiter. + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $8: URL. + (\}) # $9: "}" end delimiter. + | # Alternative 4: URL delimited by . + (<|&(?:lt|\#60|\#x3c);) # $10: "<" start delimiter (or HTML entity). + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $11: URL. + (>|&(?:gt|\#62|\#x3e);) # $12: ">" end delimiter (or HTML entity). + | # Alternative 5: URL not delimited by (), [], {} or <>. + (# $13: Prefix proving URL not already linked. + (?: ^ # Can be a beginning of line or string, or + | [^=\s\'"\]] # a non-"=", non-quote, non-"]", followed by + ) \s*[\'"]? # optional whitespace and optional quote; + | [^=\s]\s+ # or... a non-equals sign followed by whitespace. + ) # End $13. Non-prelinkified-proof prefix. + (\b # $14: Other non-delimited URL. + (?:ht|f)tps?:\/\/ # Required literal http, https, ftp or ftps prefix. + [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]+ # All URI chars except "&" (normal*). + (?: # Either on a "&" or at the end of URI. + (?! # Allow a "&" char only if not start of an... + &(?:gt|\#0*62|\#x0*3e); # HTML ">" entity, or + | &(?:amp|apos|quot|\#0*3[49]|\#x0*2[27]); # a [&\'"] entity if + [.!&\',:?;]? # followed by optional punctuation then + (?:[^a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]|$) # a non-URI char or EOS. + ) & # If neg-assertion true, match "&" (special). + [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]* # More non-& URI chars (normal*). + )* # Unroll-the-loop (special normal*)*. + [a-z0-9\-_~$()*+=\/#[\]@%] # Last char can\'t be [.!&\',;:?] + ) # End $14. Other non-delimited URL. 
+ /imx'; + + $url_replace = '$1$4$7$10$13$2$5$8$11$14$3$6$9$12'; + + return preg_replace($url_pattern, $url_replace, $text); + } + + /** + * Callback for the preg_replace in the linkify() method. + * + * Part of the LinkifyURL Project + * + * @param array $matches Matches from the preg_ function + * @return string + */ + protected static function linkifyCallback($matches) + { + if (isset($matches[2])) { + return $matches[2]; + } + + return self::linkifyRegex($matches[1]); + } + + /** + * Return the current URL. + * + * @return string + */ + public static function get_current_url() + { + $url = ''; + + // Check to see if it's over https + $is_https = self::is_https(); + if ($is_https) { + $url .= 'https://'; + } else { + $url .= 'http://'; + } + + // Was a username or password passed? + if (isset($_SERVER['PHP_AUTH_USER'])) { + $url .= $_SERVER['PHP_AUTH_USER']; + + if (isset($_SERVER['PHP_AUTH_PW'])) { + $url .= ':' . $_SERVER['PHP_AUTH_PW']; + } + + $url .= '@'; + } + + + // We want the user to stay on the same host they are currently on, + // but beware of security issues + // see http://shiflett.org/blog/2006/mar/server-name-versus-http-host + $url .= $_SERVER['HTTP_HOST']; + + $port = $_SERVER['SERVER_PORT']; + + // Is it on a non standard port? + if ($is_https && ($port != 443)) { + $url .= ':' . $_SERVER['SERVER_PORT']; + } elseif (!$is_https && ($port != 80)) { + $url .= ':' . $_SERVER['SERVER_PORT']; + } + + // Get the rest of the URL + if (!isset($_SERVER['REQUEST_URI'])) { + // Microsoft IIS doesn't set REQUEST_URI by default + $url .= $_SERVER['PHP_SELF']; + + if (isset($_SERVER['QUERY_STRING'])) { + $url .= '?' . $_SERVER['QUERY_STRING']; + } + } else { + $url .= $_SERVER['REQUEST_URI']; + } + + return $url; + } + + /** + * Returns the IP address of the client. + * + * @param boolean $trust_proxy_headers Whether or not to trust the + * proxy headers HTTP_CLIENT_IP + * and HTTP_X_FORWARDED_FOR. ONLY + * use if your server is behind a + * proxy that sets these values + * @return string + */ + public static function get_client_ip($trust_proxy_headers = false) + { + if (!$trust_proxy_headers) { + return $_SERVER['REMOTE_ADDR']; + } + + if (!empty($_SERVER['HTTP_CLIENT_IP'])) { + $ip = $_SERVER['HTTP_CLIENT_IP']; + } elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) { + $ip = $_SERVER['HTTP_X_FORWARDED_FOR']; + } else { + $ip = $_SERVER['REMOTE_ADDR']; + } + + return $ip; + } + + /** + * Truncate a string to a specified length without cutting a word off. + * + * @param string $string The string to truncate + * @param integer $length The length to truncate the string to + * @param string $append Text to append to the string IF it gets + * truncated, defaults to '...' + * @return string + */ + public static function safe_truncate($string, $length, $append = '...') + { + $ret = substr($string, 0, $length); + $last_space = strrpos($ret, ' '); + + if ($last_space !== false && $string != $ret) { + $ret = substr($ret, 0, $last_space); + } + + if ($ret != $string) { + $ret .= $append; + } + + return $ret; + } + + + /** + * Truncate the string to given length of characters. + * + * @param string $string The variable to truncate + * @param integer $limit The length to truncate the string to + * @param string $append Text to append to the string IF it gets + * truncated, defaults to '...' 
+ * @return string + */ + public static function limit_characters($string, $limit = 100, $append = '...') + { + if (mb_strlen($string) <= $limit) { + return $string; + } + + return rtrim(mb_substr($string, 0, $limit, 'UTF-8')) . $append; + } + + /** + * Truncate the string to given length of words. + * + * @param $string + * @param $limit + * @param string $append + * @return string + */ + public static function limit_words($string, $limit = 100, $append = '...') + { + preg_match('/^\s*+(?:\S++\s*+){1,' . $limit . '}/u', $string, $matches); + + if (!isset($matches[0]) || strlen($string) === strlen($matches[0])) { + return $string; + } + + return rtrim($matches[0]).$append; + } + + /** + * Returns the ordinal version of a number (appends th, st, nd, rd). + * + * @param string $number The number to append an ordinal suffix to + * @return string + */ + public static function ordinal($number) + { + $test_c = abs($number) % 10; + $ext = ((abs($number) % 100 < 21 && abs($number) % 100 > 4) ? 'th' : (($test_c < 4) ? ($test_c < 3) ? ($test_c < 2) ? ($test_c < 1) ? 'th' : 'st' : 'nd' : 'rd' : 'th')); + + return $number . $ext; + } + + /** + * Returns the file permissions as a nice string, like -rw-r--r-- or false + * if the file is not found. + * + * @param string $file The name of the file to get permissions form + * @param int $perms Numerical value of permissions to display as text. + * @return string + */ + public static function full_permissions($file, $perms = null) + { + if (is_null($perms)) { + if (!file_exists($file)) { + return false; + } + $perms = fileperms($file); + } + + if (($perms & 0xC000) == 0xC000) { + // Socket + $info = 's'; + } elseif (($perms & 0xA000) == 0xA000) { + // Symbolic Link + $info = 'l'; + } elseif (($perms & 0x8000) == 0x8000) { + // Regular + $info = '-'; + } elseif (($perms & 0x6000) == 0x6000) { + // Block special + $info = 'b'; + } elseif (($perms & 0x4000) == 0x4000) { + // Directory + $info = 'd'; + } elseif (($perms & 0x2000) == 0x2000) { + // Character special + $info = 'c'; + } elseif (($perms & 0x1000) == 0x1000) { + // FIFO pipe + $info = 'p'; + } else { + // Unknown + $info = 'u'; + } + + // Owner + $info .= (($perms & 0x0100) ? 'r' : '-'); + $info .= (($perms & 0x0080) ? 'w' : '-'); + $info .= (($perms & 0x0040) ? + (($perms & 0x0800) ? 's' : 'x') : + (($perms & 0x0800) ? 'S' : '-')); + + // Group + $info .= (($perms & 0x0020) ? 'r' : '-'); + $info .= (($perms & 0x0010) ? 'w' : '-'); + $info .= (($perms & 0x0008) ? + (($perms & 0x0400) ? 's' : 'x') : + (($perms & 0x0400) ? 'S' : '-')); + + // World + $info .= (($perms & 0x0004) ? 'r' : '-'); + $info .= (($perms & 0x0002) ? 'w' : '-'); + $info .= (($perms & 0x0001) ? + (($perms & 0x0200) ? 't' : 'x') : + (($perms & 0x0200) ? 'T' : '-')); + + return $info; + } + + /** + * Returns the first element in an array. + * + * @param array $array + * @return mixed + */ + public static function array_first(array $array) + { + return reset($array); + } + + /** + * Returns the last element in an array. + * + * @param array $array + * @return mixed + */ + public static function array_last(array $array) + { + return end($array); + } + + /** + * Returns the first key in an array. + * + * @param array $array + * @return int|string + */ + public static function array_first_key(array $array) + { + reset($array); + + return key($array); + } + + /** + * Returns the last key in an array. 
+ * + * @param array $array + * @return int|string + */ + public static function array_last_key(array $array) + { + end($array); + + return key($array); + } + + /** + * Flatten a multi-dimensional array into a one dimensional array. + * + * Contributed by Theodore R. Smith of PHP Experts, Inc. + * + * @param array $array The array to flatten + * @param boolean $preserve_keys Whether or not to preserve array keys. + * Keys from deeply nested arrays will + * overwrite keys from shallowy nested arrays + * @return array + */ + public static function array_flatten(array $array, $preserve_keys = true) + { + $flattened = array(); + + array_walk_recursive($array, function($value, $key) use (&$flattened, $preserve_keys) { + if ($preserve_keys && !is_int($key)) { + $flattened[$key] = $value; + } else { + $flattened[] = $value; + } + }); + + return $flattened; + } + + /** + * Accepts an array, and returns an array of values from that array as + * specified by $field. For example, if the array is full of objects + * and you call util::array_pluck($array, 'name'), the function will + * return an array of values from $array[]->name. + * + * @param array $array An array + * @param string $field The field to get values from + * @param boolean $preserve_keys Whether or not to preserve the + * array keys + * @param boolean $remove_nomatches If the field doesn't appear to be set, + * remove it from the array + * @return array + */ + public static function array_pluck(array $array, $field, $preserve_keys = true, $remove_nomatches = true) + { + $new_list = array(); + + foreach ($array as $key => $value) { + if (is_object($value)) { + if (isset($value->{$field})) { + if ($preserve_keys) { + $new_list[$key] = $value->{$field}; + } else { + $new_list[] = $value->{$field}; + } + } elseif (!$remove_nomatches) { + $new_list[$key] = $value; + } + } else { + if (isset($value[$field])) { + if ($preserve_keys) { + $new_list[$key] = $value[$field]; + } else { + $new_list[] = $value[$field]; + } + } elseif (!$remove_nomatches) { + $new_list[$key] = $value; + } + } + } + + return $new_list; + } + + /** + * Searches for a given value in an array of arrays, objects and scalar + * values. You can optionally specify a field of the nested arrays and + * objects to search in. + * + * @param array $array The array to search + * @param scalar $search The value to search for + * @param string $field The field to search in, if not specified + * all fields will be searched + * @return boolean|scalar False on failure or the array key on success + */ + public static function array_search_deep(array $array, $search, $field = false) + { + // *grumbles* stupid PHP type system + $search = (string) $search; + + foreach ($array as $key => $elem) { + // *grumbles* stupid PHP type system + $key = (string) $key; + + if ($field) { + if (is_object($elem) && $elem->{$field} === $search) { + return $key; + } elseif (is_array($elem) && $elem[$field] === $search) { + return $key; + } elseif (is_scalar($elem) && $elem === $search) { + return $key; + } + } else { + if (is_object($elem)) { + $elem = (array) $elem; + + if (in_array($search, $elem)) { + return $key; + } + } elseif (is_array($elem) && in_array($search, $elem)) { + return $key; + } elseif (is_scalar($elem) && $elem === $search) { + return $key; + } + } + } + + return false; + } + + /** + * Returns an array containing all the elements of arr1 after applying + * the callback function to each one. 
+ * + * @param string $callback Callback function to run for each + * element in each array + * @param array $array An array to run through the callback + * function + * @param boolean $on_nonscalar Whether or not to call the callback + * function on nonscalar values + * (Objects, resources, etc) + * @return array + */ + public static function array_map_deep(array $array, $callback, $on_nonscalar = false) + { + foreach ($array as $key => $value) { + if (is_array($value)) { + $args = array($value, $callback, $on_nonscalar); + $array[$key] = call_user_func_array(array(__CLASS__, __FUNCTION__), $args); + } elseif (is_scalar($value) || $on_nonscalar) { + $array[$key] = call_user_func($callback, $value); + } + } + + return $array; + } + + public static function array_clean(array $array) + { + return array_filter($array); + } + + /** + * Wrapper to prevent errors if the user doesn't have the mbstring + * extension installed. + * + * @param string $encoding + * @return string + */ + protected static function mbInternalEncoding($encoding = null) + { + if (function_exists('mb_internal_encoding')) { + return $encoding ? mb_internal_encoding($encoding) : mb_internal_encoding(); + } + + // @codeCoverageIgnoreStart + return 'UTF-8'; + // @codeCoverageIgnoreEnd + } + + /** + * Set the writable bit on a file to the minimum value that allows the user + * running PHP to write to it. + * + * @param string $filename The filename to set the writable bit on + * @param boolean $writable Whether to make the file writable or not + * @return boolean + */ + public static function set_writable($filename, $writable = true) + { + $stat = @stat($filename); + + if ($stat === false) { + return false; + } + + // We're on Windows + if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { + return true; + } + + list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); + + if ($writable) { + // Set only the user writable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, fileperms($filename) | 0200); + } + + // Set only the group writable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, fileperms($filename) | 0220); + } + + // Set the world writable bit (file isn't owned or grouped by us) + return chmod($filename, fileperms($filename) | 0222); + } else { + // Set only the user writable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, (fileperms($filename) | 0222) ^ 0222); + } + + // Set only the group writable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, (fileperms($filename) | 0222) ^ 0022); + } + + // Set the world writable bit (file isn't owned or grouped by us) + return chmod($filename, (fileperms($filename) | 0222) ^ 0002); + } + } + + /** + * Set the readable bit on a file to the minimum value that allows the user + * running PHP to read to it. 
+ * + * @param string $filename The filename to set the readable bit on + * @param boolean $readable Whether to make the file readable or not + * @return boolean + */ + public static function set_readable($filename, $readable = true) + { + $stat = @stat($filename); + + if ($stat === false) { + return false; + } + + // We're on Windows + if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { + return true; + } + + list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); + + if ($readable) { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, fileperms($filename) | 0400); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, fileperms($filename) | 0440); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, fileperms($filename) | 0444); + } else { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, (fileperms($filename) | 0444) ^ 0444); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, (fileperms($filename) | 0444) ^ 0044); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, (fileperms($filename) | 0444) ^ 0004); + } + } + + /** + * Set the executable bit on a file to the minimum value that allows the + * user running PHP to read to it. + * + * @param string $filename The filename to set the executable bit on + * @param boolean $executable Whether to make the file executable or not + * @return boolean + */ + public static function set_executable($filename, $executable = true) + { + $stat = @stat($filename); + + if ($stat === false) { + return false; + } + + // We're on Windows + if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { + return true; + } + + list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); + + if ($executable) { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, fileperms($filename) | 0100); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, fileperms($filename) | 0110); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, fileperms($filename) | 0111); + } else { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, (fileperms($filename) | 0111) ^ 0111); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, (fileperms($filename) | 0111) ^ 0011); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, (fileperms($filename) | 0111) ^ 0001); + } + } + + /** + * Returns size of a given directory in bytes. + * + * @param string $dir + * @return integer + */ + public static function directory_size($dir) + { + $size = 0; + foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $file => $key) { + if ($key->isFile()) { + $size += $key->getSize(); + } + } + return $size; + } + + /** + * Returns a home directory of current user. 
+ * + * @return string + */ + public static function get_user_directory() + { + if (isset($_SERVER['HOMEDRIVE'])) return $_SERVER['HOMEDRIVE'] . $_SERVER['HOMEPATH']; + else return $_SERVER['HOME']; + } + + /** + * Returns all paths inside a directory. + * + * @param string $dir + * @return array + */ + public static function directory_contents($dir) + { + $contents = array(); + foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::KEY_AS_PATHNAME | \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $pathname => $fi) { + $contents[] = $pathname; + } + natsort($contents); + return $contents; + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts new file mode 100644 index 0000000..af10d41 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts @@ -0,0 +1,89 @@ +declare namespace npmRunPath { + interface RunPathOptions { + /** + Working directory. + + @default process.cwd() + */ + readonly cwd?: string; + + /** + PATH to be appended. Default: [`PATH`](https://github.com/sindresorhus/path-key). + + Set it to an empty string to exclude the default PATH. + */ + readonly path?: string; + + /** + Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. + + This can be either an absolute path or a path relative to the `cwd` option. + + @default process.execPath + */ + readonly execPath?: string; + } + + interface ProcessEnv { + [key: string]: string | undefined; + } + + interface EnvOptions { + /** + Working directory. + + @default process.cwd() + */ + readonly cwd?: string; + + /** + Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. + */ + readonly env?: ProcessEnv; + + /** + Path to the current Node.js executable. Its directory is pushed to the front of PATH. + + This can be either an absolute path or a path relative to the `cwd` option. + + @default process.execPath + */ + readonly execPath?: string; + } +} + +declare const npmRunPath: { + /** + Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries. + + @returns The augmented path string. + + @example + ``` + import * as childProcess from 'child_process'; + import npmRunPath = require('npm-run-path'); + + console.log(process.env.PATH); + //=> '/usr/local/bin' + + console.log(npmRunPath()); + //=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' + + // `foo` is a locally installed binary + childProcess.execFileSync('foo', { + env: npmRunPath.env() + }); + ``` + */ + (options?: npmRunPath.RunPathOptions): string; + + /** + @returns The augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. 
+ */ + env(options?: npmRunPath.EnvOptions): npmRunPath.ProcessEnv; + + // TODO: Remove this for the next major release + default: typeof npmRunPath; +}; + +export = npmRunPath; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js new file mode 100644 index 0000000..8c94abc --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js @@ -0,0 +1,47 @@ +'use strict'; +const path = require('path'); +const pathKey = require('path-key'); + +const npmRunPath = options => { + options = { + cwd: process.cwd(), + path: process.env[pathKey()], + execPath: process.execPath, + ...options + }; + + let previous; + let cwdPath = path.resolve(options.cwd); + const result = []; + + while (previous !== cwdPath) { + result.push(path.join(cwdPath, 'node_modules/.bin')); + previous = cwdPath; + cwdPath = path.resolve(cwdPath, '..'); + } + + // Ensure the running `node` binary is used + const execPathDir = path.resolve(options.cwd, options.execPath, '..'); + result.push(execPathDir); + + return result.concat(options.path).join(path.delimiter); +}; + +module.exports = npmRunPath; +// TODO: Remove this for the next major release +module.exports.default = npmRunPath; + +module.exports.env = options => { + options = { + env: process.env, + ...options + }; + + const env = {...options.env}; + const path = pathKey({env}); + + options.path = env[path]; + env[path] = module.exports(options); + + return env; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
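For reference, a minimal usage sketch of the vendored npm-run-path module added above; it assumes the module is resolvable from the calling script and that a locally installed `eslint` binary exists (both are illustrative choices, not part of this patch):

```js
// Spawn a locally installed binary without spelling out ./node_modules/.bin/eslint.
const childProcess = require('child_process');
const npmRunPath = require('npm-run-path');

const child = childProcess.spawn('eslint', ['.'], {
  // env() copies process.env and prepends every ancestor node_modules/.bin
  // directory (plus the directory of the running Node binary) to PATH.
  env: npmRunPath.env(),
  stdio: 'inherit'
});

child.on('exit', code => process.exit(code === null ? 1 : code));
```

The package's own readme, added later in this patch, shows the same pattern with `execFileSync`.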
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json new file mode 100644 index 0000000..feb8c00 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json @@ -0,0 +1,44 @@ +{ + "name": "npm-run-path", + "version": "4.0.1", + "description": "Get your PATH prepended with locally installed binaries", + "license": "MIT", + "repository": "sindresorhus/npm-run-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "npm", + "run", + "path", + "package", + "bin", + "binary", + "binaries", + "script", + "cli", + "command-line", + "execute", + "executable" + ], + "dependencies": { + "path-key": "^3.0.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md new file mode 100644 index 0000000..557fbeb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md @@ -0,0 +1,115 @@ +# npm-run-path [![Build Status](https://travis-ci.org/sindresorhus/npm-run-path.svg?branch=master)](https://travis-ci.org/sindresorhus/npm-run-path) + +> Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries + +In [npm run scripts](https://docs.npmjs.com/cli/run-script) you can execute locally installed binaries by name. This enables the same outside npm. + + +## Install + +``` +$ npm install npm-run-path +``` + + +## Usage + +```js +const childProcess = require('child_process'); +const npmRunPath = require('npm-run-path'); + +console.log(process.env.PATH); +//=> '/usr/local/bin' + +console.log(npmRunPath()); +//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' + +// `foo` is a locally installed binary +childProcess.execFileSync('foo', { + env: npmRunPath.env() +}); +``` + + +## API + +### npmRunPath(options?) + +Returns the augmented path string. + +#### options + +Type: `object` + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Working directory. + +##### path + +Type: `string`
+Default: [`PATH`](https://github.com/sindresorhus/path-key) + +PATH to be appended.
+Set it to an empty string to exclude the default PATH. + +##### execPath + +Type: `string`
+Default: `process.execPath` + +Path to the current Node.js executable. Its directory is pushed to the front of PATH. + +This can be either an absolute path or a path relative to the [`cwd` option](#cwd). + +### npmRunPath.env(options?) + +Returns the augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. + +#### options + +Type: `object` + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Working directory. + +##### env + +Type: `Object` + +Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. + +##### execPath + +Type: `string`
+Default: `process.execPath` + +Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. + +This can be either an absolute path or a path relative to the [`cwd` option](#cwd). + + +## Related + +- [npm-run-path-cli](https://github.com/sindresorhus/npm-run-path-cli) - CLI for this module +- [execa](https://github.com/sindresorhus/execa) - Execute a locally installed binary + + +--- + +
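To make the options documented above concrete, here is a small sketch of both call forms; the `./packages/app` directory is purely illustrative, and `path-key` is the companion module vendored further down in this same patch:

```js
const npmRunPath = require('npm-run-path');
const pathKey = require('path-key');

// String form: an augmented PATH rooted at a different working directory.
console.log(npmRunPath({cwd: './packages/app'}));

// Env form: a copy of process.env whose PATH entry (stored under the
// platform-correct key, e.g. 'Path' on Windows) has been augmented.
const env = npmRunPath.env({cwd: './packages/app', execPath: process.execPath});
console.log(env[pathKey()]);
```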
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts new file mode 100644 index 0000000..ea84cab --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts @@ -0,0 +1,64 @@ +declare namespace onetime { + interface Options { + /** + Throw an error when called more than once. + + @default false + */ + throw?: boolean; + } +} + +declare const onetime: { + /** + Ensure a function is only called once. When called multiple times it will return the return value from the first call. + + @param fn - Function that should only be called once. + @returns A function that only calls `fn` once. + + @example + ``` + import onetime = require('onetime'); + + let i = 0; + + const foo = onetime(() => ++i); + + foo(); //=> 1 + foo(); //=> 1 + foo(); //=> 1 + + onetime.callCount(foo); //=> 3 + ``` + */ + ( + fn: (...arguments: ArgumentsType) => ReturnType, + options?: onetime.Options + ): (...arguments: ArgumentsType) => ReturnType; + + /** + Get the number of times `fn` has been called. + + @param fn - Function to get call count from. + @returns A number representing how many times `fn` has been called. + + @example + ``` + import onetime = require('onetime'); + + const foo = onetime(() => {}); + foo(); + foo(); + foo(); + + console.log(onetime.callCount(foo)); + //=> 3 + ``` + */ + callCount(fn: (...arguments: any[]) => unknown): number; + + // TODO: Remove this for the next major release + default: typeof onetime; +}; + +export = onetime; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js new file mode 100644 index 0000000..99c5fc1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js @@ -0,0 +1,44 @@ +'use strict'; +const mimicFn = require('mimic-fn'); + +const calledFunctions = new WeakMap(); + +const onetime = (function_, options = {}) => { + if (typeof function_ !== 'function') { + throw new TypeError('Expected a function'); + } + + let returnValue; + let callCount = 0; + const functionName = function_.displayName || function_.name || ''; + + const onetime = function (...arguments_) { + calledFunctions.set(onetime, ++callCount); + + if (callCount === 1) { + returnValue = function_.apply(this, arguments_); + function_ = null; + } else if (options.throw === true) { + throw new Error(`Function \`${functionName}\` can only be called once`); + } + + return returnValue; + }; + + mimicFn(onetime, function_); + calledFunctions.set(onetime, callCount); + + return onetime; +}; + +module.exports = onetime; +// TODO: Remove this for the next major release +module.exports.default = onetime; + +module.exports.callCount = function_ => { + if (!calledFunctions.has(function_)) { + throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`); + } + + return calledFunctions.get(function_); +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files 
(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json new file mode 100644 index 0000000..54caea5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json @@ -0,0 +1,43 @@ +{ + "name": "onetime", + "version": "5.1.2", + "description": "Ensure a function is only called once", + "license": "MIT", + "repository": "sindresorhus/onetime", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "once", + "function", + "one", + "onetime", + "func", + "fn", + "single", + "call", + "called", + "prevent" + ], + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md new file mode 100644 index 0000000..2d133d3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md @@ -0,0 +1,94 @@ +# onetime [![Build Status](https://travis-ci.com/sindresorhus/onetime.svg?branch=master)](https://travis-ci.com/github/sindresorhus/onetime) + +> Ensure a function is only called once + +When called multiple times it will return the return value from the first call. + +*Unlike the module [once](https://github.com/isaacs/once), this one isn't naughty and extending `Function.prototype`.* + +## Install + +``` +$ npm install onetime +``` + +## Usage + +```js +const onetime = require('onetime'); + +let i = 0; + +const foo = onetime(() => ++i); + +foo(); //=> 1 +foo(); //=> 1 +foo(); //=> 1 + +onetime.callCount(foo); //=> 3 +``` + +```js +const onetime = require('onetime'); + +const foo = onetime(() => {}, {throw: true}); + +foo(); + +foo(); +//=> Error: Function `foo` can only be called once +``` + +## API + +### onetime(fn, options?) + +Returns a function that only calls `fn` once. + +#### fn + +Type: `Function` + +Function that should only be called once. + +#### options + +Type: `object` + +##### throw + +Type: `boolean`\ +Default: `false` + +Throw an error when called more than once. + +### onetime.callCount(fn) + +Returns a number representing how many times `fn` has been called. 
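For example, a brief sketch that exercises the wrapper and `callCount` together; the `openConnection` initializer is hypothetical:

```js
const onetime = require('onetime');

// A (made-up) expensive initializer: onetime guarantees the body runs once,
// and every later call returns the value produced by the first call.
const openConnection = onetime(() => {
  console.log('connecting...');
  return {connected: true};
});

const first = openConnection();  // logs 'connecting...'
const second = openConnection(); // silent; returns the cached object

console.log(first === second);                    //=> true
console.log(onetime.callCount(openConnection));   //=> 2
```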
+ +Note: It throws an error if you pass in a function that is not wrapped by `onetime`. + +```js +const onetime = require('onetime'); + +const foo = onetime(() => {}); + +foo(); +foo(); +foo(); + +console.log(onetime.callCount(foo)); +//=> 3 +``` + +#### fn + +Type: `Function` + +Function to get call count from. + +## onetime for enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of onetime and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-onetime?utm_source=npm-onetime&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts new file mode 100644 index 0000000..7c575d1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts @@ -0,0 +1,40 @@ +/// + +declare namespace pathKey { + interface Options { + /** + Use a custom environment variables object. Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env). + */ + readonly env?: {[key: string]: string | undefined}; + + /** + Get the PATH key for a specific platform. Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform). + */ + readonly platform?: NodeJS.Platform; + } +} + +declare const pathKey: { + /** + Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform. + + @example + ``` + import pathKey = require('path-key'); + + const key = pathKey(); + //=> 'PATH' + + const PATH = process.env[key]; + //=> '/usr/local/bin:/usr/bin:/bin' + ``` + */ + (options?: pathKey.Options): string; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function pathKey(options?: pathKey.Options): string; + // export = pathKey; + default: typeof pathKey; +}; + +export = pathKey; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js new file mode 100644 index 0000000..0cf6415 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js @@ -0,0 +1,16 @@ +'use strict'; + +const pathKey = (options = {}) => { + const environment = options.env || process.env; + const platform = options.platform || process.platform; + + if (platform !== 'win32') { + return 'PATH'; + } + + return Object.keys(environment).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path'; +}; + +module.exports = pathKey; +// TODO: Remove this for the next major release +module.exports.default = pathKey; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without 
limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json new file mode 100644 index 0000000..c8cbd38 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json @@ -0,0 +1,39 @@ +{ + "name": "path-key", + "version": "3.1.1", + "description": "Get the PATH environment variable key cross-platform", + "license": "MIT", + "repository": "sindresorhus/path-key", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "path", + "key", + "environment", + "env", + "variable", + "var", + "get", + "cross-platform", + "windows" + ], + "devDependencies": { + "@types/node": "^11.13.0", + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md new file mode 100644 index 0000000..a9052d7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md @@ -0,0 +1,61 @@ +# path-key [![Build Status](https://travis-ci.org/sindresorhus/path-key.svg?branch=master)](https://travis-ci.org/sindresorhus/path-key) + +> Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform + +It's usually `PATH`, but on Windows it can be any casing like `Path`... + + +## Install + +``` +$ npm install path-key +``` + + +## Usage + +```js +const pathKey = require('path-key'); + +const key = pathKey(); +//=> 'PATH' + +const PATH = process.env[key]; +//=> '/usr/local/bin:/usr/bin:/bin' +``` + + +## API + +### pathKey(options?) + +#### options + +Type: `object` + +##### env + +Type: `object`
+Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env) + +Use a custom environment variables object. + +#### platform + +Type: `string`
+Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform) + +Get the PATH key for a specific platform. + + +--- + +
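A short sketch of the lookup this module performs, which is what makes PATH manipulation safe in `child_process` env objects; the Windows-style env object passed in the second call is only there to illustrate the non-POSIX branch:

```js
const pathKey = require('path-key');

// On POSIX platforms this is always 'PATH'; on Windows it is whatever
// casing the current environment actually uses ('Path', 'PATH', ...).
const key = pathKey();
console.log(key, process.env[key]);

// The key can also be resolved for an arbitrary env/platform pair, e.g.
// when preparing an environment object for a Windows child process.
console.log(pathKey({env: {Path: 'C:\\Windows'}, platform: 'win32'}));
//=> 'Path'
```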
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md new file mode 100644 index 0000000..8ccc6c1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md @@ -0,0 +1,136 @@ +# Release history + +**All notable changes to this project will be documented in this file.** + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+ Guiding Principles + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each versions is displayed. +- Mention whether you follow Semantic Versioning. + +
+ +
+ Types of changes + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. + +
+ +## 2.3.1 (2022-01-02) + +### Fixed + +* Fixes bug when a pattern containing an expression after the closing parenthesis (`/!(*.d).{ts,tsx}`) was incorrectly converted to regexp ([9f241ef](https://github.com/micromatch/picomatch/commit/9f241ef)). + +### Changed + +* Some documentation improvements ([f81d236](https://github.com/micromatch/picomatch/commit/f81d236), [421e0e7](https://github.com/micromatch/picomatch/commit/421e0e7)). + +## 2.3.0 (2021-05-21) + +### Fixed + +* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) + +## 2.2.3 (2021-04-10) + +### Fixed + +* Do not skip pattern seperator for square brackets ([fb08a30](https://github.com/micromatch/picomatch/commit/fb08a30)). +* Set negatedExtGlob also if it does not span the whole pattern ([032e3f5](https://github.com/micromatch/picomatch/commit/032e3f5)). + +## 2.2.2 (2020-03-21) + +### Fixed + +* Correctly handle parts of the pattern after parentheses in the `scan` method ([e15b920](https://github.com/micromatch/picomatch/commit/e15b920)). + +## 2.2.1 (2020-01-04) + +* Fixes [#49](https://github.com/micromatch/picomatch/issues/49), so that braces with no sets or ranges are now propertly treated as literals. + +## 2.2.0 (2020-01-04) + +* Disable fastpaths mode for the parse method ([5b8d33f](https://github.com/micromatch/picomatch/commit/5b8d33f)) +* Add `tokens`, `slashes`, and `parts` to the object returned by `picomatch.scan()`. + +## 2.1.0 (2019-10-31) + +* add benchmarks for scan ([4793b92](https://github.com/micromatch/picomatch/commit/4793b92)) +* Add eslint object-curly-spacing rule ([707c650](https://github.com/micromatch/picomatch/commit/707c650)) +* Add prefer-const eslint rule ([5c7501c](https://github.com/micromatch/picomatch/commit/5c7501c)) +* Add support for nonegate in scan API ([275c9b9](https://github.com/micromatch/picomatch/commit/275c9b9)) +* Change lets to consts. Move root import up. ([4840625](https://github.com/micromatch/picomatch/commit/4840625)) +* closes https://github.com/micromatch/picomatch/issues/21 ([766bcb0](https://github.com/micromatch/picomatch/commit/766bcb0)) +* Fix "Extglobs" table in readme ([eb19da8](https://github.com/micromatch/picomatch/commit/eb19da8)) +* fixes https://github.com/micromatch/picomatch/issues/20 ([9caca07](https://github.com/micromatch/picomatch/commit/9caca07)) +* fixes https://github.com/micromatch/picomatch/issues/26 ([fa58f45](https://github.com/micromatch/picomatch/commit/fa58f45)) +* Lint test ([d433a34](https://github.com/micromatch/picomatch/commit/d433a34)) +* lint unit tests ([0159b55](https://github.com/micromatch/picomatch/commit/0159b55)) +* Make scan work with noext ([6c02e03](https://github.com/micromatch/picomatch/commit/6c02e03)) +* minor linting ([c2a2b87](https://github.com/micromatch/picomatch/commit/c2a2b87)) +* minor parser improvements ([197671d](https://github.com/micromatch/picomatch/commit/197671d)) +* remove eslint since it... 
([07876fa](https://github.com/micromatch/picomatch/commit/07876fa)) +* remove funding file ([8ebe96d](https://github.com/micromatch/picomatch/commit/8ebe96d)) +* Remove unused funks ([cbc6d54](https://github.com/micromatch/picomatch/commit/cbc6d54)) +* Run eslint during pretest, fix existing eslint findings ([0682367](https://github.com/micromatch/picomatch/commit/0682367)) +* support `noparen` in scan ([3d37569](https://github.com/micromatch/picomatch/commit/3d37569)) +* update changelog ([7b34e77](https://github.com/micromatch/picomatch/commit/7b34e77)) +* update travis ([777f038](https://github.com/micromatch/picomatch/commit/777f038)) +* Use eslint-disable-next-line instead of eslint-disable ([4e7c1fd](https://github.com/micromatch/picomatch/commit/4e7c1fd)) + +## 2.0.7 (2019-05-14) + +* 2.0.7 ([9eb9a71](https://github.com/micromatch/picomatch/commit/9eb9a71)) +* supports lookbehinds ([1f63f7e](https://github.com/micromatch/picomatch/commit/1f63f7e)) +* update .verb.md file with typo change ([2741279](https://github.com/micromatch/picomatch/commit/2741279)) +* fix: typo in README ([0753e44](https://github.com/micromatch/picomatch/commit/0753e44)) + +## 2.0.4 (2019-04-10) + +### Fixed + +- Readme link [fixed](https://github.com/micromatch/picomatch/pull/13/commits/a96ab3aa2b11b6861c23289964613d85563b05df) by @danez. +- `options.capture` now works as expected when fastpaths are enabled. See https://github.com/micromatch/picomatch/pull/12/commits/26aefd71f1cfaf95c37f1c1fcab68a693b037304. Thanks to @DrPizza. + +## 2.0.0 (2019-04-10) + +### Added + +- Adds support for `options.onIgnore`. See the readme for details +- Adds support for `options.onResult`. See the readme for details + +### Breaking changes + +- The unixify option was renamed to `windows` +- caching and all related options and methods have been removed + +## 1.0.0 (2018-11-05) + +- adds `.onMatch` option +- improvements to `.scan` method +- numerous improvements and optimizations for matching and parsing +- better windows path handling + +## 0.1.0 - 2017-04-13 + +First release. + + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE new file mode 100644 index 0000000..3608dca --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md new file mode 100644 index 0000000..b0526e2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md @@ -0,0 +1,708 @@ +

Picomatch

+ +

+ +version + + +test status + + +coverage status + + +downloads + +

+ +
+
+ +

+Blazing fast and accurate glob matcher written in JavaScript.
+No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions. +

+ +
+
+ +## Why picomatch? + +* **Lightweight** - No dependencies +* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. +* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of an HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) +* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) +* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. +* **Well tested** - Thousands of unit tests + +See the [library comparison](#library-comparisons) to other libraries. + +
+
+ +## Table of Contents + +
Click to expand + +- [Install](#install) +- [Usage](#usage) +- [API](#api) + * [picomatch](#picomatch) + * [.test](#test) + * [.matchBase](#matchbase) + * [.isMatch](#ismatch) + * [.parse](#parse) + * [.scan](#scan) + * [.compileRe](#compilere) + * [.makeRe](#makere) + * [.toRegex](#toregex) +- [Options](#options) + * [Picomatch options](#picomatch-options) + * [Scan Options](#scan-options) + * [Options Examples](#options-examples) +- [Globbing features](#globbing-features) + * [Basic globbing](#basic-globbing) + * [Advanced globbing](#advanced-globbing) + * [Braces](#braces) + * [Matching special characters as literals](#matching-special-characters-as-literals) +- [Library Comparisons](#library-comparisons) +- [Benchmarks](#benchmarks) +- [Philosophies](#philosophies) +- [About](#about) + * [Author](#author) + * [License](#license) + +_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ + +
+ +
+
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +npm install --save picomatch +``` + +
+ +## Usage + +The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. + +```js +const pm = require('picomatch'); +const isMatch = pm('*.js'); + +console.log(isMatch('abcd')); //=> false +console.log(isMatch('a.js')); //=> true +console.log(isMatch('a.md')); //=> false +console.log(isMatch('a/b.js')); //=> false +``` + +
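+
+The returned matcher also accepts a boolean as its second argument; when `true`, it returns an object with match details instead of a boolean. The following is a small illustrative sketch that reuses the `isMatch` matcher created above (the exact set of properties on the result object is described in the API section below):
+
+```js
+const result = isMatch('a.js', true);
+console.log(result.isMatch); //=> true
+console.log(result.glob);    //=> '*.js'
+console.log(result.output);  //=> 'a.js'
+```
+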
+ +## API + +### [picomatch](lib/picomatch.js#L32) + +Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. + +**Params** + +* `globs` **{String|Array}**: One or more glob patterns. +* `options` **{Object=}** +* `returns` **{Function=}**: Returns a matcher function. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch(glob[, options]); + +const isMatch = picomatch('*.!(*a)'); +console.log(isMatch('a.a')); //=> false +console.log(isMatch('a.b')); //=> true +``` + +### [.test](lib/picomatch.js#L117) + +Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. + +**Params** + +* `input` **{String}**: String to test. +* `regex` **{RegExp}** +* `returns` **{Object}**: Returns an object with matching info. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.test(input, regex[, options]); + +console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); +// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } +``` + +### [.matchBase](lib/picomatch.js#L161) + +Match the basename of a filepath. + +**Params** + +* `input` **{String}**: String to test. +* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). +* `returns` **{Boolean}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.matchBase(input, glob[, options]); +console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true +``` + +### [.isMatch](lib/picomatch.js#L183) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* **{String|Array}**: str The string to test. +* **{String|Array}**: patterns One or more glob patterns to use for matching. +* **{Object}**: See available [options](#options). +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.isMatch(string, patterns[, options]); + +console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true +console.log(picomatch.isMatch('a.a', 'b.*')); //=> false +``` + +### [.parse](lib/picomatch.js#L199) + +Parse a glob pattern to create the source string for a regular expression. + +**Params** + +* `pattern` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.parse(pattern[, options]); +``` + +### [.scan](lib/picomatch.js#L231) + +Scan a glob pattern to separate the pattern into segments. + +**Params** + +* `input` **{String}**: Glob pattern to scan. +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.scan(input[, options]); + +const result = picomatch.scan('!./foo/*.js'); +console.log(result); +{ prefix: '!./', + input: '!./foo/*.js', + start: 3, + base: 'foo', + glob: '*.js', + isBrace: false, + isBracket: false, + isGlob: true, + isExtglob: false, + isGlobstar: false, + negated: true } +``` + +### [.compileRe](lib/picomatch.js#L245) + +Compile a regular expression from the `state` object returned by the +[parse()](#parse) method. 
+ +**Params** + +* `state` **{Object}** +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. +* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. +* `returns` **{RegExp}** + +### [.makeRe](lib/picomatch.js#L286) + +Create a regular expression from a parsed glob pattern. + +**Params** + +* `state` **{String}**: The object returned from the `.parse` method. +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. +* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +const picomatch = require('picomatch'); +const state = picomatch.parse('*.js'); +// picomatch.compileRe(state[, options]); + +console.log(picomatch.compileRe(state)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +### [.toRegex](lib/picomatch.js#L321) + +Create a regular expression from the given regex source string. + +**Params** + +* `source` **{String}**: Regular expression source string. +* `options` **{Object}** +* `returns` **{RegExp}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.toRegex(source[, options]); + +const { output } = picomatch.parse('*.js'); +console.log(picomatch.toRegex(output)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +
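+
+As a quick illustration of how these methods fit together, here is a short sketch based on the signatures documented above (the exact regex source may vary between versions):
+
+```js
+const picomatch = require('picomatch');
+
+// .parse produces a state object, .compileRe compiles it into a RegExp,
+// and .makeRe performs both steps in a single call.
+const state = picomatch.parse('*.js');
+const regex = picomatch.compileRe(state);
+
+console.log(regex.test('index.js'));                    //=> true
+console.log(picomatch.makeRe('*.js').test('index.js')); //=> true
+console.log(picomatch.test('index.js', regex).isMatch); //=> true
+```
+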
+ +## Options + +### Picomatch options + +The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | +| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | +| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | +| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | +| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | +| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | +| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | +| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | +| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | +| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | +| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | +| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | +| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | +| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | +| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | +| `matchBase` | `boolean` | `false` | Alias for `basename` | +| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | +| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | +| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | +| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | +| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| +| `noext` | `boolean` | `false` | Alias for `noextglob` | +| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | +| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | +| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | +| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | +| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | +| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | +| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | +| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | +| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | +| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | +| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | +| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | +| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | +| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | +| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | + +picomatch has automatic detection for regex positive and negative lookbehinds. If the pattern contains a negative lookbehind, you must be using Node.js >= 8.10 or else picomatch will throw an error. + +### Scan Options + +In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | +| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.scan('!./foo/*.js', { tokens: true }); +console.log(result); +// { +// prefix: '!./', +// input: '!./foo/*.js', +// start: 3, +// base: 'foo', +// glob: '*.js', +// isBrace: false, +// isBracket: false, +// isGlob: true, +// isExtglob: false, +// isGlobstar: false, +// negated: true, +// maxDepth: 2, +// tokens: [ +// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, +// { value: 'foo', depth: 1, isGlob: false }, +// { value: '*.js', depth: 1, isGlob: true } +// ], +// slashes: [ 2, 6 ], +// parts: [ 'foo', '*.js' ] +// } +``` + +
+ +### Options Examples + +#### options.expandRange + +**Type**: `function` + +**Default**: `undefined` + +Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. + +**Example** + +The following example shows how to create a glob that matches a folder + +```js +const fill = require('fill-range'); +const regex = pm.makeRe('foo/{01..25}/bar', { + expandRange(a, b) { + return `(${fill(a, b, { toRegex: true })})`; + } +}); + +console.log(regex); +//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ + +console.log(regex.test('foo/00/bar')) // false +console.log(regex.test('foo/01/bar')) // true +console.log(regex.test('foo/10/bar')) // true +console.log(regex.test('foo/22/bar')) // true +console.log(regex.test('foo/25/bar')) // true +console.log(regex.test('foo/26/bar')) // false +``` + +#### options.format + +**Type**: `function` + +**Default**: `undefined` + +Custom function for formatting strings before they're matched. + +**Example** + +```js +// strip leading './' from strings +const format = str => str.replace(/^\.\//, ''); +const isMatch = picomatch('foo/*.js', { format }); +console.log(isMatch('./foo/bar.js')); //=> true +``` + +#### options.onMatch + +```js +const onMatch = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onMatch }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onIgnore + +```js +const onIgnore = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onResult + +```js +const onResult = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onResult, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +
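+
+#### Other options (`dot`, `nocase`, `ignore`)
+
+A brief sketch of a few other options from the table above; the expected results follow the descriptions in the options table:
+
+```js
+const picomatch = require('picomatch');
+
+// dot: allow `*` to match dotfiles
+console.log(picomatch('*')('.npmrc'));                //=> false
+console.log(picomatch('*', { dot: true })('.npmrc')); //=> true
+
+// nocase: case-insensitive matching
+console.log(picomatch('*.md', { nocase: true })('README.MD')); //=> true
+
+// ignore: exclude strings that would otherwise match
+console.log(picomatch('*.js', { ignore: 'vendor*' })('vendor.js')); //=> false
+console.log(picomatch('*.js', { ignore: 'vendor*' })('app.js'));    //=> true
+```
+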
+
+ +## Globbing features + +* [Basic globbing](#basic-globbing) (Wildcard matching) +* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching) + +### Basic globbing + +| **Character** | **Description** | +| --- | --- | +| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. | +| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` on Windows) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. | +| `?` | Matches exactly one character, excluding path separators. Does _not match_ path separators or leading dots. | +| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. | + +#### Matching behavior vs. Bash + +Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions: + +* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`. +* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`. + +
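+
+The following sketch illustrates the basic globbing rules from the table above (results follow the descriptions given there):
+
+```js
+const pm = require('picomatch');
+
+console.log(pm.isMatch('a/b.js', '*.js'));    //=> false - `*` does not cross path separators
+console.log(pm.isMatch('a/b.js', '**/*.js')); //=> true  - `**` matches nested directories
+console.log(pm.isMatch('a.js', '?.js'));      //=> true  - `?` matches exactly one character
+console.log(pm.isMatch('ab.js', '?.js'));     //=> false
+console.log(pm.isMatch('a.js', '[ab].js'));   //=> true  - `[ab]` matches `a` or `b`
+console.log(pm.isMatch('c.js', '[ab].js'));   //=> false
+```
+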
+ +### Advanced globbing + +* [extglobs](#extglobs) +* [POSIX brackets](#posix-brackets) +* [Braces](#brace-expansion) + +#### Extglobs + +| **Pattern** | **Description** | +| --- | --- | +| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` | +| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` | +| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` | +| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` | +| `!(pattern)` | Match _anything but_ `pattern` | + +**Examples** + +```js +const pm = require('picomatch'); + +// *(pattern) matches ZERO or more of "pattern" +console.log(pm.isMatch('a', 'a*(z)')); // true +console.log(pm.isMatch('az', 'a*(z)')); // true +console.log(pm.isMatch('azzz', 'a*(z)')); // true + +// +(pattern) matches ONE or more of "pattern" +console.log(pm.isMatch('a', 'a+(z)')); // false +console.log(pm.isMatch('az', 'a+(z)')); // true +console.log(pm.isMatch('azzz', 'a+(z)')); // true + +// supports multiple extglobs +console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false + +// supports nested extglobs +console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true +``` + +#### POSIX brackets + +POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true. + +**Enable POSIX bracket support** + +```js +console.log(pm.makeRe('[[:word:]]+', { posix: true })); +//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/ +``` + +**Supported POSIX classes** + +The following named POSIX bracket expressions are supported: + +* `[:alnum:]` - Alphanumeric characters, equivalent to `[a-zA-Z0-9]`. +* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`. +* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`. +* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`. +* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`. +* `[:digit:]` - Numerical digits, equivalent to `[0-9]`. +* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`. +* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`. +* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`. +* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`. +* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`. +* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`. +* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`. +* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`. + +See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information. + +### Braces + +Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces. + +### Matching special characters as literals + +If you wish to match the following special characters in a filepath, and you want to use these characters in your glob pattern, they must be escaped with backslashes or quotes: + +**Special Characters** + +Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms.
+ +To match any of the following characters as literals, escape them with backslashes: `$^*+?()[]` + +Example: + +```js +console.log(pm.makeRe('foo/bar \\(1\\)')); +``` + +
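+
+For instance, with the parentheses escaped as above, the pattern matches them as literal characters. This is a small sketch; the expected results are inferred from the escaping rules described in this section:
+
+```js
+const pm = require('picomatch');
+
+console.log(pm.isMatch('foo/bar (1)', 'foo/bar \\(1\\)')); //=> true
+console.log(pm.isMatch('foo/bar 1', 'foo/bar \\(1\\)'));   //=> false
+```
+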
+
+ +## Library Comparisons + +The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets). + +| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` | +| --- | --- | --- | --- | --- | --- | --- | --- | +| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - | +| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - | +| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - | +| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - | +| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - | +| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ | +| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ | +| File system operations | - | - | - | - | - | - | - | + +
+
+ +## Benchmarks + +Performance comparison of picomatch and minimatch. + +``` +# .makeRe star + picomatch x 1,993,050 ops/sec ±0.51% (91 runs sampled) + minimatch x 627,206 ops/sec ±1.96% (87 runs sampled) + +# .makeRe star; dot=true + picomatch x 1,436,640 ops/sec ±0.62% (91 runs sampled) + minimatch x 525,876 ops/sec ±0.60% (88 runs sampled) + +# .makeRe globstar + picomatch x 1,592,742 ops/sec ±0.42% (90 runs sampled) + minimatch x 962,043 ops/sec ±1.76% (91 runs sampled) + +# .makeRe globstars + picomatch x 1,615,199 ops/sec ±0.35% (94 runs sampled) + minimatch x 477,179 ops/sec ±1.33% (91 runs sampled) + +# .makeRe with leading star + picomatch x 1,220,856 ops/sec ±0.40% (92 runs sampled) + minimatch x 453,564 ops/sec ±1.43% (94 runs sampled) + +# .makeRe - basic braces + picomatch x 392,067 ops/sec ±0.70% (90 runs sampled) + minimatch x 99,532 ops/sec ±2.03% (87 runs sampled) +``` + +
+
+ +## Philosophies + +The goal of this library is to be blazing fast, without compromising on accuracy. + +**Accuracy** + +The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly more complicated when different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`. + +Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to use our best judgement and rely on feedback from users to make improvements. + +**Performance** + +Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer. + +
+
+ +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js new file mode 100644 index 0000000..d2f2bc5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./lib/picomatch'); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js new file mode 100644 index 0000000..a62ef38 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js @@ -0,0 +1,179 @@ +'use strict'; + +const path = require('path'); +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + +/** + * Posix glob regex + */ + +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; + +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR +}; + +/** + * Windows glob regex + */ + +const WINDOWS_CHARS = { + ...POSIX_CHARS, + + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)` +}; + +/** + * POSIX Bracket Regex + */ + +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; + +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, + + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + + // Replace globs with equivalent patterns to reduce parsing time. 
+ REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, + + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ + + // Alphabet chars. + CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ + + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ + + CHAR_ASTERISK: 42, /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + + SEP: path.sep, + + /** + * Create EXTGLOB_CHARS + */ + + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, + + /** + * Create GLOB_CHARS + */ + + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js new file mode 100644 index 0000000..58269d0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js @@ -0,0 +1,1091 @@ +'use strict'; + +const constants = require('./constants'); +const utils = require('./utils'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; + +/** + * Helpers + */ + +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } + + args.sort(); + const value = `[${args.join('-')}]`; + + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; +}; + +/** + * Create the message for a syntax error + */ + +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. 
+ * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? '' : '?:'; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(win32); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; + + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; + + input = utils.removePrefix(input, state); + len = input.length; + + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; + + /** + * Tokenizing helpers + */ + + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; + + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; + + const negate = () => { + let count = 1; + + while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } + + if (count % 2 === 0) { + return false; + } + + state.negated = true; + state.start++; + return true; + }; + + const increment = type => { + state[type]++; + stack.push(type); + }; + + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. 
+ */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } + + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } + + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } + + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; + + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; + + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; + + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; + + if (token.type === 'negate') { + let extglobStar = star; + + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } + + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } + + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } + + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; + + /** + * Fast paths + */ + + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; + + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } + + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } + + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } + + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); + + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); + }); + } + } + + if (output === input && opts.contains === true) { + state.output = input; + return state; + } + + state.output = utils.wrapOutput(output, state, options); + return state; + } + + /** + * Tokenize input until we reach end-of-string + */ + + while (!eos()) { + value = advance(); + + if (value === '\u0000') { + continue; + } + + /** + * Escaped characters + */ + + if (value === '\\') { + const next = peek(); + + if (next === '/' && opts.bash !== true) { + continue; + } + + if (next === '.' || next === ';') { + continue; + } + + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } + + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; + + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } + + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } + + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } + + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ + + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; + + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); + + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } + + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } + + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } + + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } + + prev.value += value; + append({ value }); + continue; + } + + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ + + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } + + /** + * Double quotes + */ + + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } + + /** + * Parentheses + */ + + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } + + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } + + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } + + push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); + decrement('parens'); + continue; + } + + /** + * Square brackets + */ + + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + + value = `\\${value}`; + } else { + increment('brackets'); + } + + push({ type: 'bracket', value }); + continue; + } + + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + decrement('brackets'); + + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } + + prev.value += value; + append({ value }); + + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } + + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); + + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } + + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } + + /** + * Braces + */ + + if (value === '{' && opts.nobrace !== true) { + increment('braces'); + + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; + + braces.push(open); + push(open); + continue; + } + + if (value === '}') { + const brace = braces[braces.length - 1]; + + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } + + let output = ')'; + + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; + + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } + + output = expandRange(range, opts); + state.backtrack = true; + } + + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } + + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } + + /** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } + + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } + + push({ type: 'comma', value, output }); + continue; + } + + /** + * Slashes + */ + + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current 
index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } + + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } + + /** + * Dots + */ + + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } + + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } + + /** + * Question marks + */ + + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } + + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); + } + + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } + + push({ type: 'text', value, output }); + continue; + } + + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } + + push({ type: 'qmark', value, output: QMARK }); + continue; + } + + /** + * Exclamation + */ + + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } + + /** + * Plus + */ + + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } + + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } + + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } + + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } + + /** + * Plain text + */ + + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Plain text + */ + + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } + + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Stars + */ + + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } + + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } + + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } + + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); + + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } + + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } + + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; + + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; + + state.output += prior.output + prev.output; + state.globstar = true; + + consume(value + advance()); + + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); + + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; + + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + + const token = { type: 'star', value, output: star }; + + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } + + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } + + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; + + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; + + } else { + state.output += nodot; + prev.output += nodot; + } + + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } + + push(token); + } + + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } + + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } + + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } + + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } + + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; + + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; + + if (token.suffix) { + state.output += token.suffix; + } + } + } + + return state; +}; + +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ + +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + input = REPLACEMENTS[input] || input; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(win32); + + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; + + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + + case '**': + return nodot + globstar(opts); + + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; + + const source = create(match[1]); + if (!source) return; + + return source + DOT_LITERAL + match[2]; + } + } + }; + + const output = utils.removePrefix(input, state); + let source = create(output); + + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } + + return source; +}; + +module.exports = parse; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js new file mode 100644 index 0000000..782d809 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js @@ -0,0 +1,342 @@ +'use strict'; + +const path = require('path'); +const scan = require('./scan'); +const parse = require('./parse'); +const utils = require('./utils'); +const constants = require('./constants'); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. 
+ * @api public + */ + +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } + + const isState = isObject(glob) && glob.tokens && glob.input; + + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } + + const opts = options || {}; + const posix = utils.isWindows(options); + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); + + const state = regex.state; + delete regex.state; + + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } + + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; + + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } + + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } + + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } + + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; + + if (returnState) { + matcher.state = state; + } + + return matcher; +}; + +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ + +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } + + if (input === '') { + return { isMatch: false, output: '' }; + } + + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; + + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } + + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } + + return { isMatch: Boolean(match), match, output }; +}; + +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
+ * @return {Boolean} + * @api public + */ + +picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(path.basename(input)); +}; + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ + +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; + +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +picomatch.scan = (input, options) => scan(input, options); + +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + +/** + * Create a regular expression from a parsed glob pattern. 
+ * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } + + let parsed = { negated: false, fastpaths: true }; + + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } + + if (!parsed.output) { + parsed = parse(input, options); + } + + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; + +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; + +/** + * Picomatch constants. + * @return {Object} + */ + +picomatch.constants = constants; + +/** + * Expose "picomatch" + */ + +module.exports = picomatch; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js new file mode 100644 index 0000000..e59cd7a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js @@ -0,0 +1,391 @@ +'use strict'; + +const utils = require('./utils'); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = require('./constants'); + +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; + +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? 
Infinity : 1; + } +}; + +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). + * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. + * @api public + */ + +const scan = (input, options) => { + const opts = options || {}; + + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; + + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; + + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; + + while (index < length) { + code = advance(); + let next; + + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } + + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; + + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } + + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; + + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; + + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } + + lastIndex = index + 1; + continue; + } + + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if 
(code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } + + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js new file mode 100644 index 0000000..c3ca766 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js @@ -0,0 +1,64 @@ +'use strict'; + +const path = require('path'); +const win32 = process.platform === 'win32'; +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = require('./constants'); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.supportsLookbehinds = () => { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + return false; +}; + +exports.isWindows = options => { + if (options && typeof options.windows === 'boolean') { + return options.windows; + } + return win32 === true || path.sep === '\\'; +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? 
'' : '$'; + + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json new file mode 100644 index 0000000..3db22d4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json @@ -0,0 +1,81 @@ +{ + "name": "picomatch", + "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", + "version": "2.3.1", + "homepage": "https://github.com/micromatch/picomatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "funding": "https://github.com/sponsors/jonschlinkert", + "repository": "micromatch/picomatch", + "bugs": { + "url": "https://github.com/micromatch/picomatch/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8.6" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "devDependencies": { + "eslint": "^6.8.0", + "fill-range": "^7.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.2.2", + "nyc": "^15.0.0", + "time-require": "github:jonschlinkert/time-require" + }, + "keywords": [ + "glob", + "match", + "picomatch" + ], + "nyc": { + "reporter": [ + "html", + "lcov", + "text-summary" + ] + }, + "verb": { + "toc": { + "render": true, + "method": "preWrite", + "maxdepth": 3 + }, + "layout": "empty", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "braces", + "micromatch" + ] + }, + "reflinks": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "micromatch", + "minimatch", + "nanomatch", + "picomatch" + ] + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE new file mode 100755 index 0000000..c7e6852 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md new file mode 100644 index 0000000..0be05a6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md @@ -0,0 +1,90 @@ +# queue-microtask [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[ci-image]: https://img.shields.io/github/workflow/status/feross/queue-microtask/ci/master +[ci-url]: https://github.com/feross/queue-microtask/actions +[npm-image]: https://img.shields.io/npm/v/queue-microtask.svg +[npm-url]: https://npmjs.org/package/queue-microtask +[downloads-image]: https://img.shields.io/npm/dm/queue-microtask.svg +[downloads-url]: https://npmjs.org/package/queue-microtask +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### fast, tiny [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) shim for modern engines + +- Use [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) in all modern JS engines. +- No dependencies. Less than 10 lines. No shims or complicated fallbacks. +- Optimal performance in all modern environments + - Uses `queueMicrotask` in modern environments + - Fallback to `Promise.resolve().then(fn)` in Node.js 10 and earlier, and old browsers (same performance as `queueMicrotask`) + +## install + +``` +npm install queue-microtask +``` + +## usage + +```js +const queueMicrotask = require('queue-microtask') + +queueMicrotask(() => { /* this will run soon */ }) +``` + +## What is `queueMicrotask` and why would one use it? + +The `queueMicrotask` function is a WHATWG standard. It queues a microtask to be executed prior to control returning to the event loop. + +A microtask is a short function which will run after the current task has completed its work and when there is no other code waiting to be run before control of the execution context is returned to the event loop. + +The code `queueMicrotask(fn)` is equivalent to the code `Promise.resolve().then(fn)`. It is also very similar to [`process.nextTick(fn)`](https://nodejs.org/api/process.html#process_process_nexttick_callback_args) in Node. + +Using microtasks lets code run without interfering with any other, potentially higher priority, code that is pending, but before the JS engine regains control over the execution context. + +See the [spec](https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#microtask-queuing) or [Node documentation](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback) for more information. + +## Who is this package for? + +This package allows you to use `queueMicrotask` safely in all modern JS engines. Use it if you prioritize small JS bundle size over support for old browsers. + +If you just need to support Node 12 and later, use `queueMicrotask` directly. If you need to support all versions of Node, use this package. + +## Why not use `process.nextTick`? 
+ +In Node, `queueMicrotask` and `process.nextTick` are [essentially equivalent](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback), though there are [subtle differences](https://github.com/YuzuJS/setImmediate#macrotasks-and-microtasks) that don't matter in most situations. + +You can think of `queueMicrotask` as a standardized version of `process.nextTick` that works in the browser. No need to rely on your browser bundler to shim `process` for the browser environment. + +## Why not use `setTimeout(fn, 0)`? + +This approach is the most compatible, but it has problems. Modern browsers throttle timers severely, so `setTimeout(…, 0)` usually takes at least 4ms to run. Furthermore, the throttling gets even worse if the page is backgrounded. If you have many `setTimeout` calls, then this can severely limit the performance of your program. + +## Why not use a microtask library like [`immediate`](https://www.npmjs.com/package/immediate) or [`asap`](https://www.npmjs.com/package/asap)? + +These packages are great! However, if you prioritize small JS bundle size over optimal performance in old browsers then you may want to consider this package. + +This package (`queue-microtask`) is four times smaller than `immediate`, twice as small as `asap`, and twice as small as using `process.nextTick` and letting the browser bundler shim it automatically. + +Note: This package throws an exception in JS environments which lack `Promise` support -- which are usually very old browsers and Node.js versions. + +Since the `queueMicrotask` API is supported in Node.js, Chrome, Firefox, Safari, Opera, and Edge, **the vast majority of users will get optimal performance**. Any JS environment with `Promise`, which is almost all of them, also get optimal performance. If you need support for JS environments which lack `Promise` support, use one of the alternative packages. + +## What is a shim? + +> In computer programming, a shim is a library that transparently intercepts API calls and changes the arguments passed, handles the operation itself or redirects the operation elsewhere. – [Wikipedia](https://en.wikipedia.org/wiki/Shim_(computing)) + +This package could also be described as a "ponyfill". + +> A ponyfill is almost the same as a polyfill, but not quite. Instead of patching functionality for older browsers, a ponyfill provides that functionality as a standalone module you can use. – [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills) + +## API + +### `queueMicrotask(fn)` + +The `queueMicrotask()` method queues a microtask. + +The `fn` argument is a function to be executed after all pending tasks have completed but before yielding control to the browser's event loop. + +## license + +MIT. Copyright (c) [Feross Aboukhadijeh](https://feross.org). 
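
A minimal sketch of the ordering the README above describes, assuming the shimmed `queueMicrotask` exported by this package; the point is the relative order of the three log lines (synchronous code, then microtasks, then timer callbacks), not the exact timing:

```js
const queueMicrotask = require('queue-microtask')

setTimeout(() => console.log('3: timer callback (macrotask)'), 0)
queueMicrotask(() => console.log('2: microtask, runs before any timer'))
console.log('1: synchronous code finishes first')
```
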
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts new file mode 100644 index 0000000..b6a8646 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts @@ -0,0 +1,2 @@ +declare const queueMicrotask: (cb: () => void) => void +export = queueMicrotask diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js new file mode 100644 index 0000000..5560534 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js @@ -0,0 +1,9 @@ +/*! queue-microtask. MIT License. Feross Aboukhadijeh */ +let promise + +module.exports = typeof queueMicrotask === 'function' + ? queueMicrotask.bind(typeof window !== 'undefined' ? window : global) + // reuse resolved promise, and allocate it lazily + : cb => (promise || (promise = Promise.resolve())) + .then(cb) + .catch(err => setTimeout(() => { throw err }, 0)) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json new file mode 100644 index 0000000..d29a401 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json @@ -0,0 +1,55 @@ +{ + "name": "queue-microtask", + "description": "fast, tiny `queueMicrotask` shim for modern engines", + "version": "1.2.3", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/queue-microtask/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.2.2" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/queue-microtask", + "keywords": [ + "asap", + "immediate", + "micro task", + "microtask", + "nextTick", + "process.nextTick", + "queue micro task", + "queue microtask", + "queue-microtask", + "queueMicrotask", + "setImmediate", + "task" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/queue-microtask.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml new file mode 100644 index 0000000..4872c5a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml @@ -0,0 +1,7 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml new file mode 100644 index 0000000..1e30ad8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml @@ -0,0 +1,96 @@ +name: ci + +on: [push, pull_request] + +jobs: + legacy: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: ['0.10', '0.12', 
4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install --production && npm install tape + + - name: Run tests + run: | + npm run test + + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [18.x, 20.x, 22.x] + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test:coverage + + types: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install + run: | + npm install + + - name: Run types tests + run: | + npm run test:typescript + + lint: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install + run: | + npm install + + - name: Lint + run: | + npm run lint diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE new file mode 100644 index 0000000..56d1590 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015-2024 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md new file mode 100644 index 0000000..1aaee5d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md @@ -0,0 +1,139 @@ +# reusify + +[![npm version][npm-badge]][npm-url] + +Reuse your objects and functions for maximum speed. This technique will +make any function run ~10% faster. You call your functions a +lot, and it adds up quickly in hot code paths. 
+ +``` +$ node benchmarks/createNoCodeFunction.js +Total time 53133 +Total iterations 100000000 +Iteration/s 1882069.5236482036 + +$ node benchmarks/reuseNoCodeFunction.js +Total time 50617 +Total iterations 100000000 +Iteration/s 1975620.838848608 +``` + +The above benchmark uses fibonacci to simulate a real high-cpu load. +The actual numbers might differ for your use case, but the difference +should not. + +The benchmark was taken using Node v6.10.0. + +This library was extracted from +[fastparallel](http://npm.im/fastparallel). + +## Example + +```js +var reusify = require('reusify') +var fib = require('reusify/benchmarks/fib') +var instance = reusify(MyObject) + +// get an object from the cache, +// or creates a new one when cache is empty +var obj = instance.get() + +// set the state +obj.num = 100 +obj.func() + +// reset the state. +// if the state contains any external object +// do not use delete operator (it is slow) +// prefer set them to null +obj.num = 0 + +// store an object in the cache +instance.release(obj) + +function MyObject () { + // you need to define this property + // so V8 can compile MyObject into an + // hidden class + this.next = null + this.num = 0 + + var that = this + + // this function is never reallocated, + // so it can be optimized by V8 + this.func = function () { + if (null) { + // do nothing + } else { + // calculates fibonacci + fib(that.num) + } + } +} +``` + +The above example was intended for synchronous code, let's see async: +```js +var reusify = require('reusify') +var instance = reusify(MyObject) + +for (var i = 0; i < 100; i++) { + getData(i, console.log) +} + +function getData (value, cb) { + var obj = instance.get() + + obj.value = value + obj.cb = cb + obj.run() +} + +function MyObject () { + this.next = null + this.value = null + + var that = this + + this.run = function () { + asyncOperation(that.value, that.handle) + } + + this.handle = function (err, result) { + that.cb(err, result) + that.value = null + that.cb = null + instance.release(that) + } +} +``` + +Also note how in the above examples, the code, that consumes an instance of `MyObject`, +reset the state to initial condition, just before storing it in the cache. +That's needed so that every subsequent request for an instance from the cache, +could get a clean instance. + +## Why + +It is faster because V8 doesn't have to collect all the functions you +create. On a short-lived benchmark, it is as fast as creating the +nested function, but on a longer time frame it creates less +pressure on the garbage collector. + +## Other examples +If you want to see some complex example, checkout [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed). + +## Acknowledgements + +Thanks to [Trevor Norris](https://github.com/trevnorris) for +getting me down the rabbit hole of performance, and thanks to [Mathias +Buss](http://github.com/mafintosh) for suggesting me to share this +trick. 
+ +## License + +MIT + +[npm-badge]: https://badge.fury.io/js/reusify.svg +[npm-url]: https://badge.fury.io/js/reusify diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md new file mode 100644 index 0000000..dd9f1d5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md @@ -0,0 +1,15 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ------- | ------------------ | +| 1.x | :white_check_mark: | +| < 1.0 | :x: | + +## Reporting a Vulnerability + +Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js new file mode 100644 index 0000000..ce1aac7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js @@ -0,0 +1,30 @@ +'use strict' + +var fib = require('./fib') +var max = 100000000 +var start = Date.now() + +// create a funcion with the typical error +// pattern, that delegates the heavy load +// to something else +function createNoCodeFunction () { + /* eslint no-constant-condition: "off" */ + var num = 100 + + ;(function () { + if (null) { + // do nothing + } else { + fib(num) + } + })() +} + +for (var i = 0; i < max; i++) { + createNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js new file mode 100644 index 0000000..e22cc48 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js @@ -0,0 +1,13 @@ +'use strict' + +function fib (num) { + var fib = [] + + fib[0] = 0 + fib[1] = 1 + for (var i = 2; i <= num; i++) { + fib[i] = fib[i - 2] + fib[i - 1] + } +} + +module.exports = fib diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js new file mode 100644 index 0000000..3358d6e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js @@ -0,0 +1,38 @@ +'use strict' + +var reusify = require('../') +var fib = require('./fib') +var instance = reusify(MyObject) +var max = 100000000 +var start = Date.now() + +function reuseNoCodeFunction () { + var obj = instance.get() + obj.num = 100 + obj.func() + obj.num = 0 + instance.release(obj) +} + +function MyObject () { + this.next = null + var that = this + this.num = 0 + this.func = function () { + /* eslint no-constant-condition: "off" */ + if (null) { + // do nothing + } else { + fib(that.num) + } + } +} + +for (var i = 0; i < max; i++) { + reuseNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js new file mode 100644 index 0000000..d0a9af6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js @@ -0,0 +1,14 @@ +'use strict' + +const base = require('neostandard')({}) + +module.exports = [ + ...base, + { + name: 'old-standard', + rules: { + 'no-var': 'off', + 'object-shorthand': 'off', + } + } +] diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json new file mode 100644 index 0000000..e47ff11 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json @@ -0,0 +1,50 @@ +{ + "name": "reusify", + "version": "1.1.0", + "description": "Reuse objects and functions with style", + "main": "reusify.js", + "types": "reusify.d.ts", + "scripts": { + "lint": "eslint", + "test": "tape test.js", + "test:coverage": "c8 --100 tape test.js", + "test:typescript": "tsc" + }, + "pre-commit": [ + "lint", + "test", + "test:typescript" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/reusify.git" + }, + "keywords": [ + "reuse", + "object", + "performance", + "function", + "fast" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/mcollina/reusify/issues" + }, + "homepage": "https://github.com/mcollina/reusify#readme", + "engines": { + "node": ">=0.10.0", + "iojs": ">=1.0.0" + }, + "devDependencies": { + "@types/node": "^22.9.0", + "eslint": "^9.13.0", + "neostandard": "^0.12.0", + "pre-commit": "^1.2.2", + "tape": "^5.0.0", + "c8": "^10.1.2", + "typescript": "^5.2.2" + }, + "dependencies": { + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts new file mode 100644 index 0000000..9ba277d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts @@ -0,0 +1,14 @@ +interface Node { + next: Node | null; +} + +interface Constructor { + new(): T; +} + +declare function reusify(constructor: Constructor): { + get(): T; + release(node: T): void; +}; + +export = reusify; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js new file mode 100644 index 0000000..e6f36f3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js @@ -0,0 +1,33 @@ +'use strict' + +function reusify (Constructor) { + var head = new Constructor() + var tail = head + + function get () { + var current = head + + if (current.next) { + head = current.next + } else { + head = new Constructor() + tail = head + } + + current.next = null + + return current + } + + function release (obj) { + tail.next = obj + tail = obj + } + + return { + get: get, + release: release + } +} + +module.exports = reusify diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js new file mode 100644 index 0000000..929cfd7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js @@ -0,0 +1,66 @@ +'use strict' + +var test = require('tape') +var reusify = require('./') + +test('reuse objects', function (t) { + t.plan(6) 
+ + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + + t.notEqual(obj, instance.get(), 'two instance created') + t.notOk(obj.next, 'next must be null') + + instance.release(obj) + + // the internals keeps a hot copy ready for reuse + // putting this one back in the queue + instance.release(instance.get()) + + // comparing the old one with the one we got + // never do this in real code, after release you + // should never reuse that instance + t.equal(obj, instance.get(), 'instance must be reused') +}) + +test('reuse more than 2 objects', function (t) { + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + var obj2 = instance.get() + var obj3 = instance.get() + + t.notOk(obj.next, 'next must be null') + t.notOk(obj2.next, 'next must be null') + t.notOk(obj3.next, 'next must be null') + + t.notEqual(obj, obj2) + t.notEqual(obj, obj3) + t.notEqual(obj3, obj2) + + instance.release(obj) + instance.release(obj2) + instance.release(obj3) + + // skip one + instance.get() + + var obj4 = instance.get() + var obj5 = instance.get() + var obj6 = instance.get() + + t.equal(obj4, obj) + t.equal(obj5, obj2) + t.equal(obj6, obj3) + t.end() +}) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json new file mode 100644 index 0000000..dbe862b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "noEmit": true, + "strict": true + }, + "files": [ + "./reusify.d.ts" + ] +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE new file mode 100644 index 0000000..c7e6852 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md new file mode 100644 index 0000000..edc3da4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md @@ -0,0 +1,85 @@ +# run-parallel [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/run-parallel/master.svg +[travis-url]: https://travis-ci.org/feross/run-parallel +[npm-image]: https://img.shields.io/npm/v/run-parallel.svg +[npm-url]: https://npmjs.org/package/run-parallel +[downloads-image]: https://img.shields.io/npm/dm/run-parallel.svg +[downloads-url]: https://npmjs.org/package/run-parallel +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### Run an array of functions in parallel + +![parallel](https://raw.githubusercontent.com/feross/run-parallel/master/img.png) [![Sauce Test Status](https://saucelabs.com/browser-matrix/run-parallel.svg)](https://saucelabs.com/u/run-parallel) + +### install + +``` +npm install run-parallel +``` + +### usage + +#### parallel(tasks, [callback]) + +Run the `tasks` array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its callback, the main +`callback` is immediately called with the value of the error. Once the `tasks` have +completed, the results are passed to the final `callback` as an array. + +It is also possible to use an object instead of an array. Each property will be run as a +function and the results will be passed to the final `callback` as an object instead of +an array. This can be a more readable way of handling the results. + +##### arguments + +- `tasks` - An array or object containing functions to run. Each function is passed a +`callback(err, result)` which it must call on completion with an error `err` (which can +be `null`) and an optional `result` value. +- `callback(err, results)` - An optional callback to run once all the functions have +completed. This function gets a results array (or object) containing all the result +arguments passed to the task callbacks. + +##### example + +```js +var parallel = require('run-parallel') + +parallel([ + function (callback) { + setTimeout(function () { + callback(null, 'one') + }, 200) + }, + function (callback) { + setTimeout(function () { + callback(null, 'two') + }, 100) + } +], +// optional callback +function (err, results) { + // the results array will equal ['one','two'] even though + // the second function had a shorter timeout. +}) +``` + +This module is basically equavalent to +[`async.parallel`](https://github.com/caolan/async#paralleltasks-callback), but it's +handy to just have the one function you need instead of the kitchen sink. Modularity! +Especially handy if you're serving to the browser and need to reduce your javascript +bundle size. + +Works great in the browser with [browserify](http://browserify.org/)! + +### see also + +- [run-auto](https://github.com/feross/run-auto) +- [run-parallel-limit](https://github.com/feross/run-parallel-limit) +- [run-series](https://github.com/feross/run-series) +- [run-waterfall](https://github.com/feross/run-waterfall) + +### license + +MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). 
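
The README above mentions that an object of tasks may be passed but only shows the array form; the following is a minimal sketch of the object form, based on the `index.js` included in this patch (the property names and payloads are illustrative only):

```js
const parallel = require('run-parallel')

parallel({
  user: function (callback) {
    setTimeout(function () { callback(null, { id: 1 }) }, 100)
  },
  posts: function (callback) {
    setTimeout(function () { callback(null, ['first post']) }, 50)
  }
}, function (err, results) {
  // results is keyed by task name: { user: { id: 1 }, posts: ['first post'] }
  if (err) throw err
  console.log(results)
})
```
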
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js new file mode 100644 index 0000000..6307141 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js @@ -0,0 +1,51 @@ +/*! run-parallel. MIT License. Feross Aboukhadijeh */ +module.exports = runParallel + +const queueMicrotask = require('queue-microtask') + +function runParallel (tasks, cb) { + let results, pending, keys + let isSync = true + + if (Array.isArray(tasks)) { + results = [] + pending = tasks.length + } else { + keys = Object.keys(tasks) + results = {} + pending = keys.length + } + + function done (err) { + function end () { + if (cb) cb(err, results) + cb = null + } + if (isSync) queueMicrotask(end) + else end() + } + + function each (i, err, result) { + results[i] = result + if (--pending === 0 || err) { + done(err) + } + } + + if (!pending) { + // empty + done(null) + } else if (keys) { + // object + keys.forEach(function (key) { + tasks[key](function (err, result) { each(key, err, result) }) + }) + } else { + // array + tasks.forEach(function (task, i) { + task(function (err, result) { each(i, err, result) }) + }) + } + + isSync = false +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json new file mode 100644 index 0000000..1f14757 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json @@ -0,0 +1,58 @@ +{ + "name": "run-parallel", + "description": "Run an array of functions in parallel", + "version": "1.2.0", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/run-parallel/issues" + }, + "dependencies": { + "queue-microtask": "^1.2.2" + }, + "devDependencies": { + "airtap": "^3.0.0", + "standard": "*", + "tape": "^5.0.1" + }, + "homepage": "https://github.com/feross/run-parallel", + "keywords": [ + "parallel", + "async", + "function", + "callback", + "asynchronous", + "run", + "array", + "run parallel" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/run-parallel.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js new file mode 100644 index 0000000..f35db30 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js @@ -0,0 +1,19 @@ +'use strict'; +const shebangRegex = require('shebang-regex'); + +module.exports = (string = '') => { + const match = string.match(shebangRegex); + + if (!match) { + return null; + } + + const [path, argument] = match[0].replace(/#! ?/, '').split(' '); + const binary = path.split('/').pop(); + + if (binary === 'env') { + return argument; + } + + return argument ? 
`${binary} ${argument}` : binary; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license new file mode 100644 index 0000000..db6bc32 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json new file mode 100644 index 0000000..18e3c04 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json @@ -0,0 +1,34 @@ +{ + "name": "shebang-command", + "version": "2.0.0", + "description": "Get the command from a shebang", + "license": "MIT", + "repository": "kevva/shebang-command", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "github.com/kevva" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "cmd", + "command", + "parse", + "shebang" + ], + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "devDependencies": { + "ava": "^2.3.0", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md new file mode 100644 index 0000000..84feb44 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md @@ -0,0 +1,34 @@ +# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command) + +> Get the command from a shebang + + +## Install + +``` +$ npm install shebang-command +``` + + +## Usage + +```js +const shebangCommand = require('shebang-command'); + +shebangCommand('#!/usr/bin/env node'); +//=> 'node' + +shebangCommand('#!/bin/bash'); +//=> 'bash' +``` + + +## API + +### shebangCommand(string) + +#### string + +Type: `string` + +String containing a shebang. 
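A small usage sketch building on the readme above, assuming a local script whose first line is a shebang; the path `./cli.js` is hypothetical:

```js
const fs = require('fs');
const shebangCommand = require('shebang-command');

// Read only the first line of the script and extract its interpreter.
const firstLine = fs.readFileSync('./cli.js', 'utf8').split('\n')[0];

console.log(shebangCommand(firstLine));
//=> 'node' for '#!/usr/bin/env node', 'bash' for '#!/bin/bash', null if there is no shebang
```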
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts new file mode 100644 index 0000000..61d034b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts @@ -0,0 +1,22 @@ +/** +Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line. + +@example +``` +import shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` +*/ +declare const shebangRegex: RegExp; + +export = shebangRegex; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js new file mode 100644 index 0000000..63fc4a0 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = /^#!(.*)/; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
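Since the regex documented above exposes the interpreter line as its single capture group, one way to split it into an interpreter and its arguments is sketched below; the input string is made up for illustration:

```js
const shebangRegex = require('shebang-regex');

const source = '#!/usr/bin/env node --no-warnings\nconsole.log("hi");';
const match = shebangRegex.exec(source);

if (match) {
  // match[1] is the capture group: everything after '#!' on the first line
  const [interpreter, ...args] = match[1].trim().split(/\s+/);
  console.log(interpreter); //=> '/usr/bin/env'
  console.log(args); //=> [ 'node', '--no-warnings' ]
}
```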
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json new file mode 100644 index 0000000..00ab30f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json @@ -0,0 +1,35 @@ +{ + "name": "shebang-regex", + "version": "3.0.0", + "description": "Regular expression for matching a shebang line", + "license": "MIT", + "repository": "sindresorhus/shebang-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "regex", + "regexp", + "shebang", + "match", + "test", + "line" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md new file mode 100644 index 0000000..5ecf863 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md @@ -0,0 +1,33 @@ +# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex) + +> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line + + +## Install + +``` +$ npm install shebang-regex +``` + + +## Usage + +```js +const shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE new file mode 100644 index 0000000..40a2bf6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2012, Artur Adib +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md new file mode 100644 index 0000000..a0de676 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md @@ -0,0 +1,949 @@ +# ShellJS - Unix shell commands for Node.js + +[![GitHub Actions](https://img.shields.io/github/actions/workflow/status/shelljs/shelljs/main.yml?style=flat-square&logo=github)](https://github.com/shelljs/shelljs/actions/workflows/main.yml) +[![Codecov](https://img.shields.io/codecov/c/github/shelljs/shelljs/main.svg?style=flat-square&label=coverage)](https://codecov.io/gh/shelljs/shelljs) +[![npm version](https://img.shields.io/npm/v/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) +[![npm downloads](https://img.shields.io/npm/dm/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) + +ShellJS is a portable **(Windows/Linux/macOS)** implementation of Unix shell +commands on top of the Node.js API. You can use it to eliminate your shell +script's dependency on Unix while still keeping its familiar and powerful +commands. You can also install it globally so you can run it from outside Node +projects - say goodbye to those gnarly Bash scripts! + +ShellJS is proudly tested on every LTS node release since `v18`! + +The project is unit-tested and battle-tested in projects like: + ++ [Firebug](http://getfirebug.com/) - Firefox's infamous debugger ++ [JSHint](http://jshint.com) & [ESLint](http://eslint.org/) - popular JavaScript linters ++ [Zepto](http://zeptojs.com) - jQuery-compatible JavaScript library for modern browsers ++ [Yeoman](http://yeoman.io/) - Web application stack and development tool ++ [Deployd.com](http://deployd.com) - Open source PaaS for quick API backend generation ++ And [many more](https://npmjs.org/browse/depended/shelljs). + +If you have feedback, suggestions, or need help, feel free to post in our [issue +tracker](https://github.com/shelljs/shelljs/issues). + +Think ShellJS is cool? Check out some related projects in our [Wiki +page](https://github.com/shelljs/shelljs/wiki)! + +Upgrading from an older version? Check out our [breaking +changes](https://github.com/shelljs/shelljs/wiki/Breaking-Changes) page to see +what changes to watch out for while upgrading. + +## Command line use + +If you just want cross platform UNIX commands, checkout our new project +[shelljs/shx](https://github.com/shelljs/shx), a utility to expose `shelljs` to +the command line. + +For example: + +``` +$ shx mkdir -p foo +$ shx touch foo/bar.txt +$ shx rm -rf foo +``` + +## Plugin API + +ShellJS now supports third-party plugins! You can learn more about using plugins +and writing your own ShellJS commands in [the +wiki](https://github.com/shelljs/shelljs/wiki/Using-ShellJS-Plugins). + +## A quick note about the docs + +For documentation on all the latest features, check out our +[README](https://github.com/shelljs/shelljs). 
To read docs that are consistent +with the latest release, check out [the npm +page](https://www.npmjs.com/package/shelljs). + +## Installing + +Via npm: + +```bash +$ npm install [-g] shelljs +``` + +## Examples + +```javascript +var shell = require('shelljs'); + +if (!shell.which('git')) { + shell.echo('Sorry, this script requires git'); + shell.exit(1); +} + +// Copy files to release dir +shell.rm('-rf', 'out/Release'); +shell.cp('-R', 'stuff/', 'out/Release'); + +// Replace macros in each .js file +shell.cd('lib'); +shell.ls('*.js').forEach(function (file) { + shell.sed('-i', 'BUILD_VERSION', 'v0.1.2', file); + shell.sed('-i', /^.*REMOVE_THIS_LINE.*$/, '', file); + shell.sed('-i', /.*REPLACE_LINE_WITH_MACRO.*\n/, shell.cat('macro.js'), file); +}); +shell.cd('..'); + +// Run external tool synchronously +if (shell.exec('git commit -am "Auto-commit"').code !== 0) { + shell.echo('Error: Git commit failed'); + shell.exit(1); +} +``` + +## Exclude options + +If you need to pass a parameter that looks like an option, you can do so like: + +```js +shell.grep('--', '-v', 'path/to/file'); // Search for "-v", no grep options + +shell.cp('-R', '-dir', 'outdir'); // If already using an option, you're done +``` + +## Global vs. Local + +We no longer recommend using a global-import for ShellJS (i.e. +`require('shelljs/global')`). While still supported for convenience, this +pollutes the global namespace, and should therefore only be used with caution. + +Instead, we recommend a local import (standard for npm packages): + +```javascript +var shell = require('shelljs'); +shell.echo('hello world'); +``` + +Alternatively, we also support importing as a module with: + +```javascript +import shell from 'shelljs'; +shell.echo('hello world'); +``` + + + + +## Command reference + + +All commands run synchronously, unless otherwise stated. +All commands accept standard bash globbing characters (`*`, `?`, etc.), +compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). + +For less-commonly used commands and features, please check out our [wiki +page](https://github.com/shelljs/shelljs/wiki). + + +### cat([options,] file [, file ...]) +### cat([options,] file_array) + +Available options: + ++ `-n`: number all output lines + +Examples: + +```javascript +var str = cat('file*.txt'); +var str = cat('file1', 'file2'); +var str = cat(['file1', 'file2']); // same as above +``` + +Returns a [ShellString](#shellstringstr) containing the given file, or a +concatenated string containing the files if more than one file is given (a +new line character is introduced between each file). + + +### cd([dir]) + +Changes to directory `dir` for the duration of the script. Changes to home +directory if no argument is supplied. Returns a +[ShellString](#shellstringstr) to indicate success or failure. + + +### chmod([options,] octal_mode || octal_string, file) +### chmod([options,] symbolic_mode, file) + +Available options: + ++ `-v`: output a diagnostic for every file processed ++ `-c`: like verbose, but report only when a change is made ++ `-R`: change files and directories recursively + +Examples: + +```javascript +chmod(755, '/Users/brandon'); +chmod('755', '/Users/brandon'); // same as above +chmod('u+x', '/Users/brandon'); +chmod('-R', 'a-w', '/Users/brandon'); +``` + +Alters the permissions of a file or directory by either specifying the +absolute permissions in octal form or expressing the changes in symbols. +This command tries to mimic the POSIX behavior as much as possible. 
+Notable exceptions: + ++ In symbolic modes, `a-r` and `-r` are identical. No consideration is + given to the `umask`. ++ There is no "quiet" option, since default behavior is to run silent. ++ Windows OS uses a very different permission model than POSIX. `chmod()` + does its best on Windows, but there are limits to how file permissions can + be set. Note that WSL (Windows subsystem for Linux) **does** follow POSIX, + so cross-platform compatibility should not be a concern there. + +Returns a [ShellString](#shellstringstr) indicating success or failure. + + +### cmd(arg1[, arg2, ...] [, options]) + +Available options: + ++ `cwd: directoryPath`: change the current working directory only for this + cmd() invocation. ++ `maxBuffer: num`: Raise or decrease the default buffer size for + stdout/stderr. ++ `timeout`: Change the default timeout. + +Examples: + +```javascript +var version = cmd('node', '--version').stdout; +cmd('git', 'commit', '-am', `Add suport for node ${version}`); +console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) +console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) +``` + +Executes the given command synchronously. This is intended as an easier +alternative for [exec()](#execcommand--options--callback), with better +security around globbing, comamnd injection, and variable expansion. This is +guaranteed to only run one external command, and won't give special +treatment for any shell characters (ex. this treats `|` as a literal +character, not as a shell pipeline). +This returns a [ShellString](#shellstringstr). + +By default, this performs globbing on all platforms, but you can disable +this with `set('-f')`. + +This **does not** support asynchronous mode. If you need asynchronous +command execution, check out [execa](https://www.npmjs.com/package/execa) or +the node builtin `child_process.execFile()` instead. + + +### cp([options,] source [, source ...], dest) +### cp([options,] source_array, dest) + +Available options: + ++ `-f`: force (default behavior) ++ `-n`: no-clobber ++ `-u`: only copy if `source` is newer than `dest` ++ `-r`, `-R`: recursive ++ `-L`: follow symlinks ++ `-P`: don't follow symlinks ++ `-p`: preserve file mode, ownership, and timestamps + +Examples: + +```javascript +cp('file1', 'dir1'); +cp('-R', 'path/to/dir/', '~/newCopy/'); +cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); +cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above +``` + +Copies files. Returns a [ShellString](#shellstringstr) indicating success +or failure. + + +### pushd([options,] [dir | '-N' | '+N']) + +Available options: + ++ `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. ++ `-q`: Suppresses output to the console. + +Arguments: + ++ `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. ++ `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. ++ `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. + +Examples: + +```javascript +// process.cwd() === '/usr' +pushd('/etc'); // Returns /etc /usr +pushd('+1'); // Returns /usr /etc +``` + +Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. 
Returns an array of paths in the stack. + + +### popd([options,] ['-N' | '+N']) + +Available options: + ++ `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. ++ `-q`: Suppresses output to the console. + +Arguments: + ++ `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. ++ `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. + +Examples: + +```javascript +echo(process.cwd()); // '/usr' +pushd('/etc'); // '/etc /usr' +echo(process.cwd()); // '/etc' +popd(); // '/usr' +echo(process.cwd()); // '/usr' +``` + +When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. + + +### dirs([options | '+N' | '-N']) + +Available options: + ++ `-c`: Clears the directory stack by deleting all of the elements. ++ `-q`: Suppresses output to the console. + +Arguments: + ++ `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. ++ `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. + +Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. + +See also: `pushd`, `popd` + + +### echo([options,] string [, string ...]) + +Available options: + ++ `-e`: interpret backslash escapes (default) ++ `-n`: remove trailing newline from output + +Examples: + +```javascript +echo('hello world'); +var str = echo('hello world'); +echo('-n', 'no newline at end'); +``` + +Prints `string` to stdout, and returns a [ShellString](#shellstringstr). + + +### exec(command [, options] [, callback]) + +Available options: + ++ `async`: Asynchronous execution. If a callback is provided, it will be set to + `true`, regardless of the passed value (default: `false`). ++ `fatal`: Exit upon error (default: `false`). ++ `silent`: Do not echo program output to console (default: `false`). ++ `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and + what is written to stdout and stderr when not in silent mode (default: `'utf8'`). ++ and any option available to Node.js's + [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) + +Examples: + +```javascript +var version = exec('node --version', {silent:true}).stdout; + +var child = exec('some_long_running_process', {async:true}); +child.stdout.on('data', function(data) { + /* ... do something with data ... */ +}); + +exec('some_long_running_process', function(code, stdout, stderr) { + console.log('Exit code:', code); + console.log('Program output:', stdout); + console.log('Program stderr:', stderr); +}); +``` + +Executes the given `command` _synchronously_, unless otherwise specified. +When in synchronous mode, this returns a [ShellString](#shellstringstr). +Otherwise, this returns the child process object, and the `callback` +receives the arguments `(code, stdout, stderr)`. + +Not seeing the behavior you want? `exec()` runs everything through `sh` +by default (or `cmd.exe` on Windows), which differs from `bash`. 
If you +need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. + +**Security note:** as `shell.exec()` executes an arbitrary string in the +system shell, it is **critical** to properly sanitize user input to avoid +**command injection**. For more context, consult the [Security +Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). + + +### find(path [, path ...]) +### find(path_array) + +Examples: + +```javascript +find('src', 'lib'); +find(['src', 'lib']); // same as above +find('.').filter(function(file) { return file.match(/\.js$/); }); +``` + +Returns a [ShellString](#shellstringstr) (with array-like properties) of all +files (however deep) in the given paths. + +The main difference from `ls('-R', path)` is that the resulting file names +include the base directories (e.g., `lib/resources/file1` instead of just `file1`). + + +### grep([options,] regex_filter, file [, file ...]) +### grep([options,] regex_filter, file_array) + +Available options: + ++ `-v`: Invert `regex_filter` (only print non-matching lines). ++ `-l`: Print only filenames of matching files. ++ `-i`: Ignore case. ++ `-n`: Print line numbers. ++ `-B `: Show `` lines before each result. ++ `-A `: Show `` lines after each result. ++ `-C `: Show `` lines before and after each result. -B and -A override this option. + +Examples: + +```javascript +grep('-v', 'GLOBAL_VARIABLE', '*.js'); +grep('GLOBAL_VARIABLE', '*.js'); +grep('-B', 3, 'GLOBAL_VARIABLE', '*.js'); +grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js'); +grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js'); +``` + +Reads input string from given files and returns a +[ShellString](#shellstringstr) containing all lines of the @ file that match +the given `regex_filter`. + + +### head([{'-n': \},] file [, file ...]) +### head([{'-n': \},] file_array) + +Available options: + ++ `-n `: Show the first `` lines of the files + +Examples: + +```javascript +var str = head({'-n': 1}, 'file*.txt'); +var str = head('file1', 'file2'); +var str = head(['file1', 'file2']); // same as above +``` + +Read the start of a `file`. Returns a [ShellString](#shellstringstr). + + +### ln([options,] source, dest) + +Available options: + ++ `-s`: symlink ++ `-f`: force + +Examples: + +```javascript +ln('file', 'newlink'); +ln('-sf', 'file', 'existing'); +``` + +Links `source` to `dest`. Use `-f` to force the link, should `dest` already +exist. Returns a [ShellString](#shellstringstr) indicating success or +failure. + + +### ls([options,] [path, ...]) +### ls([options,] path_array) + +Available options: + ++ `-R`: recursive ++ `-A`: all files (include files beginning with `.`, except for `.` and `..`) ++ `-L`: follow symlinks ++ `-d`: list directories themselves, not their contents ++ `-l`: provides more details for each file. Specifically, each file is + represented by a structured object with separate fields for file + metadata (see + [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The + return value also overrides `.toString()` to resemble `ls -l`'s + output format for human readability, but programmatic usage should + depend on the stable object format rather than the `.toString()` + representation. 
+ +Examples: + +```javascript +ls('projs/*.js'); +ls('projs/**/*.js'); // Find all js files recursively in projs +ls('-R', '/users/me', '/tmp'); +ls('-R', ['/users/me', '/tmp']); // same as above +ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} +``` + +Returns a [ShellString](#shellstringstr) (with array-like properties) of all +the files in the given `path`, or files in the current directory if no +`path` is provided. + + +### mkdir([options,] dir [, dir ...]) +### mkdir([options,] dir_array) + +Available options: + ++ `-p`: full path (and create intermediate directories, if necessary) + +Examples: + +```javascript +mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); +mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above +``` + +Creates directories. Returns a [ShellString](#shellstringstr) indicating +success or failure. + + +### mv([options ,] source [, source ...], dest') +### mv([options ,] source_array, dest') + +Available options: + ++ `-f`: force (default behavior) ++ `-n`: no-clobber + +Examples: + +```javascript +mv('-n', 'file', 'dir/'); +mv('file1', 'file2', 'dir/'); +mv(['file1', 'file2'], 'dir/'); // same as above +``` + +Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) +indicating success or failure. + + +### pwd() + +Returns the current directory as a [ShellString](#shellstringstr). + + +### rm([options,] file [, file ...]) +### rm([options,] file_array) + +Available options: + ++ `-f`: force ++ `-r, -R`: recursive + +Examples: + +```javascript +rm('-rf', '/tmp/*'); +rm('some_file.txt', 'another_file.txt'); +rm(['some_file.txt', 'another_file.txt']); // same as above +``` + +Removes files. Returns a [ShellString](#shellstringstr) indicating success +or failure. + + +### sed([options,] search_regex, replacement, file [, file ...]) +### sed([options,] search_regex, replacement, file_array) + +Available options: + ++ `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ + +Examples: + +```javascript +sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); +``` + +Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on +each of the lines from the input string using the given `search_regex` and `replacement` string or +function. Returns the new [ShellString](#shellstringstr) after replacement. + +Note: + +Like unix `sed`, ShellJS `sed` supports capture groups. Capture groups are specified +using the `$n` syntax: + +```javascript +sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); +``` + +Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file +(split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') +will not match anything and nothing will be replaced. + + +### set(options) + +Available options: + ++ `+/-e`: exit upon error (`config.fatal`) ++ `+/-v`: verbose: show all commands (`config.verbose`) ++ `+/-f`: disable filename expansion (globbing) + +Examples: + +```javascript +set('-e'); // exit upon first error +set('+e'); // this undoes a "set('-e')" +``` + +Sets global configuration variables. + + +### sort([options,] file [, file ...]) +### sort([options,] file_array) + +Available options: + ++ `-r`: Reverse the results ++ `-n`: Compare according to numerical value + +Examples: + +```javascript +sort('foo.txt', 'bar.txt'); +sort('-r', 'foo.txt'); +``` + +Return the contents of the `file`s, sorted line-by-line as a +[ShellString](#shellstringstr). 
Sorting multiple files mixes their content +(just as unix `sort` does). + + +### tail([{'-n': \},] file [, file ...]) +### tail([{'-n': \},] file_array) + +Available options: + ++ `-n `: Show the last `` lines of `file`s + +Examples: + +```javascript +var str = tail({'-n': 1}, 'file*.txt'); +var str = tail('file1', 'file2'); +var str = tail(['file1', 'file2']); // same as above +``` + +Read the end of a `file`. Returns a [ShellString](#shellstringstr). + + +### tempdir() + +Examples: + +```javascript +var tmp = tempdir(); // "/tmp" for most *nix platforms +``` + +Searches and returns string containing a writeable, platform-dependent temporary directory. +Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). + + +### test(expression) + +Available expression primaries: + ++ `'-b', 'path'`: true if path is a block device ++ `'-c', 'path'`: true if path is a character device ++ `'-d', 'path'`: true if path is a directory ++ `'-e', 'path'`: true if path exists ++ `'-f', 'path'`: true if path is a regular file ++ `'-L', 'path'`: true if path is a symbolic link ++ `'-p', 'path'`: true if path is a pipe (FIFO) ++ `'-S', 'path'`: true if path is a socket + +Examples: + +```javascript +if (test('-d', path)) { /* do something with dir */ }; +if (!test('-f', path)) continue; // skip if it's not a regular file +``` + +Evaluates `expression` using the available primaries and returns +corresponding boolean value. + + +### ShellString.prototype.to(file) + +Examples: + +```javascript +cat('input.txt').to('output.txt'); +``` + +Analogous to the redirection operator `>` in Unix, but works with +`ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix +redirections, `to()` will overwrite any existing file!_ Returns the same +[ShellString](#shellstringstr) this operated on, to support chaining. + + +### ShellString.prototype.toEnd(file) + +Examples: + +```javascript +cat('input.txt').toEnd('output.txt'); +``` + +Analogous to the redirect-and-append operator `>>` in Unix, but works with +`ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the +same [ShellString](#shellstringstr) this operated on, to support chaining. + + +### touch([options,] file [, file ...]) +### touch([options,] file_array) + +Available options: + ++ `-a`: Change only the access time ++ `-c`: Do not create any files ++ `-m`: Change only the modification time ++ `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) + instead of current time ++ `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current + time + +Examples: + +```javascript +touch('source.js'); +touch('-c', 'path/to/file.js'); +touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); +touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); +touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); +``` + +Update the access and modification times of each file to the current time. +A file argument that does not exist is created empty, unless `-c` is supplied. +This is a partial implementation of +[`touch(1)`](http://linux.die.net/man/1/touch). Returns a +[ShellString](#shellstringstr) indicating success or failure. 
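A short sketch that strings several of the commands documented above into a small staging step, ending with the `to()` redirection; all paths and the `DEV_VERSION` macro are hypothetical:

```javascript
var shell = require('shelljs');

// Stage sources into a clean build directory.
shell.rm('-rf', 'build');
shell.mkdir('-p', 'build');
shell.cp('-R', 'src/', 'build');

// Stamp a version macro in every staged .js file.
var staged = shell.find('build').filter(function (f) { return /\.js$/.test(f); });
staged.forEach(function (f) {
  shell.sed('-i', 'DEV_VERSION', 'v1.0.0', f);
});

// Record the staged file list via ShellString's to() redirection.
shell.echo(staged.join('\n')).to('build/files.txt');
```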
+ + +### uniq([options,] [input, [output]]) + +Available options: + ++ `-i`: Ignore case while comparing ++ `-c`: Prefix lines by the number of occurrences ++ `-d`: Only print duplicate lines, one for each group of identical lines + +Examples: + +```javascript +uniq('foo.txt'); +uniq('-i', 'foo.txt'); +uniq('-cd', 'foo.txt', 'bar.txt'); +``` + +Filter adjacent matching lines from `input`. Returns a +[ShellString](#shellstringstr). + + +### which(command) + +Examples: + +```javascript +var nodeExec = which('node'); +``` + +Searches for `command` in the system's `PATH`. On Windows, this uses the +`PATHEXT` variable to append the extension if it's not already executable. +Returns a [ShellString](#shellstringstr) containing the absolute path to +`command`. + + +### exit(code) + +Exits the current process with the given exit `code`. + +### error() + +Tests if error occurred in the last command. Returns a truthy value if an +error returned, or a falsy value otherwise. + +**Note**: do not rely on the +return value to be an error message. If you need the last error message, use +the `.stderr` attribute from the last command's return value instead. + + +### errorCode() + +Returns the error code from the last command. + + +### ShellString(str) + +Examples: + +```javascript +var foo = new ShellString('hello world'); +``` + +This is a dedicated type returned by most ShellJS methods, which wraps a +string (or array) value. This has all the string (or array) methods, but +also exposes extra methods: [`.to()`](#shellstringprototypetofile), +[`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods +(ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string +by calling `.toString()`. + +This type also exposes the corresponding command's stdout, stderr, and +return status code via the `.stdout` (string), `.stderr` (string), and +`.code` (number) properties respectively. + + +### env['VAR_NAME'] + +Object containing environment variables (both getter and setter). Shortcut +to `process.env`. + +### Pipes + +Examples: + +```javascript +grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); +echo("files with o's in the name:\n" + ls().grep('o')); +cat('test.js').exec('node'); // pipe to exec() call +``` + +Commands can send their output to another command in a pipe-like fashion. +`sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand +side of a pipe. Pipes can be chained. + +## Configuration + + +### config.silent + +Example: + +```javascript +var sh = require('shelljs'); +var silentState = sh.config.silent; // save old silent state +sh.config.silent = true; +/* ... */ +sh.config.silent = silentState; // restore old silent state +``` + +Suppresses all command output if `true`, except for `echo()` calls. +Default is `false`. + +### config.fatal + +Example: + +```javascript +require('shelljs/global'); +config.fatal = true; // or set('-e'); +cp('this_file_does_not_exist', '/dev/null'); // throws Error here +/* more commands... */ +``` + +If `true`, the script will throw a Javascript error when any shell.js +command encounters an error. Default is `false`. This is analogous to +Bash's `set -e`. 
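A sketch contrasting the two error-handling styles described above: checking `error()`/`errorCode()` under the default non-fatal config, versus letting `config.fatal` throw; the copied paths are made up:

```javascript
var shell = require('shelljs');

// Style 1: non-fatal (default) - inspect error() / errorCode() after the call.
shell.config.silent = true;
var result = shell.cp('maybe-missing.txt', 'backup/');
if (shell.error()) {
  console.log('copy failed with code', shell.errorCode(), '-', result.stderr);
}
shell.config.silent = false;

// Style 2: fatal - any failing command throws, analogous to Bash's `set -e`.
shell.config.fatal = true;
try {
  shell.cp('maybe-missing.txt', 'backup/');
} catch (e) {
  console.log('aborted:', e.message);
}
shell.config.fatal = false;
```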
+ +### config.verbose + +Example: + +```javascript +config.verbose = true; // or set('-v'); +cd('dir/'); +rm('-rf', 'foo.txt', 'bar.txt'); +exec('echo hello'); +``` + +Will print each command as follows: + +``` +cd dir/ +rm -rf foo.txt bar.txt +exec echo hello +``` + +### config.globOptions (deprecated) + +**Deprecated**: we recommend that you do not edit `config.globOptions`. +Support for this configuration option may be changed or removed in a future +ShellJS release. + +**Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS +v0.9.x, `config.globOptions` is compatible with `fast-glob`. + +Example: + +```javascript +config.globOptions = {nodir: true}; +``` + +`config.globOptions` changes how ShellJS expands glob (wildcard) +expressions. See +[fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) +for available options. Be aware that modifying `config.globOptions` **may +break ShellJS functionality.** + +### config.reset() + +Example: + +```javascript +var shell = require('shelljs'); +// Make changes to shell.config, and do stuff... +/* ... */ +shell.config.reset(); // reset to original state +// Do more stuff, but with original settings +/* ... */ +``` + +Reset `shell.config` to the defaults: + +```javascript +{ + fatal: false, + globOptions: {}, + maxdepth: 255, + noglob: false, + silent: false, + verbose: false, +} +``` + +## Team + +| [![Nate Fischer](https://avatars.githubusercontent.com/u/5801521?s=130)](https://github.com/nfischer) | +|:---:| +| [Nate Fischer](https://github.com/nfischer) | diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js new file mode 100644 index 0000000..e061f5a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js @@ -0,0 +1,15 @@ +/* eslint no-extend-native: 0 */ +var shell = require('./shell'); +var common = require('./src/common'); + +Object.keys(shell).forEach(function (cmd) { + global[cmd] = shell[cmd]; +}); + +var _to = require('./src/to'); + +String.prototype.to = common.wrap('to', _to); + +var _toEnd = require('./src/toEnd'); + +String.prototype.toEnd = common.wrap('toEnd', _toEnd); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js new file mode 100644 index 0000000..a8438c8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js @@ -0,0 +1,57 @@ +require('./global'); + +global.config.fatal = true; +global.target = {}; + +var args = process.argv.slice(2), + targetArgs, + dashesLoc = args.indexOf('--'); + +// split args, everything after -- if only for targets +if (dashesLoc > -1) { + targetArgs = args.slice(dashesLoc + 1, args.length); + args = args.slice(0, dashesLoc); +} + +// This ensures we only execute the script targets after the entire script has +// been evaluated +setTimeout(function() { + var t; + + if (args.length === 1 && args[0] === '--help') { + console.log('Available targets:'); + for (t in global.target) + console.log(' ' + t); + return; + } + + // Wrap targets to prevent duplicate execution + for (t in global.target) { + (function(t, oldTarget){ + + // Wrap it + global.target[t] = function() { + if (!oldTarget.done){ + oldTarget.done = true; + oldTarget.result = oldTarget.apply(oldTarget, arguments); + } + return oldTarget.result; + }; + + })(t, global.target[t]); + } + + // Execute desired targets + if 
(args.length > 0) { + args.forEach(function(arg) { + if (arg in global.target) + global.target[arg](targetArgs); + else { + console.log('no such target: ' + arg); + } + }); + } else if ('all' in global.target) { + global.target.all(targetArgs); + } + +}, 0); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json new file mode 100644 index 0000000..a5c3299 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json @@ -0,0 +1,90 @@ +{ + "name": "shelljs", + "version": "0.10.0", + "description": "Portable Unix shell commands for Node.js", + "keywords": [ + "shelljs", + "bash", + "unix", + "shell", + "makefile", + "make", + "jake", + "synchronous" + ], + "contributors": [ + "Nate Fischer (https://github.com/nfischer)", + "Brandon Freitag (https://github.com/freitagbr)" + ], + "repository": { + "type": "git", + "url": "git://github.com/shelljs/shelljs.git" + }, + "license": "BSD-3-Clause", + "homepage": "http://github.com/shelljs/shelljs", + "main": "./shell.js", + "exports": { + ".": "./shell.js", + "./global": "./global.js", + "./global.js": "./global.js", + "./make": "./make.js", + "./make.js": "./make.js", + "./package": "./package.json", + "./package.json": "./package.json", + "./plugin": "./plugin.js", + "./plugin.js": "./plugin.js" + }, + "files": [ + "global.js", + "make.js", + "plugin.js", + "shell.js", + "src" + ], + "scripts": { + "check-node-support": "node scripts/check-node-support", + "posttest": "npm run lint", + "test": "ava", + "test-with-coverage": "nyc --reporter=text --reporter=lcov ava", + "gendocs": "node scripts/generate-docs", + "lint": "eslint .", + "after-travis": "travis-check-changes", + "changelog": "shelljs-changelog", + "release:major": "shelljs-release major", + "release:minor": "shelljs-release minor", + "release:patch": "shelljs-release patch" + }, + "dependencies": { + "execa": "^5.1.1", + "fast-glob": "^3.3.2" + }, + "ava": { + "serial": true, + "workerThreads": false, + "powerAssert": false, + "files": [ + "test/*.js" + ], + "helpers": [ + "test/resources/**", + "test/utils/**" + ] + }, + "devDependencies": { + "ava": "^6.2.0", + "chalk": "^4.1.2", + "coffee-script": "^1.12.7", + "eslint": "^8.2.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-plugin-import": "^2.31.0", + "js-yaml": "^4.1.0", + "nyc": "^17.1.0", + "shelljs-changelog": "^0.2.6", + "shelljs-release": "^0.5.3", + "shx": "^0.4.0", + "travis-check-changes": "^0.5.1" + }, + "engines": { + "node": ">=18" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js new file mode 100644 index 0000000..2e15850 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js @@ -0,0 +1,16 @@ +// Various utilities exposed to plugins + +require('./shell'); // Create the ShellJS instance (mandatory) + +var common = require('./src/common'); + +var exportedAttributes = [ + 'error', // For signaling errors from within commands + 'parseOptions', // For custom option parsing + 'readFromPipe', // For commands with the .canReceivePipe attribute + 'register', // For registering plugins +]; + +exportedAttributes.forEach(function (attr) { + exports[attr] = common[attr]; +}); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js new file mode 100644 index 0000000..8a3a67d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js @@ -0,0 +1,216 @@ +// +// ShellJS +// Unix shell commands on top of Node's API +// +// Copyright (c) 2012 Artur Adib +// http://github.com/shelljs/shelljs +// + +var common = require('./src/common'); + +module.exports = common.shell; + +//@ +//@ All commands run synchronously, unless otherwise stated. +//@ All commands accept standard bash globbing characters (`*`, `?`, etc.), +//@ compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). +//@ +//@ For less-commonly used commands and features, please check out our [wiki +//@ page](https://github.com/shelljs/shelljs/wiki). +//@ + +// Include the docs for all the default commands +//@commands + +// Load all default commands. We import these for their side effect of loading +// using the plugin architecture via `common.register()`. +require('./src/cat'); +require('./src/cd'); +require('./src/chmod'); +require('./src/cmd'); +require('./src/cp'); +require('./src/dirs'); +require('./src/echo'); +require('./src/exec'); +require('./src/exec-child'); // A hint to the bundler to keep exec-child.js +require('./src/find'); +require('./src/grep'); +require('./src/head'); +require('./src/ln'); +require('./src/ls'); +require('./src/mkdir'); +require('./src/mv'); +require('./src/popd'); +require('./src/pushd'); +require('./src/pwd'); +require('./src/rm'); +require('./src/sed'); +require('./src/set'); +require('./src/sort'); +require('./src/tail'); +require('./src/tempdir'); +require('./src/test'); +require('./src/to'); +require('./src/toEnd'); +require('./src/touch'); +require('./src/uniq'); +require('./src/which'); + +//@ +//@ ### exit(code) +//@ +//@ Exits the current process with the given exit `code`. +module.exports.exit = function exit(code) { + common.state.error = null; + common.state.errorCode = 0; + if (code) { + common.error('exit', { + continue: true, + code, + prefix: '', + silent: true, + fatal: false, + }); + process.exit(code); + } else { + process.exit(); + } +}; + +//@include ./src/error.js +module.exports.error = require('./src/error'); + +//@include ./src/errorCode.js +module.exports.errorCode = require('./src/errorCode'); + +//@include ./src/common.js +module.exports.ShellString = common.ShellString; + +//@ +//@ ### env['VAR_NAME'] +//@ +//@ Object containing environment variables (both getter and setter). Shortcut +//@ to `process.env`. +module.exports.env = process.env; + +//@ +//@ ### Pipes +//@ +//@ Examples: +//@ +//@ ```javascript +//@ grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); +//@ echo("files with o's in the name:\n" + ls().grep('o')); +//@ cat('test.js').exec('node'); // pipe to exec() call +//@ ``` +//@ +//@ Commands can send their output to another command in a pipe-like fashion. +//@ `sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand +//@ side of a pipe. Pipes can be chained. + +//@ +//@ ## Configuration +//@ + +module.exports.config = common.config; + +//@ +//@ ### config.silent +//@ +//@ Example: +//@ +//@ ```javascript +//@ var sh = require('shelljs'); +//@ var silentState = sh.config.silent; // save old silent state +//@ sh.config.silent = true; +//@ /* ... */ +//@ sh.config.silent = silentState; // restore old silent state +//@ ``` +//@ +//@ Suppresses all command output if `true`, except for `echo()` calls. +//@ Default is `false`. 
+ +//@ +//@ ### config.fatal +//@ +//@ Example: +//@ +//@ ```javascript +//@ require('shelljs/global'); +//@ config.fatal = true; // or set('-e'); +//@ cp('this_file_does_not_exist', '/dev/null'); // throws Error here +//@ /* more commands... */ +//@ ``` +//@ +//@ If `true`, the script will throw a Javascript error when any shell.js +//@ command encounters an error. Default is `false`. This is analogous to +//@ Bash's `set -e`. + +//@ +//@ ### config.verbose +//@ +//@ Example: +//@ +//@ ```javascript +//@ config.verbose = true; // or set('-v'); +//@ cd('dir/'); +//@ rm('-rf', 'foo.txt', 'bar.txt'); +//@ exec('echo hello'); +//@ ``` +//@ +//@ Will print each command as follows: +//@ +//@ ``` +//@ cd dir/ +//@ rm -rf foo.txt bar.txt +//@ exec echo hello +//@ ``` + +//@ +//@ ### config.globOptions (deprecated) +//@ +//@ **Deprecated**: we recommend that you do not edit `config.globOptions`. +//@ Support for this configuration option may be changed or removed in a future +//@ ShellJS release. +//@ +//@ **Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS +//@ v0.9.x, `config.globOptions` is compatible with `fast-glob`. +//@ +//@ Example: +//@ +//@ ```javascript +//@ config.globOptions = {nodir: true}; +//@ ``` +//@ +//@ `config.globOptions` changes how ShellJS expands glob (wildcard) +//@ expressions. See +//@ [fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) +//@ for available options. Be aware that modifying `config.globOptions` **may +//@ break ShellJS functionality.** + +//@ +//@ ### config.reset() +//@ +//@ Example: +//@ +//@ ```javascript +//@ var shell = require('shelljs'); +//@ // Make changes to shell.config, and do stuff... +//@ /* ... */ +//@ shell.config.reset(); // reset to original state +//@ // Do more stuff, but with original settings +//@ /* ... */ +//@ ``` +//@ +//@ Reset `shell.config` to the defaults: +//@ +//@ ```javascript +//@ { +//@ fatal: false, +//@ globOptions: {}, +//@ maxdepth: 255, +//@ noglob: false, +//@ silent: false, +//@ verbose: false, +//@ } +//@ ``` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js new file mode 100644 index 0000000..ca264a9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js @@ -0,0 +1,76 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('cat', _cat, { + canReceivePipe: true, + cmdOptions: { + 'n': 'number', + }, +}); + +//@ +//@ ### cat([options,] file [, file ...]) +//@ ### cat([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-n`: number all output lines +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var str = cat('file*.txt'); +//@ var str = cat('file1', 'file2'); +//@ var str = cat(['file1', 'file2']); // same as above +//@ ``` +//@ +//@ Returns a [ShellString](#shellstringstr) containing the given file, or a +//@ concatenated string containing the files if more than one file is given (a +//@ new line character is introduced between each file). 
+function _cat(options, files) { + var cat = common.readFromPipe(); + + if (!files && !cat) common.error('no paths given'); + + files = [].slice.call(arguments, 1); + + files.forEach(function (file) { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file); + } else if (common.statFollowLinks(file).isDirectory()) { + common.error(file + ': Is a directory'); + } + + cat += fs.readFileSync(file, 'utf8'); + }); + + if (options.number) { + cat = addNumbers(cat); + } + + return cat; +} +module.exports = _cat; + +function addNumbers(cat) { + var lines = cat.split('\n'); + var lastLine = lines.pop(); + + lines = lines.map(function (line, i) { + return numberedLine(i + 1, line); + }); + + if (lastLine.length) { + lastLine = numberedLine(lines.length + 1, lastLine); + } + lines.push(lastLine); + + return lines.join('\n'); +} + +function numberedLine(n, line) { + // GNU cat use six pad start number + tab. See http://lingrok.org/xref/coreutils/src/cat.c#57 + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/padStart + var number = (' ' + n).slice(-6) + '\t'; + return number + line; +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js new file mode 100644 index 0000000..1c6e73f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js @@ -0,0 +1,40 @@ +var os = require('os'); +var common = require('./common'); + +common.register('cd', _cd, {}); + +//@ +//@ ### cd([dir]) +//@ +//@ Changes to directory `dir` for the duration of the script. Changes to home +//@ directory if no argument is supplied. Returns a +//@ [ShellString](#shellstringstr) to indicate success or failure. +function _cd(options, dir) { + if (!dir) dir = os.homedir(); + + if (dir === '-') { + if (!process.env.OLDPWD) { + common.error('could not find previous directory'); + } else { + dir = process.env.OLDPWD; + } + } + + try { + var curDir = process.cwd(); + process.chdir(dir); + process.env.OLDPWD = curDir; + } catch (e) { + // something went wrong, let's figure out the error + var err; + try { + common.statFollowLinks(dir); // if this succeeds, it must be some sort of file + err = 'not a directory: ' + dir; + } catch (e2) { + err = 'no such file or directory: ' + dir; + } + if (err) common.error(err); + } + return ''; +} +module.exports = _cd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js new file mode 100644 index 0000000..b930cc7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js @@ -0,0 +1,222 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +var PERMS = (function (base) { + return { + OTHER_EXEC: base.EXEC, + OTHER_WRITE: base.WRITE, + OTHER_READ: base.READ, + + GROUP_EXEC: base.EXEC << 3, + GROUP_WRITE: base.WRITE << 3, + GROUP_READ: base.READ << 3, + + OWNER_EXEC: base.EXEC << 6, + OWNER_WRITE: base.WRITE << 6, + OWNER_READ: base.READ << 6, + + // Literal octal numbers are apparently not allowed in "strict" javascript. 
+ STICKY: parseInt('01000', 8), + SETGID: parseInt('02000', 8), + SETUID: parseInt('04000', 8), + + TYPE_MASK: parseInt('0770000', 8), + }; +}({ + EXEC: 1, + WRITE: 2, + READ: 4, +})); + +common.register('chmod', _chmod, { +}); + +//@ +//@ ### chmod([options,] octal_mode || octal_string, file) +//@ ### chmod([options,] symbolic_mode, file) +//@ +//@ Available options: +//@ +//@ + `-v`: output a diagnostic for every file processed//@ +//@ + `-c`: like verbose, but report only when a change is made//@ +//@ + `-R`: change files and directories recursively//@ +//@ +//@ Examples: +//@ +//@ ```javascript +//@ chmod(755, '/Users/brandon'); +//@ chmod('755', '/Users/brandon'); // same as above +//@ chmod('u+x', '/Users/brandon'); +//@ chmod('-R', 'a-w', '/Users/brandon'); +//@ ``` +//@ +//@ Alters the permissions of a file or directory by either specifying the +//@ absolute permissions in octal form or expressing the changes in symbols. +//@ This command tries to mimic the POSIX behavior as much as possible. +//@ Notable exceptions: +//@ +//@ + In symbolic modes, `a-r` and `-r` are identical. No consideration is +//@ given to the `umask`. +//@ + There is no "quiet" option, since default behavior is to run silent. +//@ + Windows OS uses a very different permission model than POSIX. `chmod()` +//@ does its best on Windows, but there are limits to how file permissions can +//@ be set. Note that WSL (Windows subsystem for Linux) **does** follow POSIX, +//@ so cross-platform compatibility should not be a concern there. +//@ +//@ Returns a [ShellString](#shellstringstr) indicating success or failure. +function _chmod(options, mode, filePattern) { + if (!filePattern) { + if (options.length > 0 && options.charAt(0) === '-') { + // Special case where the specified file permissions started with - to subtract perms, which + // get picked up by the option parser as command flags. + // If we are down by one argument and options starts with -, shift everything over. + [].unshift.call(arguments, ''); + } else { + common.error('You must specify a file.'); + } + } + + options = common.parseOptions(options, { + 'R': 'recursive', + 'c': 'changes', + 'v': 'verbose', + }); + + filePattern = [].slice.call(arguments, 2); + + var files; + + // TODO: replace this with a call to common.expand() + if (options.recursive) { + files = []; + filePattern.forEach(function addFile(expandedFile) { + var stat = common.statNoFollowLinks(expandedFile); + + if (!stat.isSymbolicLink()) { + files.push(expandedFile); + + if (stat.isDirectory()) { // intentionally does not follow symlinks. + fs.readdirSync(expandedFile).forEach(function (child) { + addFile(expandedFile + '/' + child); + }); + } + } + }); + } else { + files = filePattern; + } + + files.forEach(function innerChmod(file) { + file = path.resolve(file); + if (!fs.existsSync(file)) { + common.error('File not found: ' + file); + } + + // When recursing, don't follow symlinks. 
+ if (options.recursive && common.statNoFollowLinks(file).isSymbolicLink()) { + return; + } + + var stat = common.statFollowLinks(file); + var isDir = stat.isDirectory(); + var perms = stat.mode; + var type = perms & PERMS.TYPE_MASK; + + var newPerms = perms; + + if (Number.isNaN(parseInt(mode, 8))) { + // parse options + mode.split(',').forEach(function (symbolicMode) { + var pattern = /([ugoa]*)([=+-])([rwxXst]*)/i; + var matches = pattern.exec(symbolicMode); + + if (matches) { + var applyTo = matches[1]; + var operator = matches[2]; + var change = matches[3]; + + var changeOwner = applyTo.includes('u') || applyTo === 'a' || applyTo === ''; + var changeGroup = applyTo.includes('g') || applyTo === 'a' || applyTo === ''; + var changeOther = applyTo.includes('o') || applyTo === 'a' || applyTo === ''; + + var changeRead = change.includes('r'); + var changeWrite = change.includes('w'); + var changeExec = change.includes('x'); + var changeExecDir = change.includes('X'); + var changeSticky = change.includes('t'); + var changeSetuid = change.includes('s'); + + if (changeExecDir && isDir) { + changeExec = true; + } + + var mask = 0; + if (changeOwner) { + mask |= (changeRead ? PERMS.OWNER_READ : 0) + (changeWrite ? PERMS.OWNER_WRITE : 0) + (changeExec ? PERMS.OWNER_EXEC : 0) + (changeSetuid ? PERMS.SETUID : 0); + } + if (changeGroup) { + mask |= (changeRead ? PERMS.GROUP_READ : 0) + (changeWrite ? PERMS.GROUP_WRITE : 0) + (changeExec ? PERMS.GROUP_EXEC : 0) + (changeSetuid ? PERMS.SETGID : 0); + } + if (changeOther) { + mask |= (changeRead ? PERMS.OTHER_READ : 0) + (changeWrite ? PERMS.OTHER_WRITE : 0) + (changeExec ? PERMS.OTHER_EXEC : 0); + } + + // Sticky bit is special - it's not tied to user, group or other. + if (changeSticky) { + mask |= PERMS.STICKY; + } + + switch (operator) { + case '+': + newPerms |= mask; + break; + + case '-': + newPerms &= ~mask; + break; + + case '=': + newPerms = type + mask; + + // According to POSIX, when using = to explicitly set the + // permissions, setuid and setgid can never be cleared. + if (common.statFollowLinks(file).isDirectory()) { + newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; + } + break; + default: + common.error('Could not recognize operator: `' + operator + '`'); + } + + if (options.verbose) { + console.log(file + ' -> ' + newPerms.toString(8)); + } + + if (perms !== newPerms) { + if (!options.verbose && options.changes) { + console.log(file + ' -> ' + newPerms.toString(8)); + } + fs.chmodSync(file, newPerms); + perms = newPerms; // for the next round of changes! + } + } else { + common.error('Invalid symbolic mode change: ' + symbolicMode); + } + }); + } else { + // they gave us a full number + newPerms = type + parseInt(mode, 8); + + // POSIX rules are that setuid and setgid can only be added using numeric + // form, but not cleared. 
+ if (common.statFollowLinks(file).isDirectory()) { + newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; + } + + fs.chmodSync(file, newPerms); + } + }); + return ''; +} +module.exports = _chmod; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js new file mode 100644 index 0000000..a00d6c4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js @@ -0,0 +1,138 @@ +var execa = require('execa'); +var common = require('./common'); + +var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; +var COMMAND_NOT_FOUND_ERROR_CODE = 127; + +common.register('cmd', _cmd, { + cmdOptions: null, + globStart: 1, + canReceivePipe: true, + wrapOutput: true, +}); + +function isCommandNotFound(execaResult) { + if (process.platform === 'win32') { + var str = 'is not recognized as an internal or external command'; + return execaResult.exitCode && execaResult.stderr.includes(str); + } + return execaResult.failed && execaResult.code === 'ENOENT'; +} + +function isExecaInternalError(result) { + if (typeof result.stdout !== 'string') return true; + if (typeof result.stderr !== 'string') return true; + if (typeof result.exitCode !== 'number') return true; + if (result.exitCode === 0 && result.failed) return true; + // Otherwise assume this executed correctly. The command may still have exited + // with non-zero status, but that's not due to anything execa did. + return false; +} + +//@ +//@ ### cmd(arg1[, arg2, ...] [, options]) +//@ +//@ Available options: +//@ +//@ + `cwd: directoryPath`: change the current working directory only for this +//@ cmd() invocation. +//@ + `maxBuffer: num`: Raise or decrease the default buffer size for +//@ stdout/stderr. +//@ + `timeout`: Change the default timeout. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var version = cmd('node', '--version').stdout; +//@ cmd('git', 'commit', '-am', `Add suport for node ${version}`); +//@ console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) +//@ console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) +//@ ``` +//@ +//@ Executes the given command synchronously. This is intended as an easier +//@ alternative for [exec()](#execcommand--options--callback), with better +//@ security around globbing, comamnd injection, and variable expansion. This is +//@ guaranteed to only run one external command, and won't give special +//@ treatment for any shell characters (ex. this treats `|` as a literal +//@ character, not as a shell pipeline). +//@ This returns a [ShellString](#shellstringstr). +//@ +//@ By default, this performs globbing on all platforms, but you can disable +//@ this with `set('-f')`. +//@ +//@ This **does not** support asynchronous mode. If you need asynchronous +//@ command execution, check out [execa](https://www.npmjs.com/package/execa) or +//@ the node builtin `child_process.execFile()` instead. +function _cmd(options, command, commandArgs, userOptions) { + if (!command) { + common.error('Must specify a non-empty string as a command'); + } + + // `options` will usually not have a value: it's added by our commandline flag + // parsing engine. + commandArgs = [].slice.call(arguments, 2); + + // `userOptions` may or may not be provided. We need to check the last + // argument. If it's an object, assume it's meant to be passed as + // userOptions (since ShellStrings are already flattened to strings). 
+ if (commandArgs.length === 0) { + userOptions = {}; + } else { + var lastArg = commandArgs.pop(); + if (common.isObject(lastArg)) { + userOptions = lastArg; + } else { + userOptions = {}; + commandArgs.push(lastArg); + } + } + + var pipe = common.readFromPipe(); + + // Some of our defaults differ from execa's defaults. These can be overridden + // by the user. + var defaultOptions = { + maxBuffer: DEFAULT_MAXBUFFER_SIZE, + stripFinalNewline: false, // Preserve trailing newlines for consistency with unix. + reject: false, // Use ShellJS's error handling system. + }; + + // For other options, we forbid the user from overriding them (either for + // correctness or security). + var requiredOptions = { + input: pipe, + shell: false, + }; + + var execaOptions = + Object.assign(defaultOptions, userOptions, requiredOptions); + + var result = execa.sync(command, commandArgs, execaOptions); + var stdout; + var stderr; + var code; + if (isCommandNotFound(result)) { + // This can happen if `command` is not an executable binary, or possibly + // under other conditions. + stdout = ''; + stderr = "'" + command + "': command not found"; + code = COMMAND_NOT_FOUND_ERROR_CODE; + } else if (isExecaInternalError(result)) { + // Catch-all: execa tried to run `command` but it encountered some error + // (ex. maxBuffer, timeout). + stdout = result.stdout || ''; + stderr = result.stderr || + `'${command}' encountered an error during execution`; + code = result.exitCode !== undefined && result.exitCode > 0 ? result.exitCode : 1; + } else { + // Normal exit: execa was able to execute `command` and get a return value. + stdout = result.stdout.toString(); + stderr = result.stderr.toString(); + code = result.exitCode; + } + + // Pass `continue: true` so we can specify a value for stdout. + if (code) common.error(stderr, code, { silent: true, continue: true }); + return new common.ShellString(stdout, stderr, code); +} +module.exports = _cmd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js new file mode 100644 index 0000000..b9ffeda --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js @@ -0,0 +1,545 @@ +// Ignore warning about 'new String()' and use of the Buffer constructor +/* eslint no-new-wrappers: "off", + no-buffer-constructor: "off" */ + +'use strict'; + +var os = require('os'); +var fs = require('fs'); +var glob = require('fast-glob'); + +var shell = {}; +exports.shell = shell; + +var shellMethods = Object.create(shell); + +exports.extend = Object.assign; + +// Check if we're running under electron +var isElectron = Boolean(process.versions.electron); + +// Module globals (assume no execPath by default) +var DEFAULT_CONFIG = { + fatal: false, + globOptions: {}, + maxdepth: 255, + noglob: false, + silent: false, + verbose: false, + execPath: null, + bufLength: 64 * 1024, // 64KB +}; + +var config = { + reset() { + Object.assign(this, DEFAULT_CONFIG); + if (!isElectron) { + this.execPath = process.execPath; + } + }, + resetForTesting() { + this.reset(); + this.silent = true; + }, +}; + +config.reset(); +exports.config = config; + +// Note: commands should generally consider these as read-only values. 
+var state = { + error: null, + errorCode: 0, + currentCmd: 'shell.js', +}; +exports.state = state; + +delete process.env.OLDPWD; // initially, there's no previous directory + +// Reliably test if something is any sort of javascript object +function isObject(a) { + return typeof a === 'object' && a !== null; +} +exports.isObject = isObject; + +function log() { + /* istanbul ignore next */ + if (!config.silent) { + console.error.apply(console, arguments); + } +} +exports.log = log; + +// Converts strings to be equivalent across all platforms. Primarily responsible +// for making sure we use '/' instead of '\' as path separators, but this may be +// expanded in the future if necessary +function convertErrorOutput(msg) { + if (typeof msg !== 'string') { + throw new TypeError('input must be a string'); + } + return msg.replace(/\\/g, '/'); +} +exports.convertErrorOutput = convertErrorOutput; + +// An exception class to help propagate command errors (e.g., non-zero exit +// status) up to the top-level. {@param value} should be a ShellString. +class CommandError extends Error { + constructor(value) { + super(value.toString()); + this.returnValue = value; + } +} +exports.CommandError = CommandError; // visible for testing + +// Shows error message. Throws if fatal is true (defaults to config.fatal, overridable with options.fatal) +function error(msg, _code, options) { + // Validate input + if (typeof msg !== 'string') throw new Error('msg must be a string'); + + var DEFAULT_OPTIONS = { + continue: false, + code: 1, + prefix: state.currentCmd + ': ', + silent: false, + fatal: config.fatal, + }; + + if (typeof _code === 'number' && isObject(options)) { + options.code = _code; + } else if (isObject(_code)) { // no 'code' + options = _code; + } else if (typeof _code === 'number') { // no 'options' + options = { code: _code }; + } else if (typeof _code !== 'number') { // only 'msg' + options = {}; + } + options = Object.assign({}, DEFAULT_OPTIONS, options); + + if (!state.errorCode) state.errorCode = options.code; + + var logEntry = convertErrorOutput(options.prefix + msg); + state.error = state.error ? state.error + '\n' : ''; + state.error += logEntry; + + // Throw an error, or log the entry + if (options.fatal) { + var err = new Error(logEntry); + err.code = options.code; + throw err; + } + if (msg.length > 0 && !options.silent) log(logEntry); + + if (!options.continue) { + throw new CommandError(new ShellString('', state.error, state.errorCode)); + } +} +exports.error = error; + +//@ +//@ ### ShellString(str) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var foo = new ShellString('hello world'); +//@ ``` +//@ +//@ This is a dedicated type returned by most ShellJS methods, which wraps a +//@ string (or array) value. This has all the string (or array) methods, but +//@ also exposes extra methods: [`.to()`](#shellstringprototypetofile), +//@ [`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods +//@ (ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string +//@ by calling `.toString()`. +//@ +//@ This type also exposes the corresponding command's stdout, stderr, and +//@ return status code via the `.stdout` (string), `.stderr` (string), and +//@ `.code` (number) properties respectively. 
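+// A hedged usage sketch of the ShellString type documented above (illustrative
+// only, not part of the upstream shelljs source; `shell` and the file names
+// below are placeholders):
+//
+//   var shell = require('shelljs');
+//   var result = shell.cat('notes.txt'); // most commands return a ShellString
+//   result.code;                         // numeric exit status (0 on success)
+//   result.stderr;                       // captured error text ('' if none)
+//   result.grep('TODO').to('todos.txt'); // pipe-able methods are attached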
+function ShellString(stdout, stderr, code) { + var that; + if (stdout instanceof Array) { + that = stdout; + that.stdout = stdout.join('\n'); + if (stdout.length > 0) that.stdout += '\n'; + } else { + that = new String(stdout); + that.stdout = stdout; + } + that.stderr = stderr; + that.code = code; + // A list of all commands that can appear on the right-hand side of a pipe + // (populated by calls to common.wrap()) + pipeMethods.forEach(function (cmd) { + that[cmd] = shellMethods[cmd].bind(that); + }); + return that; +} + +exports.ShellString = ShellString; + +// Returns {'alice': true, 'bob': false} when passed a string and dictionary as follows: +// parseOptions('-a', {'a':'alice', 'b':'bob'}); +// Returns {'reference': 'string-value', 'bob': false} when passed two dictionaries of the form: +// parseOptions({'-r': 'string-value'}, {'r':'reference', 'b':'bob'}); +// Throws an error when passed a string that does not start with '-': +// parseOptions('a', {'a':'alice'}); // throws +function parseOptions(opt, map, errorOptions) { + errorOptions = errorOptions || {}; + // Validate input + if (typeof opt !== 'string' && !isObject(opt)) { + throw new TypeError('options must be strings or key-value pairs'); + } else if (!isObject(map)) { + throw new TypeError('parseOptions() internal error: map must be an object'); + } else if (!isObject(errorOptions)) { + throw new TypeError( + 'parseOptions() internal error: errorOptions must be object', + ); + } + + if (opt === '--') { + // This means there are no options. + return {}; + } + + // All options are false by default + var options = {}; + Object.keys(map).forEach(function (letter) { + var optName = map[letter]; + if (optName[0] !== '!') { + options[optName] = false; + } + }); + + if (opt === '') return options; // defaults + + if (typeof opt === 'string') { + if (opt[0] !== '-') { + throw new Error("Options string must start with a '-'"); + } + + // e.g. chars = ['R', 'f'] + var chars = opt.slice(1).split(''); + + chars.forEach(function (c) { + if (c in map) { + var optionName = map[c]; + if (optionName[0] === '!') { + options[optionName.slice(1)] = false; + } else { + options[optionName] = true; + } + } else { + error('option not recognized: ' + c, errorOptions); + } + }); + } else { // opt is an Object + Object.keys(opt).forEach(function (key) { + if (key[0] === '-') { + // key is a string of the form '-r', '-d', etc. + var c = key[1]; + if (c in map) { + var optionName = map[c]; + options[optionName] = opt[key]; // assign the given value + } else { + error('option not recognized: ' + c, errorOptions); + } + } else if (key in options) { + // key is a "long option", so it should be the same + options[key] = opt[key]; + } else { + error('option not recognized: {' + key + ':...}', errorOptions); + } + }); + } + return options; +} +exports.parseOptions = parseOptions; + +function globOptions() { + // These options are just to make fast-glob be compatible with POSIX (bash) + // wildcard behavior. + var defaultGlobOptions = { + onlyFiles: false, + followSymbolicLinks: false, + }; + + var newGlobOptions = Object.assign({}, config.globOptions); + var optionRenames = { + // node-glob's 'nodir' is not quote the same as fast-glob's 'onlyFiles'. + // Compatibility for this is implemented at the call site. 
+ mark: 'markDirectories', + matchBase: 'baseNameMatch', + }; + Object.keys(optionRenames).forEach(function (oldKey) { + var newKey = optionRenames[oldKey]; + if (oldKey in config.globOptions) { + newGlobOptions[newKey] = config.globOptions[oldKey]; + } + }); + var invertedOptionRenames = { + nobrace: 'braceExpansion', + noglobstar: 'globstar', + noext: 'extglob', + nocase: 'caseSensitiveMatch', + }; + Object.keys(invertedOptionRenames).forEach(function (oldKey) { + var newKey = invertedOptionRenames[oldKey]; + if (oldKey in config.globOptions) { + newGlobOptions[newKey] = !config.globOptions[oldKey]; + } + }); + return Object.assign({}, defaultGlobOptions, newGlobOptions); +} + +// Expands wildcards with matching (ie. existing) file names. +// For example: +// expand(['file*.js']) = ['file1.js', 'file2.js', ...] +// (if the files 'file1.js', 'file2.js', etc, exist in the current dir) +function expand(list) { + if (!Array.isArray(list)) { + throw new TypeError('must be an array'); + } + var expanded = []; + list.forEach(function (listEl) { + // Don't expand non-strings + if (typeof listEl !== 'string') { + expanded.push(listEl); + } else { + var ret; + var globOpts = globOptions(); + try { + ret = glob.sync(listEl, globOpts); + } catch (e) { + // if glob fails, interpret the string literally + ret = [listEl]; + } + // if nothing matched, interpret the string literally + ret = ret.length > 0 ? ret.sort() : [listEl]; + if (globOpts.nodir) { + ret = ret.filter(function (file) { + return !statNoFollowLinks(file).isDirectory(); + }); + } + expanded = expanded.concat(ret); + } + }); + return expanded; +} +exports.expand = expand; + +// Normalizes Buffer creation, using Buffer.alloc if possible. +// Also provides a good default buffer length for most use cases. +var buffer = typeof Buffer.alloc === 'function' ? + function (len) { + return Buffer.alloc(len || config.bufLength); + } : + function (len) { + return new Buffer(len || config.bufLength); + }; +exports.buffer = buffer; + +// Normalizes _unlinkSync() across platforms to match Unix behavior, i.e. +// file can be unlinked even if it's read-only, see https://github.com/joyent/node/issues/3006 +function unlinkSync(file) { + try { + fs.unlinkSync(file); + } catch (e) { + // Try to override file permission + /* istanbul ignore next */ + if (e.code === 'EPERM') { + fs.chmodSync(file, '0666'); + fs.unlinkSync(file); + } else { + throw e; + } + } +} +exports.unlinkSync = unlinkSync; + +// wrappers around common.statFollowLinks and common.statNoFollowLinks that clarify intent +// and improve readability +function statFollowLinks() { + return fs.statSync.apply(fs, arguments); +} +exports.statFollowLinks = statFollowLinks; + +function statNoFollowLinks() { + return fs.lstatSync.apply(fs, arguments); +} +exports.statNoFollowLinks = statNoFollowLinks; + +// e.g. 'shelljs_a5f185d0443ca...' 
+function randomFileName() { + function randomHash(count) { + if (count === 1) { + return parseInt(16 * Math.random(), 10).toString(16); + } + var hash = ''; + for (var i = 0; i < count; i++) { + hash += randomHash(1); + } + return hash; + } + + return 'shelljs_' + randomHash(20); +} +exports.randomFileName = randomFileName; + +// Common wrapper for all Unix-like commands that performs glob expansion, +// command-logging, and other nice things +function wrap(cmd, fn, options) { + options = options || {}; + return function () { + var retValue = null; + + state.currentCmd = cmd; + state.error = null; + state.errorCode = 0; + + try { + var args = [].slice.call(arguments, 0); + + // Log the command to stderr, if appropriate + if (config.verbose) { + console.error.apply(console, [cmd].concat(args)); + } + + // If this is coming from a pipe, let's set the pipedValue (otherwise, set + // it to the empty string) + state.pipedValue = (this && typeof this.stdout === 'string') ? this.stdout : ''; + + if (options.unix === false) { // this branch is for exec() + retValue = fn.apply(this, args); + } else { // and this branch is for everything else + if (isObject(args[0]) && args[0].constructor.name === 'Object') { + // a no-op, allowing the syntax `touch({'-r': file}, ...)` + } else if (args.length === 0 || typeof args[0] !== 'string' || args[0].length <= 1 || args[0][0] !== '-') { + args.unshift(''); // only add dummy option if '-option' not already present + } + + // flatten out arrays that are arguments, to make the syntax: + // `cp([file1, file2, file3], dest);` + // equivalent to: + // `cp(file1, file2, file3, dest);` + args = args.reduce(function (accum, cur) { + if (Array.isArray(cur)) { + return accum.concat(cur); + } + accum.push(cur); + return accum; + }, []); + + // Convert ShellStrings (basically just String objects) to regular strings + args = args.map(function (arg) { + if (isObject(arg) && arg.constructor.name === 'String') { + return arg.toString(); + } + return arg; + }); + + // Expand the '~' if appropriate + var homeDir = os.homedir(); + args = args.map(function (arg) { + if (typeof arg === 'string' && arg.slice(0, 2) === '~/' || arg === '~') { + return arg.replace(/^~/, homeDir); + } + return arg; + }); + + // Perform glob-expansion on all arguments after globStart, but preserve + // the arguments before it (like regexes for sed and grep) + if (!config.noglob && options.allowGlobbing === true) { + args = args.slice(0, options.globStart).concat(expand(args.slice(options.globStart))); + } + + try { + // parse options if options are provided + if (isObject(options.cmdOptions)) { + args[0] = parseOptions(args[0], options.cmdOptions); + } + + retValue = fn.apply(this, args); + } catch (e) { + /* istanbul ignore else */ + if (e instanceof CommandError) { + retValue = e.returnValue; + } else { + throw e; // this is probably a bug that should be thrown up the call stack + } + } + } + } catch (e) { + /* istanbul ignore next */ + if (!state.error) { + // If state.error hasn't been set it's an error thrown by Node, not us - probably a bug... 
+ e.name = 'ShellJSInternalError'; + throw e; + } + if (config.fatal || options.handlesFatalDynamically) throw e; + } + + if (options.wrapOutput && + (typeof retValue === 'string' || Array.isArray(retValue))) { + retValue = new ShellString(retValue, state.error, state.errorCode); + } + + state.currentCmd = 'shell.js'; + return retValue; + }; +} // wrap +exports.wrap = wrap; + +// This returns all the input that is piped into the current command (or the +// empty string, if this isn't on the right-hand side of a pipe +function _readFromPipe() { + return state.pipedValue; +} +exports.readFromPipe = _readFromPipe; + +var DEFAULT_WRAP_OPTIONS = { + allowGlobbing: true, + canReceivePipe: false, + cmdOptions: null, + globStart: 1, + handlesFatalDynamically: false, + pipeOnly: false, + wrapOutput: true, + unix: true, +}; + +// This is populated during plugin registration +var pipeMethods = []; + +// Register a new ShellJS command +function _register(name, implementation, wrapOptions) { + wrapOptions = wrapOptions || {}; + + // Validate options + Object.keys(wrapOptions).forEach(function (option) { + if (!DEFAULT_WRAP_OPTIONS.hasOwnProperty(option)) { + throw new Error("Unknown option '" + option + "'"); + } + if (typeof wrapOptions[option] !== typeof DEFAULT_WRAP_OPTIONS[option]) { + throw new TypeError("Unsupported type '" + typeof wrapOptions[option] + + "' for option '" + option + "'"); + } + }); + + // If an option isn't specified, use the default + wrapOptions = Object.assign({}, DEFAULT_WRAP_OPTIONS, wrapOptions); + + if (shell.hasOwnProperty(name)) { + throw new Error('Command `' + name + '` already exists'); + } + + if (wrapOptions.pipeOnly) { + wrapOptions.canReceivePipe = true; + shellMethods[name] = wrap(name, implementation, wrapOptions); + } else { + shell[name] = wrap(name, implementation, wrapOptions); + } + + if (wrapOptions.canReceivePipe) { + pipeMethods.push(name); + } +} +exports.register = _register; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js new file mode 100644 index 0000000..af4a0a1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js @@ -0,0 +1,314 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('cp', _cp, { + cmdOptions: { + 'f': '!no_force', + 'n': 'no_force', + 'u': 'update', + 'R': 'recursive', + 'r': 'recursive', + 'L': 'followsymlink', + 'P': 'noFollowsymlink', + 'p': 'preserve', + }, + wrapOutput: false, +}); + +// Buffered file copy, synchronous +// (Using readFileSync() + writeFileSync() could easily cause a memory overflow +// with large files) +function copyFileSync(srcFile, destFile, options) { + if (!fs.existsSync(srcFile)) { + common.error('copyFileSync: no such file or directory: ' + srcFile); + } + + var isWindows = process.platform === 'win32'; + + // Check the mtimes of the files if the '-u' flag is provided + try { + if (options.update && common.statFollowLinks(srcFile).mtime < fs.statSync(destFile).mtime) { + return; + } + } catch (e) { + // If we're here, destFile probably doesn't exist, so just do a normal copy + } + + if (common.statNoFollowLinks(srcFile).isSymbolicLink() && !options.followsymlink) { + try { + common.statNoFollowLinks(destFile); + common.unlinkSync(destFile); // re-link it + } catch (e) { + // it doesn't exist, so no work needs to be done + } + + var symlinkFull = fs.readlinkSync(srcFile); + 
fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); + } else { + var buf = common.buffer(); + var bufLength = buf.length; + var bytesRead = bufLength; + var pos = 0; + var fdr = null; + var fdw = null; + var srcStat = common.statFollowLinks(srcFile); + + try { + fdr = fs.openSync(srcFile, 'r'); + } catch (e) { + /* istanbul ignore next */ + common.error('copyFileSync: could not read src file (' + srcFile + ')'); + } + + try { + fdw = fs.openSync(destFile, 'w', srcStat.mode); + } catch (e) { + /* istanbul ignore next */ + common.error('copyFileSync: could not write to dest file (code=' + e.code + '):' + destFile); + } + + while (bytesRead === bufLength) { + bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); + fs.writeSync(fdw, buf, 0, bytesRead); + pos += bytesRead; + } + + if (options.preserve) { + fs.fchownSync(fdw, srcStat.uid, srcStat.gid); + // Note: utimesSync does not work (rounds to seconds), but futimesSync has + // millisecond precision. + fs.futimesSync(fdw, srcStat.atime, srcStat.mtime); + } + + fs.closeSync(fdr); + fs.closeSync(fdw); + } +} + +// Recursively copies 'sourceDir' into 'destDir' +// Adapted from https://github.com/ryanmcgrath/wrench-js +// +// Copyright (c) 2010 Ryan McGrath +// Copyright (c) 2012 Artur Adib +// +// Licensed under the MIT License +// http://www.opensource.org/licenses/mit-license.php +function cpdirSyncRecursive(sourceDir, destDir, currentDepth, opts) { + if (!opts) opts = {}; + + // Ensure there is not a run away recursive copy + if (currentDepth >= common.config.maxdepth) return; + currentDepth++; + + var isWindows = process.platform === 'win32'; + + // Create the directory where all our junk is moving to; read the mode/etc. of + // the source directory (we'll set this on the destDir at the end). + var checkDir = common.statFollowLinks(sourceDir); + try { + fs.mkdirSync(destDir); + } catch (e) { + // if the directory already exists, that's okay + if (e.code !== 'EEXIST') throw e; + } + + var files = fs.readdirSync(sourceDir); + + for (var i = 0; i < files.length; i++) { + var srcFile = sourceDir + '/' + files[i]; + var destFile = destDir + '/' + files[i]; + var srcFileStat = common.statNoFollowLinks(srcFile); + + var symlinkFull; + if (opts.followsymlink) { + if (cpcheckcycle(sourceDir, srcFile)) { + // Cycle link found. + console.error('Cycle link found.'); + symlinkFull = fs.readlinkSync(srcFile); + fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); + continue; + } + } + if (srcFileStat.isDirectory()) { + /* recursion this thing right on back. */ + cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); + } else if (srcFileStat.isSymbolicLink() && !opts.followsymlink) { + symlinkFull = fs.readlinkSync(srcFile); + try { + common.statNoFollowLinks(destFile); + common.unlinkSync(destFile); // re-link it + } catch (e) { + // it doesn't exist, so no work needs to be done + } + fs.symlinkSync(symlinkFull, destFile, isWindows ? 
'junction' : null); + } else if (srcFileStat.isSymbolicLink() && opts.followsymlink) { + srcFileStat = common.statFollowLinks(srcFile); + if (srcFileStat.isDirectory()) { + cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); + } else { + copyFileSync(srcFile, destFile, opts); + } + } else if (fs.existsSync(destFile) && opts.no_force) { + common.log('skipping existing file: ' + files[i]); + } else { + copyFileSync(srcFile, destFile, opts); + } + } // for files + + // finally change the mode for the newly created directory (otherwise, we + // couldn't add files to a read-only directory). + // var checkDir = common.statFollowLinks(sourceDir); + if (opts.preserve) { + fs.utimesSync(destDir, checkDir.atime, checkDir.mtime); + } + fs.chmodSync(destDir, checkDir.mode); +} // cpdirSyncRecursive + +// Checks if cureent file was created recently +function checkRecentCreated(sources, index) { + var lookedSource = sources[index]; + return sources.slice(0, index).some(function (src) { + return path.basename(src) === path.basename(lookedSource); + }); +} + +function cpcheckcycle(sourceDir, srcFile) { + var srcFileStat = common.statNoFollowLinks(srcFile); + if (srcFileStat.isSymbolicLink()) { + // Do cycle check. For example: + // $ mkdir -p 1/2/3/4 + // $ cd 1/2/3/4 + // $ ln -s ../../3 link + // $ cd ../../../.. + // $ cp -RL 1 copy + var cyclecheck = common.statFollowLinks(srcFile); + if (cyclecheck.isDirectory()) { + var sourcerealpath = fs.realpathSync(sourceDir); + var symlinkrealpath = fs.realpathSync(srcFile); + var re = new RegExp(symlinkrealpath); + if (re.test(sourcerealpath)) { + return true; + } + } + } + return false; +} + +//@ +//@ ### cp([options,] source [, source ...], dest) +//@ ### cp([options,] source_array, dest) +//@ +//@ Available options: +//@ +//@ + `-f`: force (default behavior) +//@ + `-n`: no-clobber +//@ + `-u`: only copy if `source` is newer than `dest` +//@ + `-r`, `-R`: recursive +//@ + `-L`: follow symlinks +//@ + `-P`: don't follow symlinks +//@ + `-p`: preserve file mode, ownership, and timestamps +//@ +//@ Examples: +//@ +//@ ```javascript +//@ cp('file1', 'dir1'); +//@ cp('-R', 'path/to/dir/', '~/newCopy/'); +//@ cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); +//@ cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above +//@ ``` +//@ +//@ Copies files. Returns a [ShellString](#shellstringstr) indicating success +//@ or failure. 
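+// Illustrative sketch of the option interplay handled below (hedged example,
+// not from the upstream source; the paths are placeholders):
+//
+//   cp('link-to-file', 'out/');          // without -R, symlinks are followed (-L is implied)
+//   cp('-RP', 'dir-with-links', 'out');  // -P keeps symlinks as symlinks while recursing
+//   cp('-n', 'a.txt', 'b.txt');          // -n: an existing b.txt is left untouched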
+function _cp(options, sources, dest) {
+  // If we're missing -R, it actually implies -L (unless -P is explicit)
+  if (options.followsymlink) {
+    options.noFollowsymlink = false;
+  }
+  if (!options.recursive && !options.noFollowsymlink) {
+    options.followsymlink = true;
+  }
+
+  // Get sources, dest
+  if (arguments.length < 3) {
+    common.error('missing <source> and/or <dest>');
+  } else {
+    sources = [].slice.call(arguments, 1, arguments.length - 1);
+    dest = arguments[arguments.length - 1];
+  }
+
+  var destExists = fs.existsSync(dest);
+  var destStat = destExists && common.statFollowLinks(dest);
+
+  // Dest is not existing dir, but multiple sources given
+  if ((!destExists || !destStat.isDirectory()) && sources.length > 1) {
+    common.error('dest is not a directory (too many sources)');
+  }
+
+  // Dest is an existing file, but -n is given
+  if (destExists && destStat.isFile() && options.no_force) {
+    return new common.ShellString('', '', 0);
+  }
+
+  sources.forEach(function (src, srcIndex) {
+    if (!fs.existsSync(src)) {
+      if (src === '') src = "''"; // if src was empty string, display empty string
+      common.error('no such file or directory: ' + src, { continue: true });
+      return; // skip file
+    }
+    var srcStat = common.statFollowLinks(src);
+    if (!options.noFollowsymlink && srcStat.isDirectory()) {
+      if (!options.recursive) {
+        // Non-Recursive
+        common.error("omitting directory '" + src + "'", { continue: true });
+      } else {
+        // Recursive
+        // 'cp /a/source dest' should create 'source' in 'dest'
+        var newDest = (destStat && destStat.isDirectory()) ?
+            path.join(dest, path.basename(src)) :
+            dest;
+
+        try {
+          common.statFollowLinks(path.dirname(dest));
+          cpdirSyncRecursive(src, newDest, 0, options);
+        } catch (e) {
+          /* istanbul ignore next */
+          common.error("cannot create directory '" + dest + "': No such file or directory");
+        }
+      }
+    } else {
+      // If here, src is a file
+
+      // When copying to '/path/dir':
+      //   thisDest = '/path/dir/file1'
+      var thisDest = dest;
+      if (destStat && destStat.isDirectory()) {
+        thisDest = path.normalize(dest + '/' + path.basename(src));
+      }
+
+      var thisDestExists = fs.existsSync(thisDest);
+      if (thisDestExists && checkRecentCreated(sources, srcIndex)) {
+        // cannot overwrite file created recently in current execution, but we want to continue copying other files
+        if (!options.no_force) {
+          common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true });
+        }
+        return;
+      }
+
+      if (thisDestExists && options.no_force) {
+        return; // skip file
+      }
+
+      if (path.relative(src, thisDest) === '') {
+        // a file cannot be copied to itself, but we want to continue copying other files
+        common.error("'" + thisDest + "' and '" + src + "' are the same file", { continue: true });
+        return;
+      }
+
+      copyFileSync(src, thisDest, options);
+    }
+  }); // forEach(src)
+
+  return new common.ShellString('', common.state.error, common.state.errorCode);
+}
+module.exports = _cp;
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js
new file mode 100644
index 0000000..9b7251d
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js
@@ -0,0 +1,210 @@
+var path = require('path');
+var common = require('./common');
+var _cd = require('./cd');
+
+common.register('dirs', _dirs, {
+  wrapOutput: false,
+});
+common.register('pushd', _pushd, {
+  wrapOutput: false,
+});
+common.register('popd', _popd, {
+  wrapOutput: false,
+}); + +// Pushd/popd/dirs internals +var _dirStack = []; + +function _isStackIndex(index) { + return (/^[-+]\d+$/).test(index); +} + +function _parseStackIndex(index) { + if (_isStackIndex(index)) { + if (Math.abs(index) < _dirStack.length + 1) { // +1 for pwd + return (/^-/).test(index) ? Number(index) - 1 : Number(index); + } + common.error(index + ': directory stack index out of range'); + } else { + common.error(index + ': invalid number'); + } +} + +function _actualDirStack() { + return [process.cwd()].concat(_dirStack); +} + +//@ +//@ ### pushd([options,] [dir | '-N' | '+N']) +//@ +//@ Available options: +//@ +//@ + `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. +//@ + `-q`: Suppresses output to the console. +//@ +//@ Arguments: +//@ +//@ + `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. +//@ + `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. +//@ + `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ // process.cwd() === '/usr' +//@ pushd('/etc'); // Returns /etc /usr +//@ pushd('+1'); // Returns /usr /etc +//@ ``` +//@ +//@ Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. Returns an array of paths in the stack. +function _pushd(options, dir) { + if (_isStackIndex(options)) { + dir = options; + options = ''; + } + + options = common.parseOptions(options, { + 'n': 'no-cd', + 'q': 'quiet', + }); + + var dirs = _actualDirStack(); + + if (dir === '+0') { + return dirs; // +0 is a noop + } else if (!dir) { + if (dirs.length > 1) { + dirs = dirs.splice(1, 1).concat(dirs); + } else { + return common.error('no other directory'); + } + } else if (_isStackIndex(dir)) { + var n = _parseStackIndex(dir); + dirs = dirs.slice(n).concat(dirs.slice(0, n)); + } else if (options['no-cd']) { + dirs.splice(1, 0, dir); + } else { + dirs.unshift(dir); + } + + if (options['no-cd']) { + dirs = dirs.slice(1); + } else { + dir = path.resolve(dirs.shift()); + _cd('', dir); + } + + _dirStack = dirs; + return _dirs(options.quiet ? '-q' : ''); +} +exports.pushd = _pushd; + +//@ +//@ +//@ ### popd([options,] ['-N' | '+N']) +//@ +//@ Available options: +//@ +//@ + `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. +//@ + `-q`: Suppresses output to the console. +//@ +//@ Arguments: +//@ +//@ + `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. +//@ + `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ echo(process.cwd()); // '/usr' +//@ pushd('/etc'); // '/etc /usr' +//@ echo(process.cwd()); // '/etc' +//@ popd(); // '/usr' +//@ echo(process.cwd()); // '/usr' +//@ ``` +//@ +//@ When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. 
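+// Illustrative sketch of the stack-index handling implemented below (hedged
+// example, not from the upstream source; the directories are placeholders).
+// Assume dirs() currently prints: /usr /etc /home (left-most = current dir).
+//
+//   popd();      // cd to /etc; the stack becomes /etc /home
+//   popd('+1');  // remove /etc (index 1) without changing directory
+//   popd('-n');  // manipulate the stack only; the working directory is untouched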
+function _popd(options, index) { + if (_isStackIndex(options)) { + index = options; + options = ''; + } + + options = common.parseOptions(options, { + 'n': 'no-cd', + 'q': 'quiet', + }); + + if (!_dirStack.length) { + return common.error('directory stack empty'); + } + + index = _parseStackIndex(index || '+0'); + + if (options['no-cd'] || index > 0 || _dirStack.length + index === 0) { + index = index > 0 ? index - 1 : index; + _dirStack.splice(index, 1); + } else { + var dir = path.resolve(_dirStack.shift()); + _cd('', dir); + } + + return _dirs(options.quiet ? '-q' : ''); +} +exports.popd = _popd; + +//@ +//@ +//@ ### dirs([options | '+N' | '-N']) +//@ +//@ Available options: +//@ +//@ + `-c`: Clears the directory stack by deleting all of the elements. +//@ + `-q`: Suppresses output to the console. +//@ +//@ Arguments: +//@ +//@ + `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. +//@ + `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. +//@ +//@ Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. +//@ +//@ See also: `pushd`, `popd` +function _dirs(options, index) { + if (_isStackIndex(options)) { + index = options; + options = ''; + } + + options = common.parseOptions(options, { + 'c': 'clear', + 'q': 'quiet', + }); + + if (options.clear) { + _dirStack = []; + return _dirStack; + } + + var stack = _actualDirStack(); + + if (index) { + index = _parseStackIndex(index); + + if (index < 0) { + index = stack.length + index; + } + + if (!options.quiet) { + common.log(stack[index]); + } + return stack[index]; + } + + if (!options.quiet) { + common.log(stack.join(' ')); + } + + return stack; +} +exports.dirs = _dirs; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js new file mode 100644 index 0000000..da37f43 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js @@ -0,0 +1,62 @@ +var format = require('util').format; + +var common = require('./common'); + +common.register('echo', _echo, { + allowGlobbing: false, +}); + +//@ +//@ ### echo([options,] string [, string ...]) +//@ +//@ Available options: +//@ +//@ + `-e`: interpret backslash escapes (default) +//@ + `-n`: remove trailing newline from output +//@ +//@ Examples: +//@ +//@ ```javascript +//@ echo('hello world'); +//@ var str = echo('hello world'); +//@ echo('-n', 'no newline at end'); +//@ ``` +//@ +//@ Prints `string` to stdout, and returns a [ShellString](#shellstringstr). +function _echo(opts) { + // allow strings starting with '-', see issue #20 + var messages = [].slice.call(arguments, opts ? 0 : 1); + var options = {}; + + // If the first argument starts with '-', parse it as options string. + // If parseOptions throws, it wasn't an options string. + try { + options = common.parseOptions(messages[0], { + 'e': 'escapes', + 'n': 'no_newline', + }, { + silent: true, + }); + + // Allow null to be echoed + if (messages[0]) { + messages.shift(); + } + } catch (_) { + // Clear out error if an error occurred + common.state.error = null; + } + + var output = format.apply(null, messages); + + // Add newline if -n is not passed. 
+ if (!options.no_newline) { + output += '\n'; + } + + process.stdout.write(output); + + return output; +} + +module.exports = _echo; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js new file mode 100644 index 0000000..b0ed59e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js @@ -0,0 +1,15 @@ +var common = require('./common'); + +//@ +//@ ### error() +//@ +//@ Tests if error occurred in the last command. Returns a truthy value if an +//@ error returned, or a falsy value otherwise. +//@ +//@ **Note**: do not rely on the +//@ return value to be an error message. If you need the last error message, use +//@ the `.stderr` attribute from the last command's return value instead. +function error() { + return common.state.error; +} +module.exports = error; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js new file mode 100644 index 0000000..a1c7fd2 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js @@ -0,0 +1,10 @@ +var common = require('./common'); + +//@ +//@ ### errorCode() +//@ +//@ Returns the error code from the last command. +function errorCode() { + return common.state.errorCode; +} +module.exports = errorCode; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js new file mode 100644 index 0000000..e8446f6 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js @@ -0,0 +1,71 @@ +var childProcess = require('child_process'); +var fs = require('fs'); + +function main() { + var paramFilePath = process.argv[2]; + + var serializedParams = fs.readFileSync(paramFilePath, 'utf8'); + var params = JSON.parse(serializedParams); + + var cmd = params.command; + var execOptions = params.execOptions; + var pipe = params.pipe; + var stdoutFile = params.stdoutFile; + var stderrFile = params.stderrFile; + + function isMaxBufferError(err) { + var maxBufferErrorPattern = /^.*\bmaxBuffer\b.*exceeded.*$/; + if (err instanceof Error && err.message && + err.message.match(maxBufferErrorPattern)) { + // < v10 + // Error: stdout maxBuffer exceeded + return true; + } else if (err instanceof RangeError && err.message && + err.message.match(maxBufferErrorPattern)) { + // >= v10 + // RangeError [ERR_CHILD_PROCESS_STDIO_MAXBUFFER]: stdout maxBuffer length + // exceeded + return true; + } + return false; + } + + var stdoutStream = fs.createWriteStream(stdoutFile); + var stderrStream = fs.createWriteStream(stderrFile); + + function appendError(message, code) { + stderrStream.write(message); + process.exitCode = code; + } + + var c = childProcess.exec(cmd, execOptions, function (err) { + if (!err) { + process.exitCode = 0; + } else if (isMaxBufferError(err)) { + appendError('maxBuffer exceeded', 1); + } else if (err.code === undefined && err.message) { + /* istanbul ignore next */ + appendError(err.message, 1); + } else if (err.code === undefined) { + /* istanbul ignore next */ + appendError('Unknown issue', 1); + } else { + process.exitCode = err.code; + } + }); + + c.stdout.pipe(stdoutStream); + c.stderr.pipe(stderrStream); + c.stdout.pipe(process.stdout); + c.stderr.pipe(process.stderr); + + if (pipe) { + c.stdin.end(pipe); 
+ } +} + +// This file should only be executed. This module does not export anything. +/* istanbul ignore else */ +if (require.main === module) { + main(); +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js new file mode 100644 index 0000000..3907769 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js @@ -0,0 +1,255 @@ +var path = require('path'); +var fs = require('fs'); +var child = require('child_process'); +var common = require('./common'); +var _tempDir = require('./tempdir').tempDir; +var _pwd = require('./pwd'); + +var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; +var DEFAULT_ERROR_CODE = 1; + +common.register('exec', _exec, { + unix: false, + canReceivePipe: true, + wrapOutput: false, + handlesFatalDynamically: true, +}); + +// We use this function to run `exec` synchronously while also providing realtime +// output. +function execSync(cmd, opts, pipe) { + if (!common.config.execPath) { + try { + common.error('Unable to find a path to the node binary. Please manually set config.execPath'); + } catch (e) { + if (opts.fatal) { + throw e; + } + + return; + } + } + + var tempDir = _tempDir(); + var paramsFile = path.join(tempDir, common.randomFileName()); + var stderrFile = path.join(tempDir, common.randomFileName()); + var stdoutFile = path.join(tempDir, common.randomFileName()); + + opts = common.extend({ + silent: common.config.silent, + fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary + cwd: _pwd().toString(), + env: process.env, + maxBuffer: DEFAULT_MAXBUFFER_SIZE, + encoding: 'utf8', + }, opts); + + if (fs.existsSync(paramsFile)) common.unlinkSync(paramsFile); + if (fs.existsSync(stderrFile)) common.unlinkSync(stderrFile); + if (fs.existsSync(stdoutFile)) common.unlinkSync(stdoutFile); + + opts.cwd = path.resolve(opts.cwd); + + var paramsToSerialize = { + command: cmd, + execOptions: opts, + pipe, + stdoutFile, + stderrFile, + }; + + // Create the files and ensure these are locked down (for read and write) to + // the current user. The main concerns here are: + // + // * If we execute a command which prints sensitive output, then + // stdoutFile/stderrFile must not be readable by other users. + // * paramsFile must not be readable by other users, or else they can read it + // to figure out the path for stdoutFile/stderrFile and create these first + // (locked down to their own access), which will crash exec() when it tries + // to write to the files. + function writeFileLockedDown(filePath, data) { + fs.writeFileSync(filePath, data, { + encoding: 'utf8', + mode: parseInt('600', 8), + }); + } + writeFileLockedDown(stdoutFile, ''); + writeFileLockedDown(stderrFile, ''); + writeFileLockedDown(paramsFile, JSON.stringify(paramsToSerialize)); + + var execArgs = [ + path.join(__dirname, 'exec-child.js'), + paramsFile, + ]; + + /* istanbul ignore else */ + if (opts.silent) { + opts.stdio = 'ignore'; + } else { + opts.stdio = [0, 1, 2]; + } + + var code = 0; + + // Welcome to the future + try { + // Bad things if we pass in a `shell` option to child_process.execFileSync, + // so we need to explicitly remove it here. + delete opts.shell; + + child.execFileSync(common.config.execPath, execArgs, opts); + } catch (e) { + // Commands with non-zero exit code raise an exception. 
+ code = e.status || DEFAULT_ERROR_CODE; + } + + // fs.readFileSync uses buffer encoding by default, so call + // it without the encoding option if the encoding is 'buffer'. + // Also, if the exec timeout is too short for node to start up, + // the files will not be created, so these calls will throw. + var stdout = ''; + var stderr = ''; + if (opts.encoding === 'buffer') { + stdout = fs.readFileSync(stdoutFile); + stderr = fs.readFileSync(stderrFile); + } else { + stdout = fs.readFileSync(stdoutFile, opts.encoding); + stderr = fs.readFileSync(stderrFile, opts.encoding); + } + + // No biggie if we can't erase the files now -- they're in a temp dir anyway + // and we locked down permissions (see the note above). + try { common.unlinkSync(paramsFile); } catch (e) {} + try { common.unlinkSync(stderrFile); } catch (e) {} + try { common.unlinkSync(stdoutFile); } catch (e) {} + + if (code !== 0) { + // Note: `silent` should be unconditionally true to avoid double-printing + // the command's stderr, and to avoid printing any stderr when the user has + // set `shell.config.silent`. + common.error(stderr, code, { continue: true, silent: true, fatal: opts.fatal }); + } + var obj = common.ShellString(stdout, stderr, code); + return obj; +} // execSync() + +// Wrapper around exec() to enable echoing output to console in real time +function execAsync(cmd, opts, pipe, callback) { + opts = common.extend({ + silent: common.config.silent, + fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary + cwd: _pwd().toString(), + env: process.env, + maxBuffer: DEFAULT_MAXBUFFER_SIZE, + encoding: 'utf8', + }, opts); + + var c = child.exec(cmd, opts, function (err, stdout, stderr) { + if (callback) { + if (!err) { + callback(0, stdout, stderr); + } else if (err.code === undefined) { + // See issue #536 + /* istanbul ignore next */ + callback(1, stdout, stderr); + } else { + callback(err.code, stdout, stderr); + } + } + }); + + if (pipe) c.stdin.end(pipe); + + if (!opts.silent) { + c.stdout.pipe(process.stdout); + c.stderr.pipe(process.stderr); + } + + return c; +} + +//@ +//@ ### exec(command [, options] [, callback]) +//@ +//@ Available options: +//@ +//@ + `async`: Asynchronous execution. If a callback is provided, it will be set to +//@ `true`, regardless of the passed value (default: `false`). +//@ + `fatal`: Exit upon error (default: `false`). +//@ + `silent`: Do not echo program output to console (default: `false`). +//@ + `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and +//@ what is written to stdout and stderr when not in silent mode (default: `'utf8'`). +//@ + and any option available to Node.js's +//@ [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var version = exec('node --version', {silent:true}).stdout; +//@ +//@ var child = exec('some_long_running_process', {async:true}); +//@ child.stdout.on('data', function(data) { +//@ /* ... do something with data ... */ +//@ }); +//@ +//@ exec('some_long_running_process', function(code, stdout, stderr) { +//@ console.log('Exit code:', code); +//@ console.log('Program output:', stdout); +//@ console.log('Program stderr:', stderr); +//@ }); +//@ ``` +//@ +//@ Executes the given `command` _synchronously_, unless otherwise specified. +//@ When in synchronous mode, this returns a [ShellString](#shellstringstr). 
+//@ Otherwise, this returns the child process object, and the `callback` +//@ receives the arguments `(code, stdout, stderr)`. +//@ +//@ Not seeing the behavior you want? `exec()` runs everything through `sh` +//@ by default (or `cmd.exe` on Windows), which differs from `bash`. If you +//@ need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. +//@ +//@ **Security note:** as `shell.exec()` executes an arbitrary string in the +//@ system shell, it is **critical** to properly sanitize user input to avoid +//@ **command injection**. For more context, consult the [Security +//@ Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). +function _exec(command, options, callback) { + options = options || {}; + + var pipe = common.readFromPipe(); + + // Callback is defined instead of options. + if (typeof options === 'function') { + callback = options; + options = { async: true }; + } + + // Callback is defined with options. + if (typeof options === 'object' && typeof callback === 'function') { + options.async = true; + } + + options = common.extend({ + silent: common.config.silent, + fatal: common.config.fatal, + async: false, + }, options); + + if (!command) { + try { + common.error('must specify command'); + } catch (e) { + if (options.fatal) { + throw e; + } + + return; + } + } + + if (options.async) { + return execAsync(command, options, pipe, callback); + } else { + return execSync(command, options, pipe); + } +} +module.exports = _exec; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js new file mode 100644 index 0000000..80db993 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js @@ -0,0 +1,66 @@ +var path = require('path'); +var common = require('./common'); +var _ls = require('./ls'); + +common.register('find', _find, { + cmdOptions: { + 'L': 'link', + }, +}); + +//@ +//@ ### find(path [, path ...]) +//@ ### find(path_array) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ find('src', 'lib'); +//@ find(['src', 'lib']); // same as above +//@ find('.').filter(function(file) { return file.match(/\.js$/); }); +//@ ``` +//@ +//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all +//@ files (however deep) in the given paths. +//@ +//@ The main difference from `ls('-R', path)` is that the resulting file names +//@ include the base directories (e.g., `lib/resources/file1` instead of just `file1`). +function _find(options, paths) { + if (!paths) { + common.error('no path specified'); + } else if (typeof paths === 'string') { + paths = [].slice.call(arguments, 1); + } + + var list = []; + + function pushFile(file) { + if (process.platform === 'win32') { + file = file.replace(/\\/g, '/'); + } + list.push(file); + } + + // why not simply do `ls('-R', paths)`? 
because the output wouldn't give the base dirs
+  // to get the base dir in the output, we need instead `ls('-R', 'dir/*')` for every directory
+
+  paths.forEach(function (file) {
+    var stat;
+    try {
+      stat = common.statFollowLinks(file);
+    } catch (e) {
+      common.error('no such file or directory: ' + file);
+    }
+
+    pushFile(file);
+
+    if (stat.isDirectory()) {
+      _ls({ recursive: true, all: true, link: options.link }, file).forEach(function (subfile) {
+        pushFile(path.join(file, subfile));
+      });
+    }
+  });
+
+  return list;
+}
+module.exports = _find;
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js
new file mode 100644
index 0000000..cfc83e4
--- /dev/null
+++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js
@@ -0,0 +1,198 @@
+var fs = require('fs');
+var common = require('./common');
+
+common.register('grep', _grep, {
+  globStart: 2, // don't glob-expand the regex
+  canReceivePipe: true,
+  cmdOptions: {
+    'v': 'inverse',
+    'l': 'nameOnly',
+    'i': 'ignoreCase',
+    'n': 'lineNumber',
+    'B': 'beforeContext',
+    'A': 'afterContext',
+    'C': 'context',
+  },
+});
+
+//@
+//@ ### grep([options,] regex_filter, file [, file ...])
+//@ ### grep([options,] regex_filter, file_array)
+//@
+//@ Available options:
+//@
+//@ + `-v`: Invert `regex_filter` (only print non-matching lines).
+//@ + `-l`: Print only filenames of matching files.
+//@ + `-i`: Ignore case.
+//@ + `-n`: Print line numbers.
+//@ + `-B <num>`: Show `<num>` lines before each result.
+//@ + `-A <num>`: Show `<num>` lines after each result.
+//@ + `-C <num>`: Show `<num>` lines before and after each result. -B and -A override this option.
+//@
+//@ Examples:
+//@
+//@ ```javascript
+//@ grep('-v', 'GLOBAL_VARIABLE', '*.js');
+//@ grep('GLOBAL_VARIABLE', '*.js');
+//@ grep('-B', 3, 'GLOBAL_VARIABLE', '*.js');
+//@ grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js');
+//@ grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js');
+//@ ```
+//@
+//@ Reads input string from given files and returns a
+//@ [ShellString](#shellstringstr) containing all lines of the file that match
+//@ the given `regex_filter`.
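+// Illustrative sketch of pipe usage and of the '-n' output format produced
+// below (hedged example, not from the upstream source; 'app.log' is a
+// placeholder):
+//
+//   cat('app.log').grep('-n', 'ERROR');            // grep also works on the right side of a pipe
+//   grep({ '-n': true, '-C': 1 }, 'ERROR', 'app.log');
+//   // with -n, matching lines are prefixed "<line>:" and context lines "<line>-";
+//   // separate result groups are joined with "--"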
+function _grep(options, regex, files) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no paths given', 2); + + var idx = 2; + var contextError = ': invalid context length argument'; + // If the option has been found but not read, copy value from arguments + if (options.beforeContext === true) { + idx = 3; + options.beforeContext = Number(arguments[1]); + if (options.beforeContext < 0) { + common.error(options.beforeContext + contextError, 2); + } + } + if (options.afterContext === true) { + idx = 3; + options.afterContext = Number(arguments[1]); + if (options.afterContext < 0) { + common.error(options.afterContext + contextError, 2); + } + } + if (options.context === true) { + idx = 3; + options.context = Number(arguments[1]); + if (options.context < 0) { + common.error(options.context + contextError, 2); + } + } + // If before or after not given but context is, update values + if (typeof options.context === 'number') { + if (options.beforeContext === false) { + options.beforeContext = options.context; + } + if (options.afterContext === false) { + options.afterContext = options.context; + } + } + regex = arguments[idx - 1]; + files = [].slice.call(arguments, idx); + + if (pipe) { + files.unshift('-'); + } + + var grep = []; + if (options.ignoreCase) { + regex = new RegExp(regex, 'i'); + } + files.forEach(function (file) { + if (!fs.existsSync(file) && file !== '-') { + common.error('no such file or directory: ' + file, 2, { continue: true }); + return; + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + if (options.nameOnly) { + if (contents.match(regex)) { + grep.push(file); + } + } else { + var lines = contents.split('\n'); + var matches = []; + + lines.forEach(function (line, index) { + var matched = line.match(regex); + if ((options.inverse && !matched) || (!options.inverse && matched)) { + var lineNumber = index + 1; + var result = {}; + if (matches.length > 0) { + // If the last result intersects, combine them + var last = matches[matches.length - 1]; + var minimumLineNumber = Math.max( + 1, + lineNumber - options.beforeContext - 1, + ); + if ( + last.hasOwnProperty('' + lineNumber) || + last.hasOwnProperty('' + minimumLineNumber) + ) { + result = last; + } + } + result[lineNumber] = { + line, + match: true, + }; + if (options.beforeContext > 0) { + // Store the lines with their line numbers to check for overlap + lines + .slice(Math.max(index - options.beforeContext, 0), index) + .forEach(function (v, i, a) { + var lineNum = '' + (index - a.length + i + 1); + if (!result.hasOwnProperty(lineNum)) { + result[lineNum] = { line: v, match: false }; + } + }); + } + if (options.afterContext > 0) { + // Store the lines with their line numbers to check for overlap + lines + .slice( + index + 1, + Math.min(index + options.afterContext + 1, lines.length - 1), + ) + .forEach(function (v, i) { + var lineNum = '' + (index + 1 + i + 1); + if (!result.hasOwnProperty(lineNum)) { + result[lineNum] = { line: v, match: false }; + } + }); + } + // Only add the result if it's new + if (!matches.includes(result)) { + matches.push(result); + } + } + }); + + // Loop through the matches and add them to the output + Array.prototype.push.apply( + grep, + matches.map(function (result) { + return Object.entries(result) + .map(function (entry) { + var lineNumber = entry[0]; + var line = entry[1].line; + var match = entry[1].match; + return options.lineNumber + ? lineNumber + (match ? 
':' : '-') + line + : line; + }) + .join('\n'); + }), + ); + } + }); + + if (grep.length === 0 && common.state.errorCode !== 2) { + // We didn't hit the error above, but pattern didn't match + common.error('', { silent: true }); + } + + var separator = '\n'; + if ( + typeof options.beforeContext === 'number' || + typeof options.afterContext === 'number' + ) { + separator = '\n--\n'; + } + return grep.join(separator) + '\n'; +} +module.exports = _grep; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js new file mode 100644 index 0000000..f3f4f22 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js @@ -0,0 +1,107 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('head', _head, { + canReceivePipe: true, + cmdOptions: { + 'n': 'numLines', + }, +}); + +// Reads |numLines| lines or the entire file, whichever is less. +function readSomeLines(file, numLines) { + var buf = common.buffer(); + var bufLength = buf.length; + var bytesRead = bufLength; + var pos = 0; + + var fdr = fs.openSync(file, 'r'); + var numLinesRead = 0; + var ret = ''; + while (bytesRead === bufLength && numLinesRead < numLines) { + bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); + var bufStr = buf.toString('utf8', 0, bytesRead); + numLinesRead += bufStr.split('\n').length - 1; + ret += bufStr; + pos += bytesRead; + } + + fs.closeSync(fdr); + return ret; +} + +//@ +//@ ### head([{'-n': \},] file [, file ...]) +//@ ### head([{'-n': \},] file_array) +//@ +//@ Available options: +//@ +//@ + `-n `: Show the first `` lines of the files +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var str = head({'-n': 1}, 'file*.txt'); +//@ var str = head('file1', 'file2'); +//@ var str = head(['file1', 'file2']); // same as above +//@ ``` +//@ +//@ Read the start of a `file`. Returns a [ShellString](#shellstringstr). 
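+// --- Illustrative usage sketch (editorial addition, not part of the shelljs source) ---
+// The documented forms above in context; file names are placeholders and
+// shelljs is assumed to be required as `shell`:
+//
+//   var shell = require('shelljs');
+//   var firstLine = shell.head({ '-n': 1 }, 'CHANGELOG.md'); // first line only
+//   var previews  = shell.head(['a.txt', 'b.txt']);          // first 10 lines of each (default)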
+function _head(options, files) { + var head = []; + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no paths given'); + + var idx = 1; + if (options.numLines === true) { + idx = 2; + options.numLines = Number(arguments[1]); + } else if (options.numLines === false) { + options.numLines = 10; + } + files = [].slice.call(arguments, idx); + + if (pipe) { + files.unshift('-'); + } + + var shouldAppendNewline = false; + files.forEach(function (file) { + if (file !== '-') { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file, { continue: true }); + return; + } else if (common.statFollowLinks(file).isDirectory()) { + common.error("error reading '" + file + "': Is a directory", { + continue: true, + }); + return; + } + } + + var contents; + if (file === '-') { + contents = pipe; + } else if (options.numLines < 0) { + contents = fs.readFileSync(file, 'utf8'); + } else { + contents = readSomeLines(file, options.numLines); + } + + var lines = contents.split('\n'); + var hasTrailingNewline = (lines[lines.length - 1] === ''); + if (hasTrailingNewline) { + lines.pop(); + } + shouldAppendNewline = (hasTrailingNewline || options.numLines < lines.length); + + head = head.concat(lines.slice(0, options.numLines)); + }); + + if (shouldAppendNewline) { + head.push(''); // to add a trailing newline once we join + } + return head.join('\n'); +} +module.exports = _head; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js new file mode 100644 index 0000000..1d3d0e7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js @@ -0,0 +1,75 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('ln', _ln, { + cmdOptions: { + 's': 'symlink', + 'f': 'force', + }, +}); + +//@ +//@ ### ln([options,] source, dest) +//@ +//@ Available options: +//@ +//@ + `-s`: symlink +//@ + `-f`: force +//@ +//@ Examples: +//@ +//@ ```javascript +//@ ln('file', 'newlink'); +//@ ln('-sf', 'file', 'existing'); +//@ ``` +//@ +//@ Links `source` to `dest`. Use `-f` to force the link, should `dest` already +//@ exist. Returns a [ShellString](#shellstringstr) indicating success or +//@ failure. +function _ln(options, source, dest) { + if (!source || !dest) { + common.error('Missing and/or '); + } + + source = String(source); + var sourcePath = path.normalize(source).replace(RegExp(path.sep + '$'), ''); + var isAbsolute = (path.resolve(source) === sourcePath); + dest = path.resolve(process.cwd(), String(dest)); + + if (fs.existsSync(dest)) { + if (!options.force) { + common.error('Destination file exists', { continue: true }); + } + + fs.unlinkSync(dest); + } + + if (options.symlink) { + var isWindows = process.platform === 'win32'; + var linkType = isWindows ? 'file' : null; + var resolvedSourcePath = isAbsolute ? sourcePath : path.resolve(process.cwd(), path.dirname(dest), source); + if (!fs.existsSync(resolvedSourcePath)) { + common.error('Source file does not exist', { continue: true }); + } else if (isWindows && common.statFollowLinks(resolvedSourcePath).isDirectory()) { + linkType = 'junction'; + } + + try { + fs.symlinkSync(linkType === 'junction' ? 
resolvedSourcePath : source, dest, linkType); + } catch (err) { + common.error(err.message); + } + } else { + if (!fs.existsSync(source)) { + common.error('Source file does not exist', { continue: true }); + } + try { + fs.linkSync(source, dest); + } catch (err) { + common.error(err.message); + } + } + return ''; +} +module.exports = _ln; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js new file mode 100644 index 0000000..7f32c6e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js @@ -0,0 +1,155 @@ +var path = require('path'); +var fs = require('fs'); +var glob = require('fast-glob'); +var common = require('./common'); + +// glob patterns use the UNIX path seperator +var globPatternRecursive = '/**'; + +common.register('ls', _ls, { + cmdOptions: { + 'R': 'recursive', + 'A': 'all', + 'L': 'link', + 'a': 'all_deprecated', + 'd': 'directory', + 'l': 'long', + }, +}); + +//@ +//@ ### ls([options,] [path, ...]) +//@ ### ls([options,] path_array) +//@ +//@ Available options: +//@ +//@ + `-R`: recursive +//@ + `-A`: all files (include files beginning with `.`, except for `.` and `..`) +//@ + `-L`: follow symlinks +//@ + `-d`: list directories themselves, not their contents +//@ + `-l`: provides more details for each file. Specifically, each file is +//@ represented by a structured object with separate fields for file +//@ metadata (see +//@ [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The +//@ return value also overrides `.toString()` to resemble `ls -l`'s +//@ output format for human readability, but programmatic usage should +//@ depend on the stable object format rather than the `.toString()` +//@ representation. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ ls('projs/*.js'); +//@ ls('projs/**/*.js'); // Find all js files recursively in projs +//@ ls('-R', '/users/me', '/tmp'); +//@ ls('-R', ['/users/me', '/tmp']); // same as above +//@ ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} +//@ ``` +//@ +//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all +//@ the files in the given `path`, or files in the current directory if no +//@ `path` is provided. +function _ls(options, paths) { + if (options.all_deprecated) { + // We won't support the -a option as it's hard to image why it's useful + // (it includes '.' and '..' in addition to '.*' files) + // For backwards compatibility we'll dump a deprecated message and proceed as before + common.log('ls: Option -a is deprecated. Use -A instead'); + options.all = true; + } + + if (!paths) { + paths = ['.']; + } else { + paths = [].slice.call(arguments, 1); + } + + var list = []; + + function pushFile(abs, relName, stat) { + if (process.platform === 'win32') { + relName = relName.replace(/\\/g, '/'); + } + if (options.long) { + stat = stat || (options.link ? common.statFollowLinks(abs) : common.statNoFollowLinks(abs)); + list.push(addLsAttributes(relName, stat)); + } else { + // list.push(path.relative(rel || '.', file)); + list.push(relName); + } + } + + paths.forEach(function (p) { + var stat; + + try { + stat = options.link ? common.statFollowLinks(p) : common.statNoFollowLinks(p); + // follow links to directories by default + if (stat.isSymbolicLink()) { + /* istanbul ignore next */ + // workaround for https://github.com/shelljs/shelljs/issues/795 + // codecov seems to have a bug that miscalculate this block as uncovered. 
+ // but according to nyc report this block does get covered. + try { + var _stat = common.statFollowLinks(p); + if (_stat.isDirectory()) { + stat = _stat; + } + } catch (_) {} // bad symlink, treat it like a file + } + } catch (e) { + common.error('no such file or directory: ' + p, 2, { continue: true }); + return; + } + + // If the stat succeeded + if (stat.isDirectory() && !options.directory) { + if (options.recursive) { + // use glob, because it's simple + glob.sync(p + globPatternRecursive, { + // These options are just to make fast-glob be compatible with POSIX + // (bash) wildcard behavior. + onlyFiles: false, + + // These options depend on the cmdOptions provided to ls. + dot: options.all, + followSymbolicLinks: options.link, + }).forEach(function (item) { + // Glob pattern returns the directory itself and needs to be filtered out. + if (path.relative(p, item)) { + pushFile(item, path.relative(p, item)); + } + }); + } else if (options.all) { + // use fs.readdirSync, because it's fast + fs.readdirSync(p).forEach(function (item) { + pushFile(path.join(p, item), item); + }); + } else { + // use fs.readdirSync and then filter out secret files + fs.readdirSync(p).forEach(function (item) { + if (item[0] !== '.') { + pushFile(path.join(p, item), item); + } + }); + } + } else { + pushFile(p, p, stat); + } + }); + + // Add methods, to make this more compatible with ShellStrings + return list; +} + +function addLsAttributes(pathName, stats) { + // Note: this object will contain more information than .toString() returns + stats.name = pathName; + stats.toString = function () { + // Return a string resembling unix's `ls -l` format + return [this.mode, this.nlink, this.uid, this.gid, this.size, this.mtime, this.name].join(' '); + }; + return stats; +} + +module.exports = _ls; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js new file mode 100644 index 0000000..021cad9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js @@ -0,0 +1,102 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('mkdir', _mkdir, { + cmdOptions: { + 'p': 'fullpath', + }, +}); + +// Recursively creates `dir` +function mkdirSyncRecursive(dir) { + var baseDir = path.dirname(dir); + + // Prevents some potential problems arising from malformed UNCs or + // insufficient permissions. + /* istanbul ignore next */ + if (baseDir === dir) { + common.error('dirname() failed: [' + dir + ']'); + } + + // Base dir does not exist, go recursive + if (!fs.existsSync(baseDir)) { + mkdirSyncRecursive(baseDir); + } + + try { + // Base dir created, can create dir + fs.mkdirSync(dir, parseInt('0777', 8)); + } catch (e) { + // swallow error if dir already exists + if (e.code !== 'EEXIST' || common.statNoFollowLinks(dir).isFile()) { throw e; } + } +} + +//@ +//@ ### mkdir([options,] dir [, dir ...]) +//@ ### mkdir([options,] dir_array) +//@ +//@ Available options: +//@ +//@ + `-p`: full path (and create intermediate directories, if necessary) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); +//@ mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above +//@ ``` +//@ +//@ Creates directories. Returns a [ShellString](#shellstringstr) indicating +//@ success or failure. 
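+// --- Illustrative usage sketch (editorial addition, not part of the shelljs source) ---
+// `-p` creates any missing intermediate directories and silently skips paths
+// that already exist; without it, an existing path is reported as a
+// (non-fatal) error. Paths below are placeholders:
+//
+//   var shell = require('shelljs');
+//   shell.mkdir('-p', 'build/out/reports');  // creates build, build/out, build/out/reports
+//   shell.mkdir('build');                    // "path already exists" if build/ is present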
+function _mkdir(options, dirs) { + if (!dirs) common.error('no paths given'); + + if (typeof dirs === 'string') { + dirs = [].slice.call(arguments, 1); + } + // if it's array leave it as it is + + dirs.forEach(function (dir) { + try { + var stat = common.statNoFollowLinks(dir); + if (!options.fullpath) { + common.error('path already exists: ' + dir, { continue: true }); + } else if (stat.isFile()) { + common.error('cannot create directory ' + dir + ': File exists', { continue: true }); + } + return; // skip dir + } catch (e) { + // do nothing + } + + // Base dir does not exist, and no -p option given + var baseDir = path.dirname(dir); + if (!fs.existsSync(baseDir) && !options.fullpath) { + common.error('no such file or directory: ' + baseDir, { continue: true }); + return; // skip dir + } + + try { + if (options.fullpath) { + mkdirSyncRecursive(path.resolve(dir)); + } else { + fs.mkdirSync(dir, parseInt('0777', 8)); + } + } catch (e) { + var reason; + if (e.code === 'EACCES') { + reason = 'Permission denied'; + } else if (e.code === 'ENOTDIR' || e.code === 'ENOENT') { + reason = 'Not a directory'; + } else { + /* istanbul ignore next */ + throw e; + } + common.error('cannot create directory ' + dir + ': ' + reason, { continue: true }); + } + }); + return ''; +} // man arraykdir +module.exports = _mkdir; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js new file mode 100644 index 0000000..6e89e2f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js @@ -0,0 +1,119 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); +var cp = require('./cp'); +var rm = require('./rm'); + +common.register('mv', _mv, { + cmdOptions: { + 'f': '!no_force', + 'n': 'no_force', + }, +}); + +// Checks if cureent file was created recently +function checkRecentCreated(sources, index) { + var lookedSource = sources[index]; + return sources.slice(0, index).some(function (src) { + return path.basename(src) === path.basename(lookedSource); + }); +} + +//@ +//@ ### mv([options ,] source [, source ...], dest') +//@ ### mv([options ,] source_array, dest') +//@ +//@ Available options: +//@ +//@ + `-f`: force (default behavior) +//@ + `-n`: no-clobber +//@ +//@ Examples: +//@ +//@ ```javascript +//@ mv('-n', 'file', 'dir/'); +//@ mv('file1', 'file2', 'dir/'); +//@ mv(['file1', 'file2'], 'dir/'); // same as above +//@ ``` +//@ +//@ Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) +//@ indicating success or failure. 
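+// --- Illustrative usage sketch (editorial addition, not part of the shelljs source) ---
+// `-f` (the default) overwrites an existing destination, `-n` refuses to;
+// several sources require the destination to be a directory. Names are placeholders:
+//
+//   var shell = require('shelljs');
+//   shell.mv('report.txt', 'archive/');        // move a single file into a directory
+//   shell.mv('-n', 'report.txt', 'archive/');  // skipped if archive/report.txt already exists
+//   shell.mv(['a.log', 'b.log'], 'logs/');     // multiple sources, directory destination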
+function _mv(options, sources, dest) { + // Get sources, dest + if (arguments.length < 3) { + common.error('missing and/or '); + } else if (arguments.length > 3) { + sources = [].slice.call(arguments, 1, arguments.length - 1); + dest = arguments[arguments.length - 1]; + } else if (typeof sources === 'string') { + sources = [sources]; + } else { + // TODO(nate): figure out if we actually need this line + common.error('invalid arguments'); + } + + var exists = fs.existsSync(dest); + var stats = exists && common.statFollowLinks(dest); + + // Dest is not existing dir, but multiple sources given + if ((!exists || !stats.isDirectory()) && sources.length > 1) { + common.error('dest is not a directory (too many sources)'); + } + + // Dest is an existing file, but no -f given + if (exists && stats.isFile() && options.no_force) { + common.error('dest file already exists: ' + dest); + } + + sources.forEach(function (src, srcIndex) { + if (!fs.existsSync(src)) { + common.error('no such file or directory: ' + src, { continue: true }); + return; // skip file + } + + // If here, src exists + + // When copying to '/path/dir': + // thisDest = '/path/dir/file1' + var thisDest = dest; + if (fs.existsSync(dest) && common.statFollowLinks(dest).isDirectory()) { + thisDest = path.normalize(dest + '/' + path.basename(src)); + } + + var thisDestExists = fs.existsSync(thisDest); + + if (thisDestExists && checkRecentCreated(sources, srcIndex)) { + // cannot overwrite file created recently in current execution, but we want to continue copying other files + if (!options.no_force) { + common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); + } + return; + } + + if (fs.existsSync(thisDest) && options.no_force) { + common.error('dest file already exists: ' + thisDest, { continue: true }); + return; // skip file + } + + if (path.resolve(src) === path.dirname(path.resolve(thisDest))) { + common.error('cannot move to self: ' + src, { continue: true }); + return; // skip file + } + + try { + fs.renameSync(src, thisDest); + } catch (e) { + /* istanbul ignore next */ + if (e.code === 'EXDEV') { + // If we're trying to `mv` to an external partition, we'll actually need + // to perform a copy and then clean up the original file. If either the + // copy or the rm fails with an exception, we should allow this + // exception to pass up to the top level. 
+ cp({ recursive: true }, src, thisDest); + rm({ recursive: true, force: true }, src); + } + } + }); // forEach(src) + return ''; +} // mv +module.exports = _mv; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js new file mode 100644 index 0000000..d9eac3f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js @@ -0,0 +1 @@ +// see dirs.js diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js new file mode 100644 index 0000000..d9eac3f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js @@ -0,0 +1 @@ +// see dirs.js diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js new file mode 100644 index 0000000..8527d8b --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js @@ -0,0 +1,16 @@ +var path = require('path'); +var common = require('./common'); + +common.register('pwd', _pwd, { + allowGlobbing: false, +}); + +//@ +//@ ### pwd() +//@ +//@ Returns the current directory as a [ShellString](#shellstringstr). +function _pwd() { + var pwd = path.resolve(process.cwd()); + return pwd; +} +module.exports = _pwd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js new file mode 100644 index 0000000..6bb5755 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js @@ -0,0 +1,201 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('rm', _rm, { + cmdOptions: { + 'f': 'force', + 'r': 'recursive', + 'R': 'recursive', + }, +}); + +// Recursively removes 'dir' +// Adapted from https://github.com/ryanmcgrath/wrench-js +// +// Copyright (c) 2010 Ryan McGrath +// Copyright (c) 2012 Artur Adib +// +// Licensed under the MIT License +// http://www.opensource.org/licenses/mit-license.php +function rmdirSyncRecursive(dir, force, fromSymlink) { + var files; + + files = fs.readdirSync(dir); + + // Loop through and delete everything in the sub-tree after checking it + for (var i = 0; i < files.length; i++) { + var file = dir + '/' + files[i]; + var currFile = common.statNoFollowLinks(file); + + if (currFile.isDirectory()) { // Recursive function back to the beginning + rmdirSyncRecursive(file, force); + } else if (force || isWriteable(file)) { + // Assume it's a file - perhaps a try/catch belongs here? + try { + common.unlinkSync(file); + } catch (e) { + /* istanbul ignore next */ + common.error('could not remove file (code ' + e.code + '): ' + file, { + continue: true, + }); + } + } + } + + // if was directory was referenced through a symbolic link, + // the contents should be removed, but not the directory itself + if (fromSymlink) return; + + // Now that we know everything in the sub-tree has been deleted, we can delete the main directory. + // Huzzah for the shopkeep. 
+ + var result; + try { + // Retry on windows, sometimes it takes a little time before all the files in the directory are gone + var start = Date.now(); + + // TODO: replace this with a finite loop + for (;;) { + try { + result = fs.rmdirSync(dir); + if (fs.existsSync(dir)) throw { code: 'EAGAIN' }; + break; + } catch (er) { + /* istanbul ignore next */ + // In addition to error codes, also check if the directory still exists and loop again if true + if (process.platform === 'win32' && (er.code === 'ENOTEMPTY' || er.code === 'EBUSY' || er.code === 'EPERM' || er.code === 'EAGAIN')) { + if (Date.now() - start > 1000) throw er; + } else if (er.code === 'ENOENT') { + // Directory did not exist, deletion was successful + break; + } else { + throw er; + } + } + } + } catch (e) { + common.error('could not remove directory (code ' + e.code + '): ' + dir, { continue: true }); + } + + return result; +} // rmdirSyncRecursive + +// Hack to determine if file has write permissions for current user +// Avoids having to check user, group, etc, but it's probably slow +function isWriteable(file) { + var writePermission = true; + try { + var __fd = fs.openSync(file, 'a'); + fs.closeSync(__fd); + } catch (e) { + writePermission = false; + } + + return writePermission; +} + +function handleFile(file, options) { + if (options.force || isWriteable(file)) { + // -f was passed, or file is writable, so it can be removed + common.unlinkSync(file); + } else { + common.error('permission denied: ' + file, { continue: true }); + } +} + +function handleDirectory(file, options) { + if (options.recursive) { + // -r was passed, so directory can be removed + rmdirSyncRecursive(file, options.force); + } else { + common.error('path is a directory', { continue: true }); + } +} + +function handleSymbolicLink(file, options) { + var stats; + try { + stats = common.statFollowLinks(file); + } catch (e) { + // symlink is broken, so remove the symlink itself + common.unlinkSync(file); + return; + } + + if (stats.isFile()) { + common.unlinkSync(file); + } else if (stats.isDirectory()) { + if (file[file.length - 1] === '/') { + // trailing separator, so remove the contents, not the link + if (options.recursive) { + // -r was passed, so directory can be removed + var fromSymlink = true; + rmdirSyncRecursive(file, options.force, fromSymlink); + } else { + common.error('path is a directory', { continue: true }); + } + } else { + // no trailing separator, so remove the link + common.unlinkSync(file); + } + } +} + +function handleFIFO(file) { + common.unlinkSync(file); +} + +//@ +//@ ### rm([options,] file [, file ...]) +//@ ### rm([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-f`: force +//@ + `-r, -R`: recursive +//@ +//@ Examples: +//@ +//@ ```javascript +//@ rm('-rf', '/tmp/*'); +//@ rm('some_file.txt', 'another_file.txt'); +//@ rm(['some_file.txt', 'another_file.txt']); // same as above +//@ ``` +//@ +//@ Removes files. Returns a [ShellString](#shellstringstr) indicating success +//@ or failure. +function _rm(options, files) { + if (!files) common.error('no paths given'); + + // Convert to array + files = [].slice.call(arguments, 1); + + files.forEach(function (file) { + var lstats; + try { + var filepath = (file[file.length - 1] === '/') + ? 
file.slice(0, -1) // remove the '/' so lstatSync can detect symlinks + : file; + lstats = common.statNoFollowLinks(filepath); // test for existence + } catch (e) { + // Path does not exist, no force flag given + if (!options.force) { + common.error('no such file or directory: ' + file, { continue: true }); + } + return; // skip file + } + + // If here, path exists + if (lstats.isFile()) { + handleFile(file, options); + } else if (lstats.isDirectory()) { + handleDirectory(file, options); + } else if (lstats.isSymbolicLink()) { + handleSymbolicLink(file, options); + } else if (lstats.isFIFO()) { + handleFIFO(file); + } + }); // forEach(file) + return ''; +} // rm +module.exports = _rm; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js new file mode 100644 index 0000000..6936523 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js @@ -0,0 +1,95 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('sed', _sed, { + globStart: 3, // don't glob-expand regexes + canReceivePipe: true, + cmdOptions: { + 'i': 'inplace', + }, +}); + +//@ +//@ ### sed([options,] search_regex, replacement, file [, file ...]) +//@ ### sed([options,] search_regex, replacement, file_array) +//@ +//@ Available options: +//@ +//@ + `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ +//@ +//@ Examples: +//@ +//@ ```javascript +//@ sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); +//@ ``` +//@ +//@ Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on +//@ each of the lines from the input string using the given `search_regex` and `replacement` string or +//@ function. Returns the new [ShellString](#shellstringstr) after replacement. +//@ +//@ Note: +//@ +//@ Like unix `sed`, ShellJS `sed` supports capture groups. Capture groups are specified +//@ using the `$n` syntax: +//@ +//@ ```javascript +//@ sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); +//@ ``` +//@ +//@ Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file +//@ (split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') +//@ will not match anything and nothing will be replaced. +function _sed(options, regex, replacement, files) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (typeof replacement !== 'string' && typeof replacement !== 'function') { + if (typeof replacement === 'number') { + replacement = replacement.toString(); // fallback + } else { + common.error('invalid replacement string'); + } + } + + // Convert all search strings to RegExp + if (typeof regex === 'string') { + regex = RegExp(regex); + } + + if (!files && !pipe) { + common.error('no files given'); + } + + files = [].slice.call(arguments, 3); + + if (pipe) { + files.unshift('-'); + } + + var sed = []; + files.forEach(function (file) { + if (!fs.existsSync(file) && file !== '-') { + common.error('no such file or directory: ' + file, 2, { continue: true }); + return; + } + + var contents = file === '-' ? 
pipe : fs.readFileSync(file, 'utf8'); + var lines = contents.split('\n'); + var result = lines.map(function (line) { + return line.replace(regex, replacement); + }).join('\n'); + + sed.push(result); + + if (options.inplace) { + fs.writeFileSync(file, result, 'utf8'); + } + }); + + if (options.inplace) { + return ''; + } else { + return sed.join('\n'); + } +} +module.exports = _sed; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js new file mode 100644 index 0000000..6f37bc9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js @@ -0,0 +1,55 @@ +var common = require('./common'); + +common.register('set', _set, { + allowGlobbing: false, + wrapOutput: false, +}); + +//@ +//@ ### set(options) +//@ +//@ Available options: +//@ +//@ + `+/-e`: exit upon error (`config.fatal`) +//@ + `+/-v`: verbose: show all commands (`config.verbose`) +//@ + `+/-f`: disable filename expansion (globbing) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ set('-e'); // exit upon first error +//@ set('+e'); // this undoes a "set('-e')" +//@ ``` +//@ +//@ Sets global configuration variables. +function _set(options) { + if (!options) { + var args = [].slice.call(arguments, 0); + if (args.length < 2) common.error('must provide an argument'); + options = args[1]; + } + var negate = (options[0] === '+'); + if (negate) { + options = '-' + options.slice(1); // parseOptions needs a '-' prefix + } + options = common.parseOptions(options, { + 'e': 'fatal', + 'v': 'verbose', + 'f': 'noglob', + }); + + if (negate) { + Object.keys(options).forEach(function (key) { + options[key] = !options[key]; + }); + } + + Object.keys(options).forEach(function (key) { + // Only change the global config if `negate` is false and the option is true + // or if `negate` is true and the option is false (aka negate !== option) + if (negate !== options[key]) { + common.config[key] = options[key]; + } + }); +} +module.exports = _set; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js new file mode 100644 index 0000000..66b042c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js @@ -0,0 +1,98 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('sort', _sort, { + canReceivePipe: true, + cmdOptions: { + 'r': 'reverse', + 'n': 'numerical', + }, +}); + +// parse out the number prefix of a line +function parseNumber(str) { + var match = str.match(/^\s*(\d*)\s*(.*)$/); + return { num: Number(match[1]), value: match[2] }; +} + +// compare two strings case-insensitively, but examine case for strings that are +// case-insensitive equivalent +function unixCmp(a, b) { + var aLower = a.toLowerCase(); + var bLower = b.toLowerCase(); + return (aLower === bLower ? + -1 * a.localeCompare(b) : // unix sort treats case opposite how javascript does + aLower.localeCompare(bLower)); +} + +// compare two strings in the fashion that unix sort's -n option works +function numericalCmp(a, b) { + var objA = parseNumber(a); + var objB = parseNumber(b); + if (objA.hasOwnProperty('num') && objB.hasOwnProperty('num')) { + return ((objA.num !== objB.num) ? 
+ (objA.num - objB.num) : + unixCmp(objA.value, objB.value)); + } else { + return unixCmp(objA.value, objB.value); + } +} + +//@ +//@ ### sort([options,] file [, file ...]) +//@ ### sort([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-r`: Reverse the results +//@ + `-n`: Compare according to numerical value +//@ +//@ Examples: +//@ +//@ ```javascript +//@ sort('foo.txt', 'bar.txt'); +//@ sort('-r', 'foo.txt'); +//@ ``` +//@ +//@ Return the contents of the `file`s, sorted line-by-line as a +//@ [ShellString](#shellstringstr). Sorting multiple files mixes their content +//@ (just as unix `sort` does). +function _sort(options, files) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no files given'); + + files = [].slice.call(arguments, 1); + + if (pipe) { + files.unshift('-'); + } + + var lines = files.reduce(function (accum, file) { + if (file !== '-') { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file, { continue: true }); + return accum; + } else if (common.statFollowLinks(file).isDirectory()) { + common.error('read failed: ' + file + ': Is a directory', { + continue: true, + }); + return accum; + } + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + return accum.concat(contents.trimRight().split('\n')); + }, []); + + var sorted = lines.sort(options.numerical ? numericalCmp : unixCmp); + + if (options.reverse) { + sorted = sorted.reverse(); + } + + return sorted.join('\n') + '\n'; +} + +module.exports = _sort; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js new file mode 100644 index 0000000..eee75c5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js @@ -0,0 +1,90 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('tail', _tail, { + canReceivePipe: true, + cmdOptions: { + 'n': 'numLines', + }, +}); + +//@ +//@ ### tail([{'-n': \},] file [, file ...]) +//@ ### tail([{'-n': \},] file_array) +//@ +//@ Available options: +//@ +//@ + `-n `: Show the last `` lines of `file`s +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var str = tail({'-n': 1}, 'file*.txt'); +//@ var str = tail('file1', 'file2'); +//@ var str = tail(['file1', 'file2']); // same as above +//@ ``` +//@ +//@ Read the end of a `file`. Returns a [ShellString](#shellstringstr). 
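+// --- Illustrative usage sketch (editorial addition, not part of the shelljs source) ---
+// `-n <num>` returns the last <num> lines; the implementation below also
+// accepts a '+<num>' string to start output at line <num> (see plusOption).
+// File names are placeholders:
+//
+//   var shell = require('shelljs');
+//   var lastTwo   = shell.tail({ '-n': 2 }, 'build.log');    // last 2 lines
+//   var fromLine5 = shell.tail({ '-n': '+5' }, 'build.log'); // line 5 through the end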
+function _tail(options, files) { + var tail = []; + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no paths given'); + + var idx = 1; + var plusOption = false; + if (options.numLines === true) { + idx = 2; + if (arguments[1][0] === '+') { + plusOption = true; + } + options.numLines = Number(arguments[1]); + } else if (options.numLines === false) { + options.numLines = 10; + } + // arguments[0] is a json object + if (arguments[0].numLines[0] === '+') { + plusOption = true; + } + options.numLines = -1 * Math.abs(options.numLines); + files = [].slice.call(arguments, idx); + + if (pipe) { + files.unshift('-'); + } + + var shouldAppendNewline = false; + files.forEach(function (file) { + if (file !== '-') { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file, { continue: true }); + return; + } else if (common.statFollowLinks(file).isDirectory()) { + common.error("error reading '" + file + "': Is a directory", { + continue: true, + }); + return; + } + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + + var lines = contents.split('\n'); + if (lines[lines.length - 1] === '') { + lines.pop(); + shouldAppendNewline = true; + } else { + shouldAppendNewline = false; + } + + tail = tail.concat(plusOption ? lines.slice(-options.numLines - 1) : lines.slice(options.numLines)); + }); + + if (shouldAppendNewline) { + tail.push(''); // to add a trailing newline once we join + } + + return tail.join('\n'); +} + +module.exports = _tail; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js new file mode 100644 index 0000000..b6f7796 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js @@ -0,0 +1,75 @@ +var os = require('os'); +var fs = require('fs'); +var common = require('./common'); + +common.register('tempdir', _tempDir, { + allowGlobbing: false, + wrapOutput: false, +}); + +// Returns false if 'dir' is not a writeable directory, 'dir' otherwise +function writeableDir(dir) { + if (!dir || !fs.existsSync(dir)) return false; + + if (!common.statFollowLinks(dir).isDirectory()) return false; + + var testFile = dir + '/' + common.randomFileName(); + try { + fs.writeFileSync(testFile, ' '); + common.unlinkSync(testFile); + return dir; + } catch (e) { + /* istanbul ignore next */ + return false; + } +} + +// Variable to cache the tempdir value for successive lookups. +var cachedTempDir; + +//@ +//@ ### tempdir() +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var tmp = tempdir(); // "/tmp" for most *nix platforms +//@ ``` +//@ +//@ Searches and returns string containing a writeable, platform-dependent temporary directory. +//@ Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). +function _tempDir() { + if (cachedTempDir) return cachedTempDir; + + cachedTempDir = writeableDir(os.tmpdir()) || + writeableDir(process.env.TMPDIR) || + writeableDir(process.env.TEMP) || + writeableDir(process.env.TMP) || + writeableDir(process.env.Wimp$ScrapDir) || // RiscOS + writeableDir('C:\\TEMP') || // Windows + writeableDir('C:\\TMP') || // Windows + writeableDir('\\TEMP') || // Windows + writeableDir('\\TMP') || // Windows + writeableDir('/tmp') || + writeableDir('/var/tmp') || + writeableDir('/usr/tmp') || + writeableDir('.'); // last resort + + return cachedTempDir; +} + +// Indicates if the tempdir value is currently cached. 
This is exposed for tests +// only. The return value should only be tested for truthiness. +function isCached() { + return cachedTempDir; +} + +// Clears the cached tempDir value, if one is cached. This is exposed for tests +// only. +function clearCache() { + cachedTempDir = undefined; +} + +module.exports.tempDir = _tempDir; +module.exports.isCached = isCached; +module.exports.clearCache = clearCache; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js new file mode 100644 index 0000000..7e76908 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js @@ -0,0 +1,86 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('test', _test, { + cmdOptions: { + 'b': 'block', + 'c': 'character', + 'd': 'directory', + 'e': 'exists', + 'f': 'file', + 'L': 'link', + 'p': 'pipe', + 'S': 'socket', + }, + wrapOutput: false, + allowGlobbing: false, +}); + + +//@ +//@ ### test(expression) +//@ +//@ Available expression primaries: +//@ +//@ + `'-b', 'path'`: true if path is a block device +//@ + `'-c', 'path'`: true if path is a character device +//@ + `'-d', 'path'`: true if path is a directory +//@ + `'-e', 'path'`: true if path exists +//@ + `'-f', 'path'`: true if path is a regular file +//@ + `'-L', 'path'`: true if path is a symbolic link +//@ + `'-p', 'path'`: true if path is a pipe (FIFO) +//@ + `'-S', 'path'`: true if path is a socket +//@ +//@ Examples: +//@ +//@ ```javascript +//@ if (test('-d', path)) { /* do something with dir */ }; +//@ if (!test('-f', path)) continue; // skip if it's not a regular file +//@ ``` +//@ +//@ Evaluates `expression` using the available primaries and returns +//@ corresponding boolean value. 
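+// --- Illustrative usage sketch (editorial addition, not part of the shelljs source) ---
+// test() is registered with wrapOutput: false, so it returns a plain boolean
+// and composes directly with ordinary control flow. Paths are placeholders:
+//
+//   var shell = require('shelljs');
+//   if (shell.test('-d', 'dist')) shell.rm('-rf', 'dist/*');
+//   if (!shell.test('-e', 'package.json')) shell.echo('not an npm package here');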
+function _test(options, path) { + if (!path) common.error('no path given'); + + var canInterpret = false; + Object.keys(options).forEach(function (key) { + if (options[key] === true) { + canInterpret = true; + } + }); + + if (!canInterpret) common.error('could not interpret expression'); + + if (options.link) { + try { + return common.statNoFollowLinks(path).isSymbolicLink(); + } catch (e) { + return false; + } + } + + if (!fs.existsSync(path)) return false; + + if (options.exists) return true; + + var stats = common.statFollowLinks(path); + + if (options.block) return stats.isBlockDevice(); + + if (options.character) return stats.isCharacterDevice(); + + if (options.directory) return stats.isDirectory(); + + if (options.file) return stats.isFile(); + + /* istanbul ignore next */ + if (options.pipe) return stats.isFIFO(); + + /* istanbul ignore next */ + if (options.socket) return stats.isSocket(); + + /* istanbul ignore next */ + return false; // fallback +} // test +module.exports = _test; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js new file mode 100644 index 0000000..e4b064f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js @@ -0,0 +1,38 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('to', _to, { + pipeOnly: true, + wrapOutput: false, +}); + +//@ +//@ ### ShellString.prototype.to(file) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ cat('input.txt').to('output.txt'); +//@ ``` +//@ +//@ Analogous to the redirection operator `>` in Unix, but works with +//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix +//@ redirections, `to()` will overwrite any existing file!_ Returns the same +//@ [ShellString](#shellstringstr) this operated on, to support chaining. +function _to(options, file) { + if (!file) common.error('wrong arguments'); + + if (!fs.existsSync(path.dirname(file))) { + common.error('no such file or directory: ' + path.dirname(file)); + } + + try { + fs.writeFileSync(file, this.stdout || this.toString(), 'utf8'); + return this; + } catch (e) { + /* istanbul ignore next */ + common.error('could not write to file (code ' + e.code + '): ' + file, { continue: true }); + } +} +module.exports = _to; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js new file mode 100644 index 0000000..dc30e62 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js @@ -0,0 +1,37 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('toEnd', _toEnd, { + pipeOnly: true, + wrapOutput: false, +}); + +//@ +//@ ### ShellString.prototype.toEnd(file) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ cat('input.txt').toEnd('output.txt'); +//@ ``` +//@ +//@ Analogous to the redirect-and-append operator `>>` in Unix, but works with +//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the +//@ same [ShellString](#shellstringstr) this operated on, to support chaining. 
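+// --- Illustrative usage sketch (editorial addition, not part of the shelljs source) ---
+// to() overwrites the target file, toEnd() appends; both are ShellString
+// methods, so they chain off any command output. File names are placeholders:
+//
+//   var shell = require('shelljs');
+//   shell.echo('# build log').to('build.log');     // create / overwrite
+//   shell.echo('step 1 done').toEnd('build.log');  // append a line
+//   shell.cat('extra.txt').toEnd('build.log');     // append another file's contents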
+function _toEnd(options, file) { + if (!file) common.error('wrong arguments'); + + if (!fs.existsSync(path.dirname(file))) { + common.error('no such file or directory: ' + path.dirname(file)); + } + + try { + fs.appendFileSync(file, this.stdout || this.toString(), 'utf8'); + return this; + } catch (e) { + /* istanbul ignore next */ + common.error('could not append to file (code ' + e.code + '): ' + file, { continue: true }); + } +} +module.exports = _toEnd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js new file mode 100644 index 0000000..a268586 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js @@ -0,0 +1,117 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('touch', _touch, { + cmdOptions: { + 'a': 'atime_only', + 'c': 'no_create', + 'd': 'date', + 'm': 'mtime_only', + 'r': 'reference', + }, +}); + +//@ +//@ ### touch([options,] file [, file ...]) +//@ ### touch([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-a`: Change only the access time +//@ + `-c`: Do not create any files +//@ + `-m`: Change only the modification time +//@ + `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) +//@ instead of current time +//@ + `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current +//@ time +//@ +//@ Examples: +//@ +//@ ```javascript +//@ touch('source.js'); +//@ touch('-c', 'path/to/file.js'); +//@ touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); +//@ touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); +//@ touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); +//@ ``` +//@ +//@ Update the access and modification times of each file to the current time. +//@ A file argument that does not exist is created empty, unless `-c` is supplied. +//@ This is a partial implementation of +//@ [`touch(1)`](http://linux.die.net/man/1/touch). Returns a +//@ [ShellString](#shellstringstr) indicating success or failure. +function _touch(opts, files) { + if (!files) { + common.error('no files given'); + } else if (typeof files === 'string') { + files = [].slice.call(arguments, 1); + } else { + common.error('file arg should be a string file path or an Array of string file paths'); + } + + files.forEach(function (f) { + touchFile(opts, f); + }); + return ''; +} + +function touchFile(opts, file) { + var stat = tryStatFile(file); + + if (stat && stat.isDirectory()) { + // don't error just exit + return; + } + + // if the file doesn't already exist and the user has specified --no-create then + // this script is finished + if (!stat && opts.no_create) { + return; + } + + // open the file and then close it. 
this will create it if it doesn't exist but will + // not truncate the file + fs.closeSync(fs.openSync(file, 'a')); + + // + // Set timestamps + // + + // setup some defaults + var now = new Date(); + var mtime = opts.date || now; + var atime = opts.date || now; + + // use reference file + if (opts.reference) { + var refStat = tryStatFile(opts.reference); + if (!refStat) { + common.error('failed to get attributess of ' + opts.reference); + } + mtime = refStat.mtime; + atime = refStat.atime; + } else if (opts.date) { + mtime = opts.date; + atime = opts.date; + } + + if (opts.atime_only && opts.mtime_only) { + // keep the new values of mtime and atime like GNU + } else if (opts.atime_only) { + mtime = stat.mtime; + } else if (opts.mtime_only) { + atime = stat.atime; + } + + fs.utimesSync(file, atime, mtime); +} + +module.exports = _touch; + +function tryStatFile(filePath) { + try { + return common.statFollowLinks(filePath); + } catch (e) { + return null; + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js new file mode 100644 index 0000000..5802706 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js @@ -0,0 +1,93 @@ +var fs = require('fs'); +var common = require('./common'); + +// add c spaces to the left of str +function lpad(c, str) { + var res = '' + str; + if (res.length < c) { + res = Array((c - res.length) + 1).join(' ') + res; + } + return res; +} + +common.register('uniq', _uniq, { + canReceivePipe: true, + cmdOptions: { + 'i': 'ignoreCase', + 'c': 'count', + 'd': 'duplicates', + }, +}); + +//@ +//@ ### uniq([options,] [input, [output]]) +//@ +//@ Available options: +//@ +//@ + `-i`: Ignore case while comparing +//@ + `-c`: Prefix lines by the number of occurrences +//@ + `-d`: Only print duplicate lines, one for each group of identical lines +//@ +//@ Examples: +//@ +//@ ```javascript +//@ uniq('foo.txt'); +//@ uniq('-i', 'foo.txt'); +//@ uniq('-cd', 'foo.txt', 'bar.txt'); +//@ ``` +//@ +//@ Filter adjacent matching lines from `input`. Returns a +//@ [ShellString](#shellstringstr). +function _uniq(options, input, output) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (!pipe) { + if (!input) common.error('no input given'); + + if (!fs.existsSync(input)) { + common.error(input + ': No such file or directory'); + } else if (common.statFollowLinks(input).isDirectory()) { + common.error("error reading '" + input + "'"); + } + } + if (output && fs.existsSync(output) && common.statFollowLinks(output).isDirectory()) { + common.error(output + ': Is a directory'); + } + + var lines = (input ? fs.readFileSync(input, 'utf8') : pipe) + .trimRight() + .split('\n'); + + var compare = function (a, b) { + return options.ignoreCase ? + a.toLocaleLowerCase().localeCompare(b.toLocaleLowerCase()) : + a.localeCompare(b); + }; + var uniqed = lines.reduceRight(function (res, e) { + // Perform uniq -c on the input + if (res.length === 0) { + return [{ count: 1, ln: e }]; + } else if (compare(res[0].ln, e) === 0) { + return [{ count: res[0].count + 1, ln: e }].concat(res.slice(1)); + } else { + return [{ count: 1, ln: e }].concat(res); + } + }, []).filter(function (obj) { + // Do we want only duplicated objects? + return options.duplicates ? obj.count > 1 : true; + }).map(function (obj) { + // Are we tracking the counts of each line? + return (options.count ? 
(lpad(7, obj.count) + ' ') : '') + obj.ln; + }).join('\n') + '\n'; + + if (output) { + (new common.ShellString(uniqed)).to(output); + // if uniq writes to output, nothing is passed to the next command in the pipeline (if any) + return ''; + } else { + return uniqed; + } +} + +module.exports = _uniq; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js new file mode 100644 index 0000000..8ac7b77 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js @@ -0,0 +1,119 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('which', _which, { + allowGlobbing: false, + cmdOptions: { + 'a': 'all', + }, +}); + +// XP's system default value for `PATHEXT` system variable, just in case it's not +// set on Windows. +var XP_DEFAULT_PATHEXT = '.com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh'; + +// For earlier versions of NodeJS that doesn't have a list of constants (< v6) +var FILE_EXECUTABLE_MODE = 1; + +function isWindowsPlatform() { + return process.platform === 'win32'; +} + +// Cross-platform method for splitting environment `PATH` variables +function splitPath(p) { + return p ? p.split(path.delimiter) : []; +} + +// Tests are running all cases for this func but it stays uncovered by codecov due to unknown reason +/* istanbul ignore next */ +function isExecutable(pathName) { + try { + // TODO(node-support): replace with fs.constants.X_OK once remove support for node < v6 + fs.accessSync(pathName, FILE_EXECUTABLE_MODE); + } catch (err) { + return false; + } + return true; +} + +function checkPath(pathName) { + return fs.existsSync(pathName) && !common.statFollowLinks(pathName).isDirectory() + && (isWindowsPlatform() || isExecutable(pathName)); +} + +//@ +//@ ### which(command) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var nodeExec = which('node'); +//@ ``` +//@ +//@ Searches for `command` in the system's `PATH`. On Windows, this uses the +//@ `PATHEXT` variable to append the extension if it's not already executable. +//@ Returns a [ShellString](#shellstringstr) containing the absolute path to +//@ `command`. +function _which(options, cmd) { + if (!cmd) common.error('must specify command'); + + var isWindows = isWindowsPlatform(); + var pathArray = splitPath(process.env.PATH); + + var queryMatches = []; + + // No relative/absolute paths provided? + if (!cmd.includes('/')) { + // Assume that there are no extensions to append to queries (this is the + // case for unix) + var pathExtArray = ['']; + if (isWindows) { + // In case the PATHEXT variable is somehow not set (e.g. + // child_process.spawn with an empty environment), use the XP default. 
+ var pathExtEnv = process.env.PATHEXT || XP_DEFAULT_PATHEXT; + pathExtArray = splitPath(pathExtEnv.toUpperCase()); + } + + // Search for command in PATH + for (var k = 0; k < pathArray.length; k++) { + // already found it + if (queryMatches.length > 0 && !options.all) break; + + var attempt = path.resolve(pathArray[k], cmd); + + if (isWindows) { + attempt = attempt.toUpperCase(); + } + + var match = attempt.match(/\.[^<>:"/|?*.]+$/); + if (match && pathExtArray.includes(match[0])) { // this is Windows-only + // The user typed a query with the file extension, like + // `which('node.exe')` + if (checkPath(attempt)) { + queryMatches.push(attempt); + break; + } + } else { // All-platforms + // Cycle through the PATHEXT array, and check each extension + // Note: the array is always [''] on Unix + for (var i = 0; i < pathExtArray.length; i++) { + var ext = pathExtArray[i]; + var newAttempt = attempt + ext; + if (checkPath(newAttempt)) { + queryMatches.push(newAttempt); + break; + } + } + } + } + } else if (checkPath(cmd)) { // a valid absolute or relative path + queryMatches.push(path.resolve(cmd)); + } + + if (queryMatches.length > 0) { + return options.all ? queryMatches : queryMatches[0]; + } + return options.all ? [] : null; +} +module.exports = _which; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt new file mode 100644 index 0000000..eead04a --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md new file mode 100644 index 0000000..f9c7c00 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md @@ -0,0 +1,39 @@ +# signal-exit + +[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) +[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) +[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +When you want to fire an event no matter how a process exits: + +* reaching the end of execution. +* explicitly having `process.exit(code)` called. +* having `process.kill(pid, sig)` called. +* receiving a fatal signal from outside the process + +Use `signal-exit`. 
+ +```js +var onExit = require('signal-exit') + +onExit(function (code, signal) { + console.log('process exited!') +}) +``` + +## API + +`var remove = onExit(function (code, signal) {}, options)` + +The return value of the function is a function that will remove the +handler. + +Note that the function *only* fires for signals if the signal would +cause the process to exit. That is, there are no other listeners, and +it is a fatal signal. + +## Options + +* `alwaysLast`: Run this handler after any other signal or exit + handlers. This causes `process.emit` to be monkeypatched. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js new file mode 100644 index 0000000..93703f3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js @@ -0,0 +1,202 @@ +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +// grab a reference to node's real process object right away +var process = global.process + +const processOk = function (process) { + return process && + typeof process === 'object' && + typeof process.removeListener === 'function' && + typeof process.emit === 'function' && + typeof process.reallyExit === 'function' && + typeof process.listeners === 'function' && + typeof process.kill === 'function' && + typeof process.pid === 'number' && + typeof process.on === 'function' +} + +// some kind of non-node environment, just no-op +/* istanbul ignore if */ +if (!processOk(process)) { + module.exports = function () { + return function () {} + } +} else { + var assert = require('assert') + var signals = require('./signals.js') + var isWin = /^win/i.test(process.platform) + + var EE = require('events') + /* istanbul ignore if */ + if (typeof EE !== 'function') { + EE = EE.EventEmitter + } + + var emitter + if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ + } else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} + } + + // Because this emitter is a global, we have to check to see if a + // previous version of this library failed to enable infinite listeners. + // I know what you're about to say. But literally everything about + // signal-exit is a compromise with evil. Get used to it. 
+ if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true + } + + module.exports = function (cb, opts) { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return function () {} + } + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() + } + } + emitter.on(ev, cb) + + return remove + } + + var unload = function unload () { + if (!loaded || !processOk(global.process)) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 + } + module.exports.unload = unload + + var emit = function emit (event, code, signal) { + /* istanbul ignore if */ + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) + } + + // { : , ... } + var sigListeners = {} + signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return + } + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' + } + /* istanbul ignore next */ + process.kill(process.pid, sig) + } + } + }) + + module.exports.signals = function () { + return signals + } + + var loaded = false + + var load = function load () { + if (loaded || !processOk(global.process)) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. 
+ emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit + } + module.exports.load = load + + var originalProcessReallyExit = process.reallyExit + var processReallyExit = function processReallyExit (code) { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return + } + process.exitCode = code || /* istanbul ignore next */ 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) + } + + var originalProcessEmit = process.emit + var processEmit = function processEmit (ev, arg) { + if (ev === 'exit' && processOk(global.process)) { + /* istanbul ignore else */ + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + /* istanbul ignore next */ + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json new file mode 100644 index 0000000..e1a0031 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json @@ -0,0 +1,38 @@ +{ + "name": "signal-exit", + "version": "3.0.7", + "description": "when you want to fire an event no matter how a process exits.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "files": [ + "index.js", + "signals.js" + ], + "repository": { + "type": "git", + "url": "https://github.com/tapjs/signal-exit.git" + }, + "keywords": [ + "signal", + "exit" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/tapjs/signal-exit/issues" + }, + "homepage": "https://github.com/tapjs/signal-exit", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^3.1.1", + "nyc": "^15.1.0", + "standard-version": "^9.3.1", + "tap": "^15.1.1" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js new file mode 100644 index 0000000..3bd67a8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js @@ -0,0 +1,53 @@ +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. 
+// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js new file mode 100644 index 0000000..78fc0c5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js @@ -0,0 +1,16 @@ +'use strict'; + +module.exports = input => { + const LF = typeof input === 'string' ? '\n' : '\n'.charCodeAt(); + const CR = typeof input === 'string' ? '\r' : '\r'.charCodeAt(); + + if (input[input.length - 1] === LF) { + input = input.slice(0, input.length - 1); + } + + if (input[input.length - 1] === CR) { + input = input.slice(0, input.length - 1); + } + + return input; +}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json new file mode 100644 index 0000000..d9f2a6c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json @@ -0,0 +1,40 @@ +{ + "name": "strip-final-newline", + "version": "2.0.0", + "description": "Strip the final newline character from a string/buffer", + "license": "MIT", + "repository": "sindresorhus/strip-final-newline", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "strip", + "trim", + "remove", + "delete", + "final", + "last", + "end", + "file", + "newline", + "linebreak", + "character", + "string", + "buffer" + ], + "devDependencies": { + "ava": "^0.25.0", + "xo": "^0.23.0" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md new file mode 100644 index 0000000..32dfd50 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md @@ -0,0 +1,30 @@ +# strip-final-newline [![Build Status](https://travis-ci.com/sindresorhus/strip-final-newline.svg?branch=master)](https://travis-ci.com/sindresorhus/strip-final-newline) + +> Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from a string/buffer + +Can be useful when parsing the output of, for example, `ChildProcess#execFile`, as [binaries usually output a newline at the end](https://stackoverflow.com/questions/729692/why-should-text-files-end-with-a-newline). Normally, you would use `stdout.trim()`, but that would also remove newlines at the start and whitespace. + + +## Install + +``` +$ npm install strip-final-newline +``` + + +## Usage + +```js +const stripFinalNewline = require('strip-final-newline'); + +stripFinalNewline('foo\nbar\n\n'); +//=> 'foo\nbar\n' + +stripFinalNewline(Buffer.from('foo\nbar\n\n')).toString(); +//=> 'foo\nbar\n' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE new file mode 100644 index 0000000..7cccaf9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md new file mode 100644 index 0000000..38887da --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md @@ -0,0 +1,305 @@ +# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) + +> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save to-regex-range +``` + +
+## What does this do?
+
+
+This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers.
+
+**Example**
+
+```js
+const toRegexRange = require('to-regex-range');
+const regex = new RegExp(toRegexRange('15', '95'));
+```
+
+A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string).
+
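+As a quick sketch of that idea (the exact `source` output is not assumed here, only that it matches 15 through 95):
+
+```js
+const toRegexRange = require('to-regex-range');
+
+const source = toRegexRange('15', '95');
+
+// Anchor the source so only a whole string in the range matches.
+const whole = new RegExp(`^(?:${source})$`);
+console.log(whole.test('42'));   //=> true
+console.log(whole.test('142'));  //=> false
+
+// Or combine it with another fragment, e.g. an optional leading minus sign.
+const signed = new RegExp(`^-?(?:${source})$`);
+console.log(signed.test('-20')); //=> true
+```
+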
+ +
+ +
+## Why use this library?
+
+ +### Convenience + +Creating regular expressions for matching numbers gets deceptively complicated pretty fast. + +For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: + +* regex for matching `1` => `/1/` (easy enough) +* regex for matching `1` through `5` => `/[1-5]/` (not bad...) +* regex for matching `1` or `5` => `/(1|5)/` (still easy...) +* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) +* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) +* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) +* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) + +The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. + +**Learn more** + +If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. + +### Heavily tested + +As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. + +Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. + +### Optimized + +Generated regular expressions are optimized: + +* duplicate sequences and character classes are reduced using quantifiers +* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative +* uses fragment caching to avoid processing the same exact string more than once + +
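+As a rough sketch of the fragment caching mentioned above (using the `cache` and `clearCache` helpers exposed by `index.js`, which are internals rather than documented options):
+
+```js
+const toRegexRange = require('to-regex-range');
+
+toRegexRange.clearCache();
+
+toRegexRange('1', '555');
+toRegexRange('1', '555'); // the second call with identical arguments is served from the cache
+
+console.log(Object.keys(toRegexRange.cache).length); //=> 1
+```
+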
+ +
+
+## Usage
+
+Add this library to your javascript application with the following line of code:
+
+```js
+const toRegexRange = require('to-regex-range');
+```
+
+The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers).
+
+```js
+const source = toRegexRange('15', '95');
+//=> 1[5-9]|[2-8][0-9]|9[0-5]
+
+const regex = new RegExp(`^${source}$`);
+console.log(regex.test('14')); //=> false
+console.log(regex.test('50')); //=> true
+console.log(regex.test('94')); //=> true
+console.log(regex.test('96')); //=> false
+```
+
+## Options
+
+### options.capture
+
+**Type**: `boolean`
+
+**Default**: `undefined`
+
+Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges.
+
+```js
+console.log(toRegexRange('-10', '10'));
+//=> -[1-9]|-?10|[0-9]
+
+console.log(toRegexRange('-10', '10', { capture: true }));
+//=> (-[1-9]|-?10|[0-9])
+```
+
+### options.shorthand
+
+**Type**: `boolean`
+
+**Default**: `undefined`
+
+Use the regex shorthand for `[0-9]`:
+
+```js
+console.log(toRegexRange('0', '999999'));
+//=> [0-9]|[1-9][0-9]{1,5}
+
+console.log(toRegexRange('0', '999999', { shorthand: true }));
+//=> \d|[1-9]\d{1,5}
+```
+
+### options.relaxZeros
+
+**Type**: `boolean`
+
+**Default**: `true`
+
+This option relaxes matching for leading zeros when ranges are zero-padded.
+
+```js
+const source = toRegexRange('-0010', '0010');
+const regex = new RegExp(`^${source}$`);
+console.log(regex.test('-10')); //=> true
+console.log(regex.test('-010')); //=> true
+console.log(regex.test('-0010')); //=> true
+console.log(regex.test('10')); //=> true
+console.log(regex.test('010')); //=> true
+console.log(regex.test('0010')); //=> true
+```
+
+When `relaxZeros` is false, matching is strict:
+
+```js
+const source = toRegexRange('-0010', '0010', { relaxZeros: false });
+const regex = new RegExp(`^${source}$`);
+console.log(regex.test('-10')); //=> false
+console.log(regex.test('-010')); //=> false
+console.log(regex.test('-0010')); //=> true
+console.log(regex.test('10')); //=> false
+console.log(regex.test('010')); //=> false
+console.log(regex.test('0010')); //=> true
+```
+
+## Examples
+
+| **Range** | **Result** | **Compile time** |
+| --- | --- | --- |
+| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ |
+| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ |
+| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ |
+| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ |
+| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ |
+| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ |
+| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ |
+| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ |
+| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ |
+| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ |
+| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ |
+| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ |
+| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ |
+| `toRegexRange(5, 5)` | `5` | _8μs_ |
+| `toRegexRange(5, 6)` | `5\|6` | _11μs_ |
+| `toRegexRange(1, 2)` | `1\|2` | _6μs_ |
+| `toRegexRange(1, 5)` | `[1-5]` | _15μs_ |
+| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ |
+| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ |
+| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ |
+| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ |
+| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ |
+| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ |
+| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ |
+
+## Heads up!
+
+**Order of arguments**
+
+When the `min` is larger than the `max`, values will be flipped to create a valid range:
+
+```js
+toRegexRange('51', '29');
+```
+
+Is effectively flipped to:
+
+```js
+toRegexRange('29', '51');
+//=> 29|[3-4][0-9]|5[0-1]
+```
+
+**Steps / increments**
+
+This library does not support steps (increments). A PR to add support would be welcome.
+
+## History
+
+### v2.0.0 - 2017-04-21
+
+**New features**
+
+Adds support for zero-padding!
+
+### v1.0.0
+
+**Optimizations**
+
+Repeating ranges are now grouped using quantifiers. Processing time is roughly the same, but the generated regex is much smaller, which should result in faster matching.
+
+## Attribution
+
+Inspired by the python library [range-regex](https://github.com/dimka665/range-regex).
+
+## About
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+ +
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+$ npm install && npm test
+```
+
+ +
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+$ npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Related projects + +You might also be interested in these projects: + +* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") +* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 63 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [doowb](https://github.com/doowb) | +| 2 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js new file mode 100644 index 0000000..77fbace --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js @@ -0,0 +1,288 @@ +/*! + * to-regex-range + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +const isNumber = require('is-number'); + +const toRegexRange = (min, max, options) => { + if (isNumber(min) === false) { + throw new TypeError('toRegexRange: expected the first argument to be a number'); + } + + if (max === void 0 || min === max) { + return String(min); + } + + if (isNumber(max) === false) { + throw new TypeError('toRegexRange: expected the second argument to be a number.'); + } + + let opts = { relaxZeros: true, ...options }; + if (typeof opts.strictZeros === 'boolean') { + opts.relaxZeros = opts.strictZeros === false; + } + + let relax = String(opts.relaxZeros); + let shorthand = String(opts.shorthand); + let capture = String(opts.capture); + let wrap = String(opts.wrap); + let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; + + if (toRegexRange.cache.hasOwnProperty(cacheKey)) { + return toRegexRange.cache[cacheKey].result; + } + + let a = Math.min(min, max); + let b = Math.max(min, max); + + if (Math.abs(a - b) === 1) { + let result = min + '|' + max; + if (opts.capture) { + return `(${result})`; + } + if (opts.wrap === false) { + return result; + } + return `(?:${result})`; + } + + let isPadded = hasPadding(min) || hasPadding(max); + let state = { min, max, a, b }; + let positives = []; + let negatives = []; + + if (isPadded) { + state.isPadded = isPadded; + state.maxLen = String(state.max).length; + } + + if (a < 0) { + let newMin = b < 0 ? Math.abs(b) : 1; + negatives = splitToPatterns(newMin, Math.abs(a), state, opts); + a = state.a = 0; + } + + if (b >= 0) { + positives = splitToPatterns(a, b, state, opts); + } + + state.negatives = negatives; + state.positives = positives; + state.result = collatePatterns(negatives, positives, opts); + + if (opts.capture === true) { + state.result = `(${state.result})`; + } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { + state.result = `(?:${state.result})`; + } + + toRegexRange.cache[cacheKey] = state; + return state.result; +}; + +function collatePatterns(neg, pos, options) { + let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; + let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; + let intersected = filterPatterns(neg, pos, '-?', true, options) || []; + let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); + return subpatterns.join('|'); +} + +function splitToRanges(min, max) { + let nines = 1; + let zeros = 1; + + let stop = countNines(min, nines); + let stops = new Set([max]); + + while (min <= stop && stop <= max) { + stops.add(stop); + nines += 1; + stop = countNines(min, nines); + } + + stop = countZeros(max + 1, zeros) - 1; + + while (min < stop && stop <= max) { + stops.add(stop); + zeros += 1; + stop = countZeros(max + 1, zeros) - 1; + } + + stops = [...stops]; + stops.sort(compare); + return stops; +} + +/** + * Convert a range to a regex pattern + * @param {Number} `start` + * @param {Number} `stop` + * @return {String} + */ + +function rangeToPattern(start, stop, options) { + if (start === stop) { + return { pattern: start, count: [], digits: 0 }; + } + + let zipped = zip(start, stop); + let digits = zipped.length; + let pattern = ''; + let count = 0; + + for (let i = 0; i < digits; i++) { + let [startDigit, stopDigit] = zipped[i]; + + if (startDigit === stopDigit) { + pattern += startDigit; + + } else if (startDigit !== '0' || stopDigit !== '9') { + pattern += toCharacterClass(startDigit, stopDigit, options); + + } else { + count++; + } + } + + if (count) { + pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; + } + + return { pattern, count: [count], digits }; +} + +function splitToPatterns(min, max, tok, options) { + let ranges = splitToRanges(min, max); + let tokens = []; + let start = min; + let prev; + + for (let i = 0; i < ranges.length; i++) { + let max = ranges[i]; + let obj = rangeToPattern(String(start), String(max), options); + let zeros = ''; + + if (!tok.isPadded && prev && prev.pattern === obj.pattern) { + if (prev.count.length > 1) { + prev.count.pop(); + } + + prev.count.push(obj.count[0]); + prev.string = prev.pattern + toQuantifier(prev.count); + start = max + 1; + continue; + } + + if (tok.isPadded) { + zeros = padZeros(max, tok, options); + } + + obj.string = zeros + obj.pattern + toQuantifier(obj.count); + tokens.push(obj); + start = max + 1; + prev = obj; + } + + return tokens; +} + +function filterPatterns(arr, comparison, prefix, intersection, options) { + let result = []; + + for (let ele of arr) { + let { string } = ele; + + // only push if _both_ are negative... + if (!intersection && !contains(comparison, 'string', string)) { + result.push(prefix + string); + } + + // or _both_ are positive + if (intersection && contains(comparison, 'string', string)) { + result.push(prefix + string); + } + } + return result; +} + +/** + * Zip strings + */ + +function zip(a, b) { + let arr = []; + for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); + return arr; +} + +function compare(a, b) { + return a > b ? 1 : b > a ? -1 : 0; +} + +function contains(arr, key, val) { + return arr.some(ele => ele[key] === val); +} + +function countNines(min, len) { + return Number(String(min).slice(0, -len) + '9'.repeat(len)); +} + +function countZeros(integer, zeros) { + return integer - (integer % Math.pow(10, zeros)); +} + +function toQuantifier(digits) { + let [start = 0, stop = ''] = digits; + if (stop || start > 1) { + return `{${start + (stop ? ',' + stop : '')}}`; + } + return ''; +} + +function toCharacterClass(a, b, options) { + return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; +} + +function hasPadding(str) { + return /^-?(0+)\d/.test(str); +} + +function padZeros(value, tok, options) { + if (!tok.isPadded) { + return value; + } + + let diff = Math.abs(tok.maxLen - String(value).length); + let relax = options.relaxZeros !== false; + + switch (diff) { + case 0: + return ''; + case 1: + return relax ? '0?' : '0'; + case 2: + return relax ? '0{0,2}' : '00'; + default: { + return relax ? `0{0,${diff}}` : `0{${diff}}`; + } + } +} + +/** + * Cache + */ + +toRegexRange.cache = {}; +toRegexRange.clearCache = () => (toRegexRange.cache = {}); + +/** + * Expose `toRegexRange` + */ + +module.exports = toRegexRange; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json new file mode 100644 index 0000000..4ef194f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json @@ -0,0 +1,88 @@ +{ + "name": "to-regex-range", + "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", + "version": "5.0.1", + "homepage": "https://github.com/micromatch/to-regex-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "micromatch/to-regex-range", + "bugs": { + "url": "https://github.com/micromatch/to-regex-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^7.0.0" + }, + "devDependencies": { + "fill-range": "^6.0.0", + "gulp-format-md": "^2.0.0", + "mocha": "^6.0.2", + "text-table": "^0.2.0", + "time-diff": "^0.3.1" + }, + "keywords": [ + "bash", + "date", + "expand", + "expansion", + "expression", + "glob", + "match", + "match date", + "match number", + "match numbers", + "match year", + "matches", + "matching", + "number", + "numbers", + "numerical", + "range", + "ranges", + "regex", + "regexp", + "regular", + "regular expression", + "sequence" + ], + "verb": { + "layout": "default", + "toc": false, + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "helpers": { + "examples": { + "displayName": "examples" + } + }, + "related": { + "list": [ + "expand-range", + "fill-range", + "micromatch", + "repeat-element", + "repeat-string" + ] + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md new file mode 100644 index 0000000..7fb1f20 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md @@ -0,0 +1,166 @@ +# Changes + + +## 2.0.2 + +* Rename bin to `node-which` + +## 2.0.1 + +* generate changelog and publish on version bump +* enforce 100% test coverage +* Promise interface + +## 2.0.0 + +* Parallel tests, modern JavaScript, and drop support for node < 8 + +## 1.3.1 + +* update deps +* update travis + +## v1.3.0 + +* Add nothrow option to which.sync +* update tap + +## v1.2.14 + +* appveyor: drop node 5 and 0.x +* travis-ci: add node 6, drop 0.x + +## v1.2.13 + +* test: Pass missing option to pass on windows +* update tap +* update isexe to 2.0.0 +* neveragain.tech pledge request + +## v1.2.12 + +* Removed unused require + +## v1.2.11 + +* Prevent changelog script from being included in package + +## v1.2.10 + +* Use env.PATH only, not env.Path + +## v1.2.9 + +* fix for paths starting with ../ +* Remove unused `is-absolute` module + +## v1.2.8 + +* bullet items in changelog that contain (but don't start with) # + +## v1.2.7 + +* strip 'update changelog' changelog entries out of changelog + +## v1.2.6 + +* make the changelog bulleted + +## v1.2.5 + +* make a changelog, and keep it up to date +* don't include tests in package +* Properly handle relative-path executables +* appveyor +* Attach error code to Not Found error +* Make tests pass on Windows + +## v1.2.4 + +* Fix typo + +## v1.2.3 + +* update isexe, fix regression in pathExt handling + +## v1.2.2 + +* update deps, use isexe module, test windows + +## v1.2.1 + +* Sometimes windows PATH entries are quoted +* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode. 
+* doc cli + +## v1.2.0 + +* Add support for opt.all and -as cli flags +* test the bin +* update travis +* Allow checking for multiple programs in bin/which +* tap 2 + +## v1.1.2 + +* travis +* Refactored and fixed undefined error on Windows +* Support strict mode + +## v1.1.1 + +* test +g exes against secondary groups, if available +* Use windows exe semantics on cygwin & msys +* cwd should be first in path on win32, not last +* Handle lower-case 'env.Path' on Windows +* Update docs +* use single-quotes + +## v1.1.0 + +* Add tests, depend on is-absolute + +## v1.0.9 + +* which.js: root is allowed to execute files owned by anyone + +## v1.0.8 + +* don't use graceful-fs + +## v1.0.7 + +* add license to package.json + +## v1.0.6 + +* isc license + +## 1.0.5 + +* Awful typo + +## 1.0.4 + +* Test for path absoluteness properly +* win: Allow '' as a pathext if cmd has a . in it + +## 1.0.3 + +* Remove references to execPath +* Make `which.sync()` work on Windows by honoring the PATHEXT variable. +* Make `isExe()` always return true on Windows. +* MIT + +## 1.0.2 + +* Only files can be exes + +## 1.0.1 + +* Respect the PATHEXT env for win32 support +* should 0755 the bin +* binary +* guts +* package +* 1st diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md new file mode 100644 index 0000000..cd83350 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md @@ -0,0 +1,54 @@ +# which + +Like the unix `which` utility. + +Finds the first instance of a specified executable in the PATH +environment variable. Does not cache the results, so `hash -r` is not +needed when the PATH changes. + +## USAGE + +```javascript +var which = require('which') + +// async usage +which('node', function (er, resolvedPath) { + // er is returned if no "node" is found on the PATH + // if it is found, then the absolute path to the exec is returned +}) + +// or promise +which('node').then(resolvedPath => { ... }).catch(er => { ... not found ... }) + +// sync usage +// throws if not found +var resolved = which.sync('node') + +// if nothrow option is used, returns null if not found +resolved = which.sync('node', {nothrow: true}) + +// Pass options to override the PATH and PATHEXT environment vars. 
+which('node', { path: someOtherPath }, function (er, resolved) { + if (er) + throw er + console.log('found at %j', resolved) +}) +``` + +## CLI USAGE + +Same as the BSD `which(1)` binary. + +``` +usage: which [-as] program ... +``` + +## OPTIONS + +You may pass an options object as the second argument. + +- `path`: Use instead of the `PATH` environment variable. +- `pathExt`: Use instead of the `PATHEXT` environment variable. +- `all`: Return all matches, instead of just the first one. Note that + this means the function returns an array of strings instead of a + single string. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which new file mode 100755 index 0000000..7cee372 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which @@ -0,0 +1,52 @@ +#!/usr/bin/env node +var which = require("../") +if (process.argv.length < 3) + usage() + +function usage () { + console.error('usage: which [-as] program ...') + process.exit(1) +} + +var all = false +var silent = false +var dashdash = false +var args = process.argv.slice(2).filter(function (arg) { + if (dashdash || !/^-/.test(arg)) + return true + + if (arg === '--') { + dashdash = true + return false + } + + var flags = arg.substr(1).split('') + for (var f = 0; f < flags.length; f++) { + var flag = flags[f] + switch (flag) { + case 's': + silent = true + break + case 'a': + all = true + break + default: + console.error('which: illegal option -- ' + flag) + usage() + } + } + return false +}) + +process.exit(args.reduce(function (pv, current) { + try { + var f = which.sync(current, { all: all }) + if (all) + f = f.join('\n') + if (!silent) + console.log(f) + return pv; + } catch (e) { + return 1; + } +}, 0)) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json new file mode 100644 index 0000000..97ad7fb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json @@ -0,0 +1,43 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "which", + "description": "Like which(1) unix command. 
Find the first instance of an executable in the PATH.", + "version": "2.0.2", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-which.git" + }, + "main": "which.js", + "bin": { + "node-which": "./bin/node-which" + }, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "devDependencies": { + "mkdirp": "^0.5.0", + "rimraf": "^2.6.2", + "tap": "^14.6.9" + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublish": "npm run changelog", + "prechangelog": "bash gen-changelog.sh", + "changelog": "git add CHANGELOG.md", + "postchangelog": "git commit -m 'update changelog - '${npm_package_version}", + "postpublish": "git push origin --follow-tags" + }, + "files": [ + "which.js", + "bin/node-which" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js new file mode 100644 index 0000000..82afffd --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js @@ -0,0 +1,125 @@ +const isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +const path = require('path') +const COLON = isWindows ? ';' : ':' +const isexe = require('isexe') + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, opt) => { + const colon = opt.colon || COLON + + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? [''] + : ( + [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(opt.path || process.env.PATH || + /* istanbul ignore next: very unusual */ '').split(colon), + ] + ) + const pathExtExe = isWindows + ? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' + : '' + const pathExt = isWindows ? pathExtExe.split(colon) : [''] + + if (isWindows) { + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } + + return { + pathEnv, + pathExt, + pathExtExe, + } +} + +const which = (cmd, opt, cb) => { + if (typeof opt === 'function') { + cb = opt + opt = {} + } + if (!opt) + opt = {} + + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + const step = i => new Promise((resolve, reject) => { + if (i === pathEnv.length) + return opt.all && found.length ? resolve(found) + : reject(getNotFoundError(cmd)) + + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw + + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd + + resolve(subStep(p, i, 0)) + }) + + const subStep = (p, i, ii) => new Promise((resolve, reject) => { + if (ii === pathExt.length) + return resolve(step(i + 1)) + const ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return resolve(p + ext) + } + return resolve(subStep(p, i, ii + 1)) + }) + }) + + return cb ? 
step(0).then(res => cb(null, res), cb) : step(0) +} + +const whichSync = (cmd, opt) => { + opt = opt || {} + + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (let i = 0; i < pathEnv.length; i ++) { + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw + + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd + + for (let j = 0; j < pathExt.length; j ++) { + const cur = p + pathExt[j] + try { + const is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } + } + + if (opt.all && found.length) + return found + + if (opt.nothrow) + return null + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync diff --git a/node_modules/npm-mas-mas/cmaki_identifier/npm-do b/node_modules/npm-mas-mas/cmaki_identifier/npm-do new file mode 100644 index 0000000..4452ece --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/npm-do @@ -0,0 +1,3 @@ +#!/bin/bash +function npm-do { (PATH=$(npm bin):$PATH; eval $@;) } +# set -x PATH ./node_modules/.bin $PATH diff --git a/node_modules/npm-mas-mas/cmaki_identifier/package-lock.json b/node_modules/npm-mas-mas/cmaki_identifier/package-lock.json new file mode 100644 index 0000000..0c82187 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/package-lock.json @@ -0,0 +1,480 @@ +{ + "name": "cmaki_identifier", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "cmaki_identifier", + "version": "1.0.0", + "hasInstallScript": true, + "license": "MIT", + "devDependencies": { + "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": 
"sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm-mas-mas": { + "version": "0.0.1", + "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", + "dev": true, + "license": "MIT", + "dependencies": { + "shelljs": ">=0.8.5" + }, + "bin": { + "cmaki": "cmaki_scripts/cmaki.js" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", + "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "execa": "^5.1.1", + "fast-glob": "^3.3.2" + }, + "engines": { + "node": 
">=18" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + } + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/package.json b/node_modules/npm-mas-mas/cmaki_identifier/package.json new file mode 100644 index 0000000..ecdd629 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/package.json @@ -0,0 +1,30 @@ +{ + "name": "cmaki_identifier", + "version": "1.0.0", + "description": "identify your platform", + "scripts": { + "clean": "cmaki clean", + "setup": "cmaki setup", + "compile": "cmaki compile", + "install": "cmaki setup && cmaki compile", + "test": "cmaki test", + "upload": "cmaki upload" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/makiolo/cmaki_identifier.git" + }, + "keywords": [ + "c++", + "identifier" + ], + "author": "Ricardo Marmolejo García", + "license": "MIT", + "bugs": { + "url": "https://github.com/makiolo/cmaki_identifier/issues" + }, + "homepage": "https://github.com/makiolo/cmaki_identifier#readme", + "devDependencies": { + "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" + } +} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd b/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd new file mode 100644 index 0000000..36bd277 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd @@ -0,0 +1,7 @@ +@echo off +if exist "boostorg_predef" ( + rmdir /s /q boostorg_predef +) +git clone -q https://github.com/boostorg/predef.git boostorg_predef + +..\cmaki_scripts\setup.cmd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/setup.sh b/node_modules/npm-mas-mas/cmaki_identifier/setup.sh new file mode 100644 index 0000000..4e1af5c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/setup.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +if [ -d "boostorg_predef" ]; then + rm -Rf boostorg_predef +fi +git clone -q https://github.com/boostorg/predef.git boostorg_predef + +../cmaki_scripts/setup.sh diff --git a/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt new file mode 100644 index 0000000..b806a9b --- /dev/null +++ 
b/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt @@ -0,0 +1,33 @@ +if(${CMAKE_SYSTEM_NAME} MATCHES "Android") + set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") +endif() + +add_executable(cmaki_identifier cmaki_identifier.cpp) + +install(TARGETS cmaki_identifier DESTINATION $ENV{CMAKI_INSTALL}) +install(FILES ../cmaki_identifier.cmake DESTINATION $ENV{CMAKI_INSTALL}) +install(PROGRAMS ../cmaki_identifier.sh DESTINATION $ENV{CMAKI_INSTALL}) +install(PROGRAMS ../cmaki_emulator.sh DESTINATION $ENV{CMAKI_INSTALL}) +add_test( + NAME all + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +add_test( + NAME os + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +add_test( + NAME arch + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +add_test( + NAME compiler + COMMAND cmaki_identifier + WORKING_DIRECTORY $ENV{CMAKI_INSTALL} + ) +set_tests_properties(os PROPERTIES ENVIRONMENT "CMAKI_INFO=OS") +set_tests_properties(arch PROPERTIES ENVIRONMENT "CMAKI_INFO=ARCH") +set_tests_properties(compiler PROPERTIES ENVIRONMENT "CMAKI_INFO=COMPILER") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp b/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp new file mode 100644 index 0000000..6cb91e7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp @@ -0,0 +1,345 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef __EMSCRIPTEN__ +#include +#endif + +#define STR_HELPER(x) #x +#define STR(x) STR_HELPER(x) + +#ifdef _WIN32 + +// problems with variadic in windows +std::string get_environment(const char* varname, const char* default_) +{ + char* varname_str = getenv(varname); + std::string value_str; + if(varname_str == NULL) + value_str = default_; + else + value_str = varname_str; + return value_str; +} + +#else + +template +std::string get_environment(T default_) +{ + return default_; +} + +template +std::string get_environment(T varname, Args ... others) +{ + char* varname_str = getenv(varname); + std::string value_str; + if(varname_str == NULL) + value_str = get_environment(others...); + else + value_str = varname_str; + return value_str; +} + +#endif + +int main() +{ +#ifdef __EMSCRIPTEN__ + #define OPERATIVE_SYSTEM "javascript" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_WINDOWS + #define OPERATIVE_SYSTEM "windows" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_ANDROID + #define OPERATIVE_SYSTEM "android" + #define OPERATIVE_RESTRICTION "_api_" STR(__ANDROID_API__) +#elif BOOST_OS_LINUX + #define OPERATIVE_SYSTEM "linux" + #ifdef __GLIBC__ + #define OPERATIVE_RESTRICTION "_glibc_" STR(__GLIBC__) "." 
STR(__GLIBC_MINOR__) + #else + #define OPERATIVE_RESTRICTION "" + #endif +#elif BOOST_OS_MACOS + #define OPERATIVE_SYSTEM "macos" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_AIX + #define OPERATIVE_SYSTEM "aix" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_AMIGAOS + #define OPERATIVE_SYSTEM "amigaos" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_BEOS + #define OPERATIVE_SYSTEM "beos" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_BSD + #if BOOST_OS_BSD_DRAGONFLY + #define OPERATIVE_SYSTEM "dragonfly_bsd" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_FREE + #define OPERATIVE_SYSTEM "freebsd" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_BSDI + #define OPERATIVE_SYSTEM "bsdios" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_NET + #define OPERATIVE_SYSTEM "netbsd" + #define OPERATIVE_RESTRICTION "" + #elif BOOST_OS_BSD_OPEN + #define OPERATIVE_SYSTEM "openbsd" + #define OPERATIVE_RESTRICTION "" + #endif +#elif BOOST_OS_CYGWIN + #define OPERATIVE_SYSTEM "cygwin" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_HPUX + #define OPERATIVE_SYSTEM "hpux" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_IRIX + #define OPERATIVE_SYSTEM "irix" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_OS400 + #define OPERATIVE_SYSTEM "os400" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_QNX + #define OPERATIVE_SYSTEM "qnx" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_SOLARIS + #define OPERATIVE_SYSTEM "solaris" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_UNIX + #define OPERATIVE_SYSTEM "unix" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_SVR4 + #define OPERATIVE_SYSTEM "svr4" + #define OPERATIVE_RESTRICTION "" +#elif BOOST_OS_VMS + #define OPERATIVE_SYSTEM "vms" + #define OPERATIVE_RESTRICTION "" +#else + #define OPERATIVE_SYSTEM "unknown_so" + #define OPERATIVE_RESTRICTION "" +#endif + +#if BOOST_ARCH_X86 + #if BOOST_ARCH_X86_32 + #define ARCHITECTURE "32" + #elif BOOST_ARCH_X86_64 + #define ARCHITECTURE "64" + #else + #define ARCHITECTURE "unknown_arch" + #endif +#elif BOOST_ARCH_ARM + #define ARCHITECTURE "arm" +#elif BOOST_ARCH_ALPHA + #define ARCHITECTURE "alpha" +#elif BOOST_ARCH_BLACKFIN + #define ARCHITECTURE "blackfin" +#elif BOOST_ARCH_CONVEX + #define ARCHITECTURE "convex" +#elif BOOST_ARCH_IA64 + #define ARCHITECTURE "ia64" +#elif BOOST_ARCH_M68K + #define ARCHITECTURE "m68k" +#elif BOOST_ARCH_MIPS + #define ARCHITECTURE "mips" +#elif BOOST_ARCH_PARISK + #define ARCHITECTURE "parisk" +#elif BOOST_ARCH_PPC + #define ARCHITECTURE "ppc" +#elif BOOST_ARCH_PYRAMID + #define ARCHITECTURE "pyramid" +#elif BOOST_ARCH_RS6000 + #define ARCHITECTURE "rs6000" +#elif BOOST_ARCH_SPARC + #define ARCHITECTURE "sparc" +#elif BOOST_ARCH_SH + #define ARCHITECTURE "sh" +#elif BOOST_ARCH_SYS370 + #define ARCHITECTURE "sys370" +#elif BOOST_ARCH_SYS390 + #define ARCHITECTURE "sys390" +#elif BOOST_ARCH_Z + #define ARCHITECTURE "z" +#else + #define ARCHITECTURE "unknown_arch" +#endif + +#if BOOST_COMP_MSVC + #define COMPILER "msvc" + #if _MSC_VER == 1911 + #define COMPILER_RESTRICTION "_2017" + #elif _MSC_VER == 1910 + #define COMPILER_RESTRICTION "_2017" + #elif _MSC_VER == 1900 + #define COMPILER_RESTRICTION "_2015" + #elif _MSC_VER == 1800 + #define COMPILER_RESTRICTION "_2013" + #elif _MSC_VER == 1700 + #define COMPILER_RESTRICTION "_2012" + #elif _MSC_VER == 1600 + #define COMPILER_RESTRICTION "_2010" + #elif _MSC_VER == 1500 + #define COMPILER_RESTRICTION "_2008" + #elif _MSC_VER == 1400 + #define COMPILER_RESTRICTION "_2005" + #elif 
_MSC_VER == 1310 + #define COMPILER_RESTRICTION "_2003" + #else + #define COMPILER_RESTRICTION "_msc_ver_" STR(_MSC_VER) + #endif +#elif BOOST_COMP_GNUC + #define COMPILER "gcc" + #define COMPILER_RESTRICTION "_" STR(__GNUC__) +#elif BOOST_COMP_CLANG + #define COMPILER "clang" + #define COMPILER_RESTRICTION "_" STR(__clang_major__) +#elif BOOST_COMP_BORLAND + #define COMPILER "borland" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_COMO + #define COMPILER "comeau" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_DEC + #define COMPILER "dec" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_DIAB + #define COMPILER "diab" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_DMC + #define COMPILER "dmc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_SYSC + #define COMPILER "sysc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_EDG + #define COMPILER "edg" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_PATH + #define COMPILER "path" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_GCCXML + #define COMPILER "gccxml" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_GHS + #define COMPILER "ghs" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_HPACC + #define COMPILER "hpacc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_IAR + #define COMPILER "iar" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_IBM + #define COMPILER "ibm" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_INTEL + #define COMPILER "intel" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_KCC + #define COMPILER "kcc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_LLVM + #define COMPILER "llvm" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_HIGHC + #define COMPILER "highc" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_MWERKS + #define COMPILER "mwerks" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_MRI + #define COMPILER "mri" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_MPW + #define COMPILER "mrw" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_PALM + #define COMPILER "palm" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_PGI + #define COMPILER "pgi" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_SGI + #define COMPILER "sgi" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_SUNPRO + #define COMPILER "sunpro" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_TENDRA + #define COMPILER "tendra" + #define COMPILER_RESTRICTION "" +#elif BOOST_COMP_WATCOM + #define COMPILER "watcom" + #define COMPILER_RESTRICTION "" +#else + #define COMPILER "unknown_compiler" + #define COMPILER_RESTRICTION "" +#endif + + // structure (3 chunks joined with "-"): + // 1. platform (2 or 3 chunks joined with "_") + // 1.1. operative system (string but forbidden "_" and "-") + // 1.2. architecture (string but forbidden "_" and "-") + // 1.3. (optional) operative system restriction (is explanation and version joined with "_") + // 1.3.1. what is this restriction (string but forbidden "_" and "-") + // 1.3.2. version (1-4 chunks joined with ".") + // 2. compiler (1 or 2 chunks joined with "_") + // 2.1. compiler (string but forbidden "_" and "-") + // 2.2. (optional) compiler restriction (is version) + // 2.2.1. version (1-4 chunks joined with ".") + // 3. build mode (1 or 2 chunks joined with "_") + // 3.1. build_mode (string but forbidden "_" and "-") + // 3.2. 
(optional) build mode restrictions + + std::string build_mode = get_environment("MODE", "Debug"); + std::string cmaki_entropy = get_environment("CMAKI_ENTROPY", ""); + std::string cmaki_info = get_environment("CMAKI_INFO", "ALL"); + + std::transform(build_mode.begin(), build_mode.end(), build_mode.begin(), ::tolower); + std::transform(cmaki_entropy.begin(), cmaki_entropy.end(), cmaki_entropy.begin(), ::tolower); + + // TODO: mas consultas + // Arquitectura, sólo el numero: 32 o 64 + // Compilador: COMPILER + COMPILER_RESTRICTION + // Todo: OPERATIVE_SYSTEM + "_" + ARCHITECTURE + OPERATIVE_RESTRICTION + "-" + COMPILER + COMPILER_RESTRICTION + "-" + build_mode + cmaki_entropy + if(cmaki_info == "OS") + { + std::cout << OPERATIVE_SYSTEM + << std::endl; + } + else if(cmaki_info == "ARCH") + { + std::cout << ARCHITECTURE + << std::endl; + } + else if(cmaki_info == "COMPILER") + { + std::cout << COMPILER + << COMPILER_RESTRICTION + << std::endl; + } + else // if(cmaki_info == "ALL") + { + std::cout << OPERATIVE_SYSTEM + << "_" << ARCHITECTURE + << OPERATIVE_RESTRICTION + << "-" << COMPILER + << COMPILER_RESTRICTION + << "-" << build_mode; + if(cmaki_entropy.length() > 0) + { + std::cout << "-" << cmaki_entropy; + } + std::cout << std::endl; + } +} diff --git a/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml b/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml new file mode 100644 index 0000000..44de95c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml @@ -0,0 +1,5 @@ +language: c +services: docker +os: linux +script: + - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki_scripts/LICENSE b/node_modules/npm-mas-mas/cmaki_scripts/LICENSE new file mode 100644 index 0000000..53546c1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Ricardo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_scripts/README.md b/node_modules/npm-mas-mas/cmaki_scripts/README.md new file mode 100644 index 0000000..e227c42 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/README.md @@ -0,0 +1,9 @@ +# cmaki_scripts +scripts for cmaki: compile, tests, upload .... 
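+
+# typical local usage
+A minimal sketch of the usual flow, assuming the scripts are vendored under `node_modules/cmaki_scripts` and that `CMAKI_PWD`/`CMAKI_INSTALL` are already exported; this mirrors what `ci.sh` does when no `package.json` is present (`MODE` defaults to `Debug` in the scripts themselves):
+
+```bash
+#!/bin/bash
+set -e
+export MODE="${MODE:-Debug}"               # build type read by setup.sh / compile.sh / test.sh
+./node_modules/cmaki_scripts/setup.sh      # conan install (when a conanfile exists) + cmake configure
+./node_modules/cmaki_scripts/compile.sh    # cmake --build ... --target install
+./node_modules/cmaki_scripts/test.sh       # ctest --output-on-failure
+# ./node_modules/cmaki_scripts/upload.sh   # optional: package and upload the artifact (needs cmaki.yml)
+```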
+ +# windows problems +``` +$ set PATH=%CD%\node_modules\cmaki_scripts;%PATH% +$ echo %PATHEXT% +.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC;.PY;.JS; +``` diff --git a/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd b/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd new file mode 100644 index 0000000..72202c8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd @@ -0,0 +1,15 @@ +@echo off +IF EXIST node_modules\cmaki ( + echo . +) else ( + md node_modules\cmaki + cd node_modules && git clone -q https://github.com/makiolo/cmaki.git && cd .. + cd node_modules/cmaki && rm -Rf .git && cd ..\.. +) +IF EXIST node_modules\cmaki_generator ( + echo . +) else ( + md node_modules\cmaki_generator + cd node_modules && git clone -q https://github.com/makiolo/cmaki_generator.git && cd .. + cd node_modules/cmaki_generator && rm -Rf .git && cd ..\.. +) diff --git a/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd b/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd new file mode 100644 index 0000000..0a2db63 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd @@ -0,0 +1,40 @@ +@echo off + +echo [0/3] preinstall +set PATH=%CMAKI_PWD%\node_modules\cmaki_scripts;%PATH% +env | sort + +powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/cmaki_depends.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" +if %errorlevel% neq 0 exit /b %errorlevel% + +if exist package.json ( + + echo [1/3] prepare + :: call ncu -u + npm cache clean --force + + echo [2/3] compile + npm install + if %errorlevel% neq 0 exit /b %errorlevel% + + echo [3/3] run tests + npm test + if %errorlevel% neq 0 exit /b %errorlevel% + +) else ( + + echo [1/3] prepare + if exist node_modules\cmaki_scripts (rmdir /s /q node_modules\cmaki_scripts) + powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/bootstrap.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" + if %errorlevel% neq 0 exit /b %errorlevel% + + echo [2/3] compile + call node_modules\cmaki_scripts\install.cmd + if %errorlevel% neq 0 exit /b %errorlevel% + + echo [3/3] run tests + call node_modules\cmaki_scripts\test.cmd + if %errorlevel% neq 0 exit /b %errorlevel% + +) + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/ci.sh b/node_modules/npm-mas-mas/cmaki_scripts/ci.sh new file mode 100644 index 0000000..066caae --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/ci.sh @@ -0,0 +1,46 @@ +#!/bin/bash +set -e + +export NPP_CACHE="${NPP_CACHE:-FALSE}" + +env | sort + +if [[ -d "bin" ]]; then + rm -Rf bin +fi + +if [[ -d "artifacts" ]]; then + rm -Rf artifacts +fi + +if [[ -d "node_modules" ]]; then + rm -Rf node_modules +fi + +if [ -f "artifacts.json" ]; then + rm artifacts.json +fi + +if [ -f "package.json" ]; then + + echo [1/2] compile + npm install + + echo [2/2] run tests + npm test +else + echo [1/2] compile + ./node_modules/cmaki_scripts/setup.sh && ./node_modules/cmaki_scripts/compile.sh + + echo [2/2] run tests + ./node_modules/cmaki_scripts/test.sh +fi + +if [ -f "cmaki.yml" ]; then + echo [3/3] upload artifact + if [ -f "package.json" ]; then + npm run upload + else + ./node_modules/cmaki_scripts/upload.sh + fi +fi diff --git a/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd b/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd 
new file mode 100644 index 0000000..5f83632 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd @@ -0,0 +1,3 @@ +@echo off +rd /s /q artifacts 2> NUL +rd /s /q coverage 2> NUL diff --git a/node_modules/npm-mas-mas/cmaki_scripts/clean.sh b/node_modules/npm-mas-mas/cmaki_scripts/clean.sh new file mode 100755 index 0000000..b204603 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/clean.sh @@ -0,0 +1,16 @@ +#!/bin/bash +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export CC="${CC:-gcc}" +export MODE="${MODE:-Debug}" +export COMPILER_BASENAME=$(basename ${CC}) + +if [ -d $COMPILER_BASENAME/$MODE ]; then + rm -Rf $COMPILER_BASENAME/$MODE +fi +if [ "$NPP_CACHE" == "FALSE" ]; then + rm -Rf artifacts 2> /dev/null +fi +rm -Rf coverage 2> /dev/null +rm -Rf gcc 2> /dev/null +rm -Rf clang 2> /dev/null + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd b/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd new file mode 100644 index 0000000..674bfd5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd @@ -0,0 +1,22 @@ +@ECHO OFF +SET DIRWORK=%~dp0 + +IF NOT EXIST "%NODE%" ( + IF DEFINED NODEHOME ( + IF EXIST "%NODEHOME%\node.exe" ( + SET NODE="%NODEHOME%\node.exe" + ) ELSE ( + ECHO Error: Missing node.exe from node home: "%NODEHOME%" + ) + ) ELSE ( + IF EXIST "C:\Program Files\nodejs\node.exe" ( + ECHO WARNING: Defaulting NODE configuration + SET NODE=C:\Program Files\nodejs\node.exe + SET NODEHOME=C:\Program Files\nodejs + ) ELSE ( + ECHO ERROR: NODE configuration unavailable! + ) + ) +) + +"%NODE%" %DIRWORK%\cmaki.js %* diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js b/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js new file mode 100755 index 0000000..e204fd7 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js @@ -0,0 +1,193 @@ +#!/usr/bin/env node + +var os = require('os') +var fs = require('fs'); +var path = require('path') +var shelljs = require('shelljs'); +var is_win = (os.platform() === 'win32'); + +if(!process.env.CMAKI_PWD) +{ + if (fs.existsSync(path.join("..", "..", "node_modules", "npm-mas-mas"))) { + shelljs.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); + process.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); + } else { + shelljs.env['CMAKI_PWD'] = path.join(process.cwd()); + process.env['CMAKI_PWD'] = path.join(process.cwd()); + } +} +else +{ + shelljs.env['CMAKI_PWD'] = process.env['CMAKI_PWD']; +} + +if(!process.env.CMAKI_INSTALL) +{ + shelljs.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); + process.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); +} +else +{ + shelljs.env['CMAKI_INSTALL'] = process.env['CMAKI_INSTALL']; +} + +if(!process.env.NPP_SERVER) +{ + shelljs.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' + process.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' +} +else +{ + shelljs.env['NPP_SERVER'] = process.env['NPP_SERVER']; +} + +if(!process.env.NPP_CACHE) +{ + shelljs.env['NPP_CACHE'] = 'TRUE' + process.env['NPP_CACHE'] = 'TRUE' +} +else +{ + shelljs.env['NPP_CACHE'] = process.env['NPP_CACHE']; +} + +if(is_win) +{ + cmaki_identifier = 'cmaki_identifier.cmd' +} +else +{ + cmaki_identifier = 'cmaki_identifier.sh' +} + + +// no check in cmaki_identifier for avoid recursion +if( process.cwd().replace(/\\/g, "/").search("/cmaki_identifier") == -1 ) +{ + if(!fs.existsSync( path.join( process.env['CMAKI_INSTALL'], cmaki_identifier) )) + { + dir_identifier = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_identifier'); 
+ + backup1 = shelljs.env['CMAKI_PWD']; + backup2 = process.env['CMAKI_PWD']; + + shelljs.env['CMAKI_PWD'] = dir_identifier; + process.env['CMAKI_PWD'] = dir_identifier; + + shelljs.cd( dir_identifier ); + + if (shelljs.exec('npm install').code !== 0) { + shelljs.echo('Error detecting compiler (compiling cmaki_identifier ...)'); + shelljs.exit(1); + } + + shelljs.env['CMAKI_PWD'] = backup1; + process.env['CMAKI_PWD'] = backup2; + } +} + +if(!process.env.MODE) +{ + shelljs.env['MODE'] = 'Debug'; + process.env['MODE'] = 'Debug'; +} +else +{ + shelljs.env['MODE'] = process.env['MODE']; +} + +function trim(s) +{ + return ( s || '' ).replace( /^\s+|\s+$/g, '' ); +} + +var environment_vars = []; +next_is_environment_var = false; +process.argv.forEach(function(val, index, array) +{ + if(next_is_environment_var) + { + environment_vars.push(val); + } + next_is_environment_var = (val == '-e'); +}); +environment_vars.forEach(function(val, index, array) +{ + var chunks = val.split("="); + if( chunks.length == 2 ) + { + shelljs.env[chunks[0]] = chunks[1]; + process.env[chunks[0]] = chunks[1]; + } + else + { + console.log("Error in -e with value: " + val); + } +}); + +//////////////////////////////////////////////////////////////////////////////// +// change cwd +shelljs.cd( process.env['CMAKI_PWD'] ); +//////////////////////////////////////////////////////////////////////////////// + + +var dir_script; +var script = process.argv[2]; +if (is_win) +{ + if(fs.existsSync(path.join(process.cwd(), script+".cmd"))) + { + dir_script = process.cwd(); + } + else + { + dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); + } +} +else +{ + if(fs.existsSync(path.join(process.cwd(), script+".sh"))) + { + dir_script = process.cwd(); + } + else + { + dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); + } +} + +if (is_win) +{ + script_execute = path.join(dir_script, script+".cmd"); + exists = fs.existsSync(script_execute); + caller_execute = "cmd /c "; + script_execute = script_execute.replace(/\//g, "\\"); +} +else +{ + script_execute = path.join(dir_script, script+".sh"); + exists = fs.existsSync(script_execute); + caller_execute = "bash "; + script_execute = script_execute.replace(/\\/g, "/"); +} + +console.log("Execute: " + caller_execute + script_execute); + +if(exists) +{ + var child = shelljs.exec(caller_execute + script_execute, {async:true, silent:true}, function(err, stdout, stderr) { + process.exit(err); + }); + child.stdout.on('data', function(data) { + console.log(trim(data)); + }); + child.stderr.on('data', function(data) { + console.log(trim(data)); + }); +} +else +{ + console.log("[error] dont exits: " + script_execute); + process.exit(1); +} + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd b/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd new file mode 100644 index 0000000..2b6cea5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd @@ -0,0 +1,7 @@ +@echo off + +pip install pyyaml +if %errorlevel% neq 0 exit /b %errorlevel% + +pip install poster +if %errorlevel% neq 0 exit /b %errorlevel% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh b/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh new file mode 100644 index 0000000..e52dc93 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +if [[ "$OSTYPE" =~ ^linux ]]; then + curl -sL https://deb.nodesource.com/setup_8.x | sudo bash 
- + sudo apt install -y nodejs + sudo npm install -g npm + + # echo 'export PATH=$HOME/local/bin:$PATH' >> ~/.bashrc + # . ~/.bashrc + # mkdir ~/local + # mkdir ~/node-latest-install + # cd ~/node-latest-install + # curl http://nodejs.org/dist/node-latest.tar.gz | tar xz --strip-components=1 + # ./configure --prefix=~/local + # make install # ok, fine, this step probably takes more than 30 seconds... + # curl https://www.npmjs.org/install.sh | sh + # cd - + + sudo apt install -y lcov + sudo apt install -y cppcheck + sudo apt install -y libxaw7-dev # for OIS + sudo apt install -y libgl1-mesa-dev # flow glew + sudo apt install -y freeglut3 freeglut3-dev # for glu (needed for bullet2) + + # cmake 3.5 precompiled + DEPS_DIR=$(pwd)/deps + if [[ -d "$DEPS_DIR" ]]; then + rm -Rf $DEPS_DIR + fi + CMAKE_FILE=cmake-3.5.2-Linux-x86_64.tar.gz + CMAKE_URL=http://www.cmake.org/files/v3.5/${CMAKE_FILE} + wget ${CMAKE_URL} --quiet --no-check-certificate + mkdir -p cmake + tar -xzf ${CMAKE_FILE} -C cmake --strip-components 1 + mv cmake ${DEPS_DIR} + export PATH=${DEPS_DIR}/cmake/bin:${PATH} + cmake --version +else + /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" + brew update + brew doctor + export PATH="/usr/local/bin:$PATH" + brew install node + brew install cmake + brew install lcov + brew install cppcheck +fi +pip install --user pyyaml +pip install --user poster +pip install --user codecov diff --git a/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd b/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd new file mode 100644 index 0000000..178869f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd @@ -0,0 +1,14 @@ +@echo off + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +echo running in mode %MODE% ... +cd %MODE% +cmake --build . --config %MODE% --target install +set lasterror=%errorlevel% +cd .. +exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/compile.sh b/node_modules/npm-mas-mas/cmaki_scripts/compile.sh new file mode 100755 index 0000000..084a6ef --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/compile.sh @@ -0,0 +1,16 @@ +#!/bin/bash +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_TARGET="${CMAKI_TARGET:-install}" +export COMPILER_BASENAME=$(basename ${CC}) + +echo "running in mode $MODE ... ($COMPILER_BASENAME)" +cd $COMPILER_BASENAME/$MODE + +# CORES=$(grep -c ^processor /proc/cpuinfo) +CORES=12 +cmake --build . --config $MODE --target $CMAKI_TARGET -- -j$CORES -k VERBOSE=1 || cmake --build . --config $MODE --target $CMAKI_TARGET -- -j1 VERBOSE=1 +code=$? +exit $code diff --git a/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd b/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd new file mode 100644 index 0000000..ae010cb --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd @@ -0,0 +1,28 @@ +@echo off + +if DEFINED COMPILER ( + echo Using COMPILER: %COMPILER% +) else ( + set COMPILER="Visual Studio" + echo Env var COMPILER is not defined. Using by default: %COMPILER% +) + +if DEFINED COMPILER_VERSION ( + echo Using COMPILER_VERSION: %COMPILER_VERSION% +) else ( + set COMPILER_VERSION=16 + echo Env var COMPILER_VERSION is not defined. Using by default: %COMPILER_VERSION% +) + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +if "%NPP_CI%" == "FALSE" ( + conan install . 
--build missing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% +) + +conan create . npm-mas-mas/testing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% -tf None + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh b/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh new file mode 100644 index 0000000..8e84f01 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +set -e + +export MODE="${MODE:-Debug}" +export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" + +if [ "$NPP_CI" == "FALSE" ]; then + conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION +fi + +conan create . npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/docker.sh b/node_modules/npm-mas-mas/cmaki_scripts/docker.sh new file mode 100755 index 0000000..2b760f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/docker.sh @@ -0,0 +1,22 @@ +#!/bin/bash +export IMAGE="${IMAGE:-linux-x64}" +export MODE="${MODE:-Debug}" +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export PACKAGE="${PACKAGE:-undefined}" + +docker run --rm makiolo/$IMAGE > ./dockcross-$IMAGE +sed -e "s#DEFAULT_DOCKCROSS_IMAGE=dockcross/$IMAGE#DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE#g" dockcross-$IMAGE > makiolo-$IMAGE +chmod +x ./makiolo-$IMAGE +if [ "$PACKAGE" == "undefined" ]; then + # CI + ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh | bash' +else + # build package + ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE -e PACKAGE=$PACKAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.sh | CMAKI_INSTALL=$(pwd)/bin bash' +fi +error=$? + +# clean container +docker rmi -f makiolo/$IMAGE + +exit $error diff --git a/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd b/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd new file mode 100644 index 0000000..7b70325 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd @@ -0,0 +1,6 @@ +@echo off +git checkout -b tmp +git checkout master +git merge master +git pull + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh b/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh new file mode 100755 index 0000000..48c48f1 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e +git checkout -b tmp +git checkout master +git merge master +git pull + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/init.sh b/node_modules/npm-mas-mas/cmaki_scripts/init.sh new file mode 100755 index 0000000..ec6e0f3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/init.sh @@ -0,0 +1,18 @@ +#!/bin/sh +PWD="`dirname \"$0\"`" + +cp -v $PWD/init/.travis.yml . +git add .travis.yml + +cp -v $PWD/init/appveyor.yml . 
+git add appveyor.yml + +cp -v $PWD/init/.clang-format . +git add .clang-format + +cp -v $PWD/init/.gitignore . +git add .gitignore + +cp -v $PWD/init/cmaki.yml . +git add cmaki.yml + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd b/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd new file mode 100644 index 0000000..3366ec8 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd @@ -0,0 +1,30 @@ +@echo off + +:: IF DEFINED CMAKI_PWD ( +:: set CMAKI_PWD=%CMAKI_PWD% +:: ) else ( +:: set CMAKI_PWD=%CD% +:: ) +:: +:: IF DEFINED CMAKI_INSTALL ( +:: set CMAKI_INSTALL=%CMAKI_INSTALL% +:: ) else ( +:: set CMAKI_INSTALL=%CMAKI_PWD%/bin +:: ) + +IF DEFINED MODE ( + set MODE=%MODE% +) else ( + set MODE=Debug +) + +IF DEFINED YMLFILE ( + build --yaml=%YMLFILE% -d +) else ( + IF DEFINED PACKAGE ( + build %PACKAGE% -d + ) else ( + echo Error: must define env var YMLFILE or PACKAGE + ) +) + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh b/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh new file mode 100755 index 0000000..a0fd049 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +export MODE="${MODE:-Debug}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export PACKAGE="${PACKAGE:-undefined}" +export YMLFILE="${YMLFILE:-undefined}" + +if [ "$YMLFILE" == "undefined" ]; then + if [ "$PACKAGE" == "undefined" ]; then + echo Error: must define env var YMLFILE or PACKAGE + else + echo building $PACKAGE ... + ./build $PACKAGE --no-back-yaml --no-run-tests -d + fi +else + echo building from yaml file: ${YMLFILE} ... + ./build --yaml=${YMLFILE} --no-run-tests -d +fi diff --git a/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd b/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd new file mode 100644 index 0000000..87c7d0c --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd @@ -0,0 +1,3 @@ +@echo off +git push && npm publish + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/publish.sh b/node_modules/npm-mas-mas/cmaki_scripts/publish.sh new file mode 100755 index 0000000..c74a96f --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/publish.sh @@ -0,0 +1,3 @@ +#!/bin/bash +git push && npm publish + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/replace.sh b/node_modules/npm-mas-mas/cmaki_scripts/replace.sh new file mode 100755 index 0000000..97884f3 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/replace.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +MV="git mv" + +if [[ $3 == "run" ]]; +then + # do sed implace + run=" -i" +else + run="" +fi + +command="ag -w $1 -l --ignore artifacts --ignore node_modules --ignore gcc --ignore clang --ignore bin" +command_search_files="$command | grep -e $1.cpp$ -e $1.h$" +command_search_files_count="$command_search_files | xargs -I{} grep -h -e ^#include {} | grep -h $1 | wc -l" +count=$(eval $command_search_files_count) + +if [[ $count -gt 0 ]]; +then + echo "se renonbrara los siguientes ficheros (utilizando $MV):" + for file in $(eval $command_search_files); + do + destiny=$(echo $file | sed "s/\<$1\>/$2/g") + if [[ $3 == "run" ]]; + then + echo run: $MV $file $destiny + $MV $file $destiny + else + echo dry-run: $MV $file $destiny + fi + done +else + echo "No es necesario renombrar ficheros" +fi + +if [[ $3 == "run" ]]; +then + # echo run: "$command | xargs sed "s/\<$1\>/$2/g" $run" + echo replacing ... 
+else + echo replace in dry-run +fi +eval $command | xargs -I{} sed "s@\<$1\>@$2@g" $run {} + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/run.cmd b/node_modules/npm-mas-mas/cmaki_scripts/run.cmd new file mode 100644 index 0000000..2acc40d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/run.cmd @@ -0,0 +1,5 @@ +@echo off +call node_modules\cmaki\setup.cmd +call node_modules\cmaki\compile.cmd +call node_modules\cmaki\test.cmd + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/search.sh b/node_modules/npm-mas-mas/cmaki_scripts/search.sh new file mode 100755 index 0000000..0e436b4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/search.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +ag -w --cpp $1 --ignore cmaki --ignore depends --ignore build --ignore cmaki_generator --ignore baul + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd b/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd new file mode 100644 index 0000000..8ac63c5 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd @@ -0,0 +1,64 @@ +@echo off + +setlocal enableextensions + + +:: export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +:: export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" + +if DEFINED COMPILER ( + echo Using COMPILER: %COMPILER% +) else ( + set COMPILER="Visual Studio" + echo Env var COMPILER is not defined. Using by default: %COMPILER% +) + +if DEFINED COMPILER_VERSION ( + echo Using COMPILER_VERSION: %COMPILER_VERSION% +) else ( + set COMPILER_VERSION=16 + echo Env var COMPILER_VERSION is not defined. Using by default: %COMPILER_VERSION% +) + +if DEFINED GENERATOR ( + echo Using Visual Studio generator: %GENERATOR% +) else ( + set GENERATOR=Visual Studio 16 2019 + echo Env var GENERATOR is not defined. Using by default: %GENERATOR% +) + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +if "%Platform%" == "x86" ( + set ARCH=x86 +) else ( + set GENERATOR=%GENERATOR% Win64 + set ARCH=x86_64 +) + +echo running in mode %COMPILER% %COMPILER_VERSION% %ARCH% %MODE% ... 
+if exist %MODE% (rmdir /s /q %MODE%) +md %MODE% + +:: setup +cd %MODE% + +conan install %CMAKI_PWD% --build never -s build_type=%MODE% -s arch=%ARCH% -s arch_build=%ARCH% -s compiler=%COMPILER% -s compiler.version=%COMPILER_VERSION% + +IF DEFINED Configuration ( + IF DEFINED Platform ( + cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -G"%GENERATOR%" -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% + ) ELSE ( + cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% + ) +) ELSE ( + cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% +) + +set lasterror=%errorlevel% +cd %CMAKI_PWD% +exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/setup.sh b/node_modules/npm-mas-mas/cmaki_scripts/setup.sh new file mode 100755 index 0000000..404e5a9 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/setup.sh @@ -0,0 +1,54 @@ +#!/bin/bash + +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export COMPILER="${COMPILER:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" +export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" +export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export CMAKI_GENERATOR="${CMAKI_GENERATOR:-Unix Makefiles}" +export COVERAGE="${COVERAGE:-FALSE}" +export TESTS_VALGRIND="${TESTS_VALGRIND:-FALSE}" +export COMPILER_BASENAME=$(basename ${CC}) +export CMAKE_TOOLCHAIN_FILE="${CMAKE_TOOLCHAIN_FILE:-"no cross compile"}" +export BUILD_DIR="${BUILD_DIR:-${COMPILER_BASENAME}/${MODE}}" + +if [ "$CMAKE_TOOLCHAIN_FILE" == "no cross compile" ]; then + export CMAKE_TOOLCHAIN_FILE_FILEPATH="" +else + export CMAKE_TOOLCHAIN_FILE_FILEPATH=" -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" +fi + +echo "running in mode ${MODE} ... ($COMPILER_BASENAME) (${CC} / ${CXX})" + +# setup +if [ ! -d ${BUILD_DIR} ]; then + mkdir -p ${BUILD_DIR} +fi +echo BUILD_DIR=${BUILD_DIR} +cd ${BUILD_DIR} + +if [ -f "CMakeCache.txt" ]; then + rm CMakeCache.txt +fi + +export WITH_CONAN=0 +if [ -f "$CMAKI_PWD/conanfile.txt" ] || [ -f "$CMAKI_PWD/conanfile.py" ]; then + + if [ "$NPP_CI" == "FALSE" ]; then + conan install $CMAKI_PWD --build missing -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} + fi + + echo conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} + if ! conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION}; then + echo Error conan + exit 1 + fi + export WITH_CONAN=1 +fi + +cmake $CMAKI_PWD ${CMAKE_TOOLCHAIN_FILE_FILEPATH} -DCMAKE_MODULE_PATH=${CMAKI_PWD}/node_modules/npm-mas-mas/cmaki -DCMAKE_INSTALL_PREFIX=${CMAKI_INSTALL} -DCMAKE_BUILD_TYPE=${MODE} -DFIRST_ERROR=1 -G"${CMAKI_GENERATOR}" -DCMAKE_C_COMPILER="${CC}" -DCMAKE_CXX_COMPILER="${CXX}" -DNPP_CACHE=${NPP_CACHE} -DCOVERAGE=${COVERAGE} -DTESTS_VALGRIND=${TESTS_VALGRIND} -DWITH_CONAN=${WITH_CONAN} +code=$? 
+exit ${code} diff --git a/node_modules/npm-mas-mas/cmaki_scripts/test.cmd b/node_modules/npm-mas-mas/cmaki_scripts/test.cmd new file mode 100644 index 0000000..33ee4fa --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/test.cmd @@ -0,0 +1,15 @@ +@echo off + +if "%Configuration%" == "Release" ( + set MODE=Release +) else ( + set MODE=Debug +) + +echo running in mode %MODE% ... +cd %MODE% +ctest . --no-compress-output --output-on-failure -T Test -C %MODE% -V +set lasterror=%errorlevel% +cd .. + +if %lasterror% neq 0 exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/test.sh b/node_modules/npm-mas-mas/cmaki_scripts/test.sh new file mode 100755 index 0000000..30ddf60 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/test.sh @@ -0,0 +1,52 @@ +#!/bin/bash +export NPP_CACHE="${NPP_CACHE:-FALSE}" +export NOCODECOV="${NOCODECOV:-FALSE}" +export COVERAGE="${COVERAGE:-FALSE}" +export CPPCHECK="${CPPCHECK:-FALSE}" +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export COMPILER_BASENAME=$(basename ${CC}) + +echo "running in mode $MODE ... ($COMPILER_BASENAME)" +mkdir -p $COMPILER_BASENAME/$MODE +cd $COMPILER_BASENAME/$MODE + +# tests +ctest . --no-compress-output --output-on-failure -T Test -C $MODE -V +code=$? + +# posttests +if [ "$COVERAGE" == "TRUE" ]; then + if [[ "$CC" == "gcc" ]]; then + if [[ "$MODE" == "Debug" ]]; then + find ../.. -name "*.gcno" -o -name "*.gcda" + lcov -c -i -d ../.. -o coverage.base + # aggregate coverage + lcov -c -d ../.. -o coverage.run + # merge pre & run + lcov -d ../.. -a coverage.base -a coverage.run -o coverage.info + lcov -r coverage.info '/usr/*' -o coverage.info + lcov -r coverage.info 'tests/*' -o coverage.info + lcov -r coverage.info 'gtest/*' -o coverage.info + lcov -r coverage.info 'gmock/*' -o coverage.info + lcov -r coverage.info 'node_modules/*' -o coverage.info + # lcov -l coverage.info + genhtml --no-branch-coverage -o ../../coverage/ coverage.info + if [ "$NOCODECOV" == "FALSE" ]; then + bash <(curl -s https://codecov.io/bash) || echo "Codecov did not collect coverage reports" + fi + rm -f coverage.base coverage.run coverage.info + fi + fi +fi + +if [ "$CPPCHECK" == "TRUE" ]; then + if [[ "$CC" == "gcc" ]]; then + if [[ "$MODE" == "Debug" ]]; then + cppcheck -i ../../node_modules -i ../../$COMPILER_BASENAME --inconclusive --check-config --max-configs=10 --enable=all -UDEBUG --inline-suppr ../.. 
+ fi + fi +fi + +exit $code diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd b/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd new file mode 100644 index 0000000..74063e4 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd @@ -0,0 +1,29 @@ +@echo off + +IF DEFINED CMAKI_PWD ( + set CMAKI_PWD=%CMAKI_PWD% +) else ( + set CMAKI_PWD=%CD% +) + +IF DEFINED CMAKI_INSTALL ( + set CMAKI_INSTALL=%CMAKI_INSTALL% +) else ( + set CMAKI_INSTALL=%CMAKI_PWD%/bin +) + +IF DEFINED MODE ( + set MODE=%MODE% +) else ( + set MODE=Debug +) + +set YMLFILE=%CMAKI_PWD%/cmaki.yml + +:: warning, TODO: detectar si hay cambios locales y avisar +git diff %CMAKI_PWD% + +cd %CMAKI_PWD%/node_modules/cmaki_generator +curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.cmd > __make_artifact.cmd +call __make_artifact.cmd +del __make_artifact.cmd diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload.sh b/node_modules/npm-mas-mas/cmaki_scripts/upload.sh new file mode 100755 index 0000000..a088a9e --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/upload.sh @@ -0,0 +1,12 @@ +#!/bin/bash -e + +export CC="${CC:-gcc}" +export CXX="${CXX:-g++}" +export MODE="${MODE:-Debug}" +export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" +export YMLFILE=$CMAKI_PWD/cmaki.yml + +git diff $CMAKI_PWD +cd $CMAKI_PWD/node_modules/npm-mas-mas/cmaki_generator +../cmaki_scripts/make_artifact.sh + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd b/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd new file mode 100644 index 0000000..7d4bb06 --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd @@ -0,0 +1,5 @@ +@echo off + +# upload package +conan upload '*' -r npm-mas-mas --all -c + diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh b/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh new file mode 100644 index 0000000..f62d19d --- /dev/null +++ b/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -e + +# upload package +conan upload '*' -r npm-mas-mas --all -c + diff --git a/node_modules/npm-mas-mas/docker-compose.yml b/node_modules/npm-mas-mas/docker-compose.yml new file mode 100644 index 0000000..8c0ae81 --- /dev/null +++ b/node_modules/npm-mas-mas/docker-compose.yml @@ -0,0 +1,32 @@ +version: '3' +services: + linux64: + build: + context: . + dockerfile: ./docker/Dockerfile.linux-x64 + environment: + - NPP_SERVER=http://servfactor/cpp + command: make clean build + volumes: + - .:/work + + windows64: + build: + context: . + dockerfile: ./docker/Dockerfile.windows-x64 + environment: + - NPP_SERVER=http://servfactor/cpp + command: make clean build + volumes: + - .:/work + + android64: + build: + context: . 
+ dockerfile: ./docker/Dockerfile.android-arm64 + environment: + - NPP_SERVER=http://servfactor/cpp + command: make clean build + volumes: + - .:/work + diff --git a/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 b/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 new file mode 100644 index 0000000..e5b726a --- /dev/null +++ b/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 @@ -0,0 +1,9 @@ +FROM dockcross/android-arm64 +ENV PYTHONUNBUFFERED 1 +RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - +RUN apt install -y nodejs +RUN npm install -g npm +WORKDIR /work +ADD requirements.txt /work +RUN pip install -r requirements.txt + diff --git a/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 b/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 new file mode 100644 index 0000000..4a132bd --- /dev/null +++ b/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 @@ -0,0 +1,16 @@ +FROM dockcross/linux-x64 +ENV PYTHONUNBUFFERED 1 +RUN echo 'deb http://ftp.us.debian.org/debian testing main contrib non-free' > /etc/apt/sources.list.d/gcc.testing.list +RUN apt-get update +RUN apt-get install -y -t testing g++ +RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - +RUN apt install -y nodejs +RUN npm install -g npm +RUN apt install -y libgl1-mesa-dev +RUN apt install -y libx11-dev +RUN apt install -y python3-pip +WORKDIR /work +RUN pip3 install conan==1.6.1 +RUN pip3 install pyyaml==3.13 +RUN pip3 install requests==2.19.1 + diff --git a/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 b/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 new file mode 100644 index 0000000..d30d465 --- /dev/null +++ b/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 @@ -0,0 +1,9 @@ +FROM dockcross/windows-x64 +ENV PYTHONUNBUFFERED 1 +RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - +RUN apt install -y nodejs +RUN npm install -g npm +WORKDIR /work +ADD requirements.txt /work +RUN pip install -r requirements.txt + diff --git a/node_modules/npm-mas-mas/docker/entrypoint.sh b/node_modules/npm-mas-mas/docker/entrypoint.sh new file mode 100755 index 0000000..122cdaf --- /dev/null +++ b/node_modules/npm-mas-mas/docker/entrypoint.sh @@ -0,0 +1,21 @@ +#!//bin/bash + +export MODE="${MODE:-Debug}" +export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" +export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" + +if [ "$(uname)" == "Darwin" ]; then + # mac + export COMPILER=apple-clang COMPILER_VERSION=10.0 COMPILER_LIBCXX=libc++ +fi + +# compile 3rd parties +# conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION +# conan create . npm-mas-mas/testing --build $PACKAGE -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None +# conan upload $PACKAGE/*@npm-mas-mas/testing -r npm-mas-mas --all -c + +# compile only $PACKAGE +conan create . 
npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None +conan upload *@npm-mas-mas/testing -r npm-mas-mas --all -c + diff --git a/node_modules/npm-mas-mas/package.json b/node_modules/npm-mas-mas/package.json new file mode 100644 index 0000000..72449de --- /dev/null +++ b/node_modules/npm-mas-mas/package.json @@ -0,0 +1,29 @@ +{ + "name": "npm-mas-mas", + "version": "0.0.1", + "description": "npm extension for use packing system with C++", + "bin": { + "cmaki": "./cmaki_scripts/cmaki.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/makiolo/npm-mas-mas.git" + }, + "keywords": [ + "cmake", + "c++", + "artifacts" + ], + "author": "Ricardo Marmolejo García", + "license": "MIT", + "bugs": { + "url": "https://github.com/makiolo/npm-mas-mas/issues" + }, + "homepage": "https://github.com/makiolo/npm-mas-mas#readme", + "dependencies": { + "shelljs": ">=0.8.5" + } +} + + + diff --git a/node_modules/npm-mas-mas/servfactor/Dockerfile b/node_modules/npm-mas-mas/servfactor/Dockerfile new file mode 100644 index 0000000..883467b --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/Dockerfile @@ -0,0 +1,15 @@ +FROM nimmis/apache-php5 + +MAINTAINER Ricardo Marmolejo García + +RUN echo "upload_max_filesize=800M" >> /etc/php5/apache2/php.ini +RUN echo "post_max_size=800M" >> /etc/php5/apache2/php.ini +RUN echo "max_input_time=300" >> /etc/php5/apache2/php.ini +RUN echo "max_execution_time=300" >> /etc/php5/apache2/php.ini +RUN echo "error_reporting = E_ALL" >> /etc/php5/apache2/php.ini +RUN echo "display_errors = On" >> /etc/php5/apache2/php.ini + +WORKDIR /var/www/html/cpp +RUN mkdir -p /var/www/html/packages +RUN chmod -R 777 /var/www/html/packages + diff --git a/node_modules/npm-mas-mas/servfactor/Makefile b/node_modules/npm-mas-mas/servfactor/Makefile new file mode 100644 index 0000000..efbcbeb --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/Makefile @@ -0,0 +1,3 @@ +all: + docker-compose up -d --build + diff --git a/node_modules/npm-mas-mas/servfactor/NOTES.md b/node_modules/npm-mas-mas/servfactor/NOTES.md new file mode 100644 index 0000000..31554b7 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/NOTES.md @@ -0,0 +1,4 @@ +need edit php.ini: + +upload_max_filesize = 500M +post_max_size = 500M diff --git a/node_modules/npm-mas-mas/servfactor/README.md b/node_modules/npm-mas-mas/servfactor/README.md new file mode 100644 index 0000000..7f52707 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/README.md @@ -0,0 +1,10 @@ +# servfactor +- default artifacts path is $(pwd)/packages (can use symbolic links) +- chmod o+w packages/ +- chmod o+w packages/stats.txt + +# php.ini +- upload_max_filesize=800M +- post_max_size=800M +- max_input_time=300 +- max_execution_time=300 diff --git a/node_modules/npm-mas-mas/servfactor/docker-compose.yml b/node_modules/npm-mas-mas/servfactor/docker-compose.yml new file mode 100644 index 0000000..9d85ed5 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/docker-compose.yml @@ -0,0 +1,11 @@ +version: '3' +services: + servfactor: + build: . 
+ volumes: + - .:/var/www/html/cpp + - ./packages:/var/www/html/packages + ports: + - "8080:80" + restart: always + diff --git a/node_modules/npm-mas-mas/servfactor/download.php b/node_modules/npm-mas-mas/servfactor/download.php new file mode 100755 index 0000000..6f536d5 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/download.php @@ -0,0 +1,58 @@ + diff --git a/node_modules/npm-mas-mas/servfactor/index.php b/node_modules/npm-mas-mas/servfactor/index.php new file mode 100755 index 0000000..6881558 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/index.php @@ -0,0 +1,227 @@ += 1024 && $i < ( count( $types ) -1 ); $bytes /= 1024, $i++ ); + return( round( $bytes, 2 ) . " " . $types[$i] ); +} + +if(!$quiet_mode) +{ + +/* get disk space free (in bytes) */ +$df = disk_free_space($packages_dir); +/* and get disk space total (in bytes) */ +$dt = disk_total_space($packages_dir); +/* now we calculate the disk space used (in bytes) */ +$du = $dt - $df; +/* percentage of disk used - this will be used to also set the width % of the progress bar */ +$dp = sprintf('%.2f',($du / $dt) * 100); + +/* and we formate the size from bytes to MB, GB, etc. */ +$df = formatSize($df); +$du = formatSize($du); +$dt = formatSize($dt); + +?> + + + + +
+[HTML progress-bar markup lost in extraction; it displayed the computed $dp value as "...% Disk Used"]
+format("c")); + } +} +arsort($arr); +$arr = array_keys($arr); + +if(!$quiet_mode) +{ + if($dp > 95) + { + for ($i = 1; $i <= 10; $i++) { + $last_file = array_pop($arr); + if(u::ends_with($last_file, "-cmake.tar.gz")) + { + $big_file = str_replace("-cmake.tar.gz", ".tar.gz", $last_file); + if(!unlink($dir . $last_file)) + { + echo "error removing ".$last_file."
"; + } + else + { + echo "removed ".$last_file."
"; + } + + if(!unlink($dir . $big_file)) + { + echo "error removing ".$dir.$big_file."
"; + } + else + { + echo "removed ".$dir.$big_file."
"; + } + break; + } + } + } +} + +foreach($arr as $file) +{ + // bug si el package tiene "-" + if(u::ends_with($file, "-cmake.tar.gz")) + { + // $substance = $file; + + preg_match('/([\w-]+)-([0-9\.]+)-([\w-\.]+)-cmake.tar.gz/', $file, $matches); + $package = $matches[1]; + $version = $matches[2]; + $platform = $matches[3]; + + // $substance = substr($substance, 0, strrpos($substance, "-")); + // $platform = substr($substance, strrpos($substance, "-")+1); + // $substance = substr($substance, 0, strrpos($substance, "-")); + // $version = substr($substance, strrpos($substance, "-")+1); + // $substance = substr($substance, 0, strrpos($substance, "-")); + // $package = $substance; + if(!isset($_REQUEST['platform']) || ($_REQUEST['platform'] == $platform)) + { + $hits_info = get_hits($data, $file); + $hits = $hits_info[0]; + $last_download = $hits_info[1]; + if($last_download === NULL) + { + if(!$quiet_mode) + { + $formatted = "never downloaded"; + } + else + { + $formatted = "---"; + } + } + else + { + if(!$quiet_mode) + { + $formatted = $last_download->format("d-m-Y H:i"); + } + else + { + $formatted = $last_download->format("c"); + } + } + if(!$quiet_mode) + { + echo "" . $package ." (" . $version . ") "; + if($hits > 0) + { + echo "$platform (".$hits." hits, last use: ".$formatted.")"; + } + else + { + echo "$platform (".$hits." hits)"; + } + echo "
"; + } + else + { + print $package.";".$version.";".$platform.";"."download.php?file=".$file.";".$hits.";".$formatted."\n"; + } + } + } +} + +?> + diff --git a/node_modules/npm-mas-mas/servfactor/packages/README.md b/node_modules/npm-mas-mas/servfactor/packages/README.md new file mode 100644 index 0000000..734fc3d --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/packages/README.md @@ -0,0 +1,2 @@ +packages dir + diff --git a/node_modules/npm-mas-mas/servfactor/stats.php b/node_modules/npm-mas-mas/servfactor/stats.php new file mode 100644 index 0000000..1ab9900 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/stats.php @@ -0,0 +1,68 @@ +
"; + }; + $f = fopen($stats, 'r'); + $data = fread($f, filesize($stats)); + $data = unserialize($data); + fclose($f); + } + else + { + $data = array(); + } + + return $data; +} + +function inc_stats($data, $key) +{ + $key = basename($key); + + if(array_key_exists($key, $data)) + { + $data[$key][0] = $data[$key][0] + 1; + $data[$key][1] = new DateTime('NOW'); + } + else + { + $data[$key] = array(1, new DateTime('NOW')); + } + return $data; +} + +function get_hits($data, $key) +{ + $key = basename($key); + + if(array_key_exists($key, $data)) + { + return $data[$key]; + } + else + { + return array(0, NULL); + } +} + +function write_stats($data) +{ + global $stats; + $f = fopen($stats, 'w'); + $data = serialize($data); + fwrite($f, $data); + fclose($f); +} + +?> + diff --git a/node_modules/npm-mas-mas/servfactor/upload.php b/node_modules/npm-mas-mas/servfactor/upload.php new file mode 100644 index 0000000..f57bc22 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/upload.php @@ -0,0 +1,76 @@ +" . $artifacts . ""; + +if(!is_writable($artifacts)) +{ + echo "I don't have permission
"; + exit(1); +} + +$uploaded_file = $artifacts . "/" . basename($_FILES['uploaded']['name']); + +// if(isset($_FILES['uploaded']) && file_exists($uploaded_file)) +// { +// echo "file: ".$uploaded_file." already esxists!"; +// exit(1); +// } + +if (move_uploaded_file($_FILES['uploaded']['tmp_name'], $uploaded_file)) +{ + echo "El fichero es valido y se subio con exito: ". $uploaded_file .".\n"; +} +else +{ +?> +
+ Enviar este fichero: + +
+
+ + diff --git a/node_modules/npm-mas-mas/servfactor/util.php b/node_modules/npm-mas-mas/servfactor/util.php new file mode 100755 index 0000000..ac69f78 --- /dev/null +++ b/node_modules/npm-mas-mas/servfactor/util.php @@ -0,0 +1,2584 @@ + + * @link http://github.com/brandonwamboldt/utilphp/ Official Documentation + */ +class util +{ + /** + * A constant representing the number of seconds in a minute, for + * making code more verbose + * + * @var integer + */ + const SECONDS_IN_A_MINUTE = 60; + + /** + * A constant representing the number of seconds in an hour, for making + * code more verbose + * + * @var integer + */ + const SECONDS_IN_A_HOUR = 3600; + const SECONDS_IN_AN_HOUR = 3600; + + /** + * A constant representing the number of seconds in a day, for making + * code more verbose + * + * @var integer + */ + const SECONDS_IN_A_DAY = 86400; + + /** + * A constant representing the number of seconds in a week, for making + * code more verbose + * + * @var integer + */ + const SECONDS_IN_A_WEEK = 604800; + + /** + * A constant representing the number of seconds in a month (30 days), + * for making code more verbose + * + * @var integer + */ + const SECONDS_IN_A_MONTH = 2592000; + + /** + * A constant representing the number of seconds in a year (365 days), + * for making code more verbose + * + * @var integer + */ + const SECONDS_IN_A_YEAR = 31536000; + + /** + * URL constants as defined in the PHP Manual under "Constants usable with + * http_build_url()". + * + * @see http://us2.php.net/manual/en/http.constants.php#http.constants.url + */ + const HTTP_URL_REPLACE = 1; + const HTTP_URL_JOIN_PATH = 2; + const HTTP_URL_JOIN_QUERY = 4; + const HTTP_URL_STRIP_USER = 8; + const HTTP_URL_STRIP_PASS = 16; + const HTTP_URL_STRIP_AUTH = 32; + const HTTP_URL_STRIP_PORT = 64; + const HTTP_URL_STRIP_PATH = 128; + const HTTP_URL_STRIP_QUERY = 256; + const HTTP_URL_STRIP_FRAGMENT = 512; + const HTTP_URL_STRIP_ALL = 1024; + + /** + * A collapse icon, using in the dump_var function to allow collapsing + * an array or object + * + * @var string + */ + public static $icon_collapse = 
'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFNzFDNDQyNEMyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFNzFDNDQyM0MyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NDlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PuF4AWkAAAA2UExURU9t2DBStczM/1h16DNmzHiW7iNFrypMvrnD52yJ4ezs7Onp6ejo6P///+Tk5GSG7D9h5SRGq0Q2K74AAAA/SURBVHjaLMhZDsAgDANRY3ZISnP/y1ZWeV+jAeuRSky6cKL4ryDdSggP8UC7r6GvR1YHxjazPQDmVzI/AQYAnFQDdVSJ80EAAAAASUVORK5CYII='; + + /** + * A collapse icon, using in the dump_var function to allow collapsing + * an array or object + * + * @var string + */ + public static $icon_expand = 'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFQzZERTJDNEMyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFQzZERTJDM0MyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3MzlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PkmDvWIAAABIUExURU9t2MzM/3iW7ubm59/f5urq85mZzOvr6////9ra38zMzObm5rfB8FZz5myJ4SNFrypMvjBStTNmzOvr+mSG7OXl8T9h5SRGq/OfqCEAAABKSURBVHjaFMlbEoAwCEPRULXF2jdW9r9T4czcyUdA4XWB0IgdNSybxU9amMzHzDlPKKu7Fd1e6+wY195jW0ARYZECxPq5Gn8BBgCr0gQmxpjKAwAAAABJRU5ErkJggg=='; + + private static $hasArray = false; + + /** + * Map of special non-ASCII characters and suitable ASCII replacement + * characters. 
+ * + * Part of the URLify.php Project + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + public static $maps = array( + 'de' => array(/* German */ + 'Ä' => 'Ae', 'Ö' => 'Oe', 'Ü' => 'Ue', 'ä' => 'ae', 'ö' => 'oe', 'ü' => 'ue', 'ß' => 'ss', + 'ẞ' => 'SS' + ), + 'latin' => array( + 'À' => 'A', 'Á' => 'A', 'Â' => 'A', 'Ã' => 'A', 'Ä' => 'A', 'Å' => 'A','Ă' => 'A', 'Æ' => 'AE', 'Ç' => + 'C', 'È' => 'E', 'É' => 'E', 'Ê' => 'E', 'Ë' => 'E', 'Ì' => 'I', 'Í' => 'I', 'Î' => 'I', + 'Ï' => 'I', 'Ð' => 'D', 'Ñ' => 'N', 'Ò' => 'O', 'Ó' => 'O', 'Ô' => 'O', 'Õ' => 'O', 'Ö' => + 'O', 'Ő' => 'O', 'Ø' => 'O','Ș' => 'S','Ț' => 'T', 'Ù' => 'U', 'Ú' => 'U', 'Û' => 'U', 'Ü' => 'U', 'Ű' => 'U', + 'Ý' => 'Y', 'Þ' => 'TH', 'ß' => 'ss', 'à' => 'a', 'á' => 'a', 'â' => 'a', 'ã' => 'a', 'ä' => + 'a', 'å' => 'a', 'ă' => 'a', 'æ' => 'ae', 'ç' => 'c', 'è' => 'e', 'é' => 'e', 'ê' => 'e', 'ë' => 'e', + 'ì' => 'i', 'í' => 'i', 'î' => 'i', 'ï' => 'i', 'ð' => 'd', 'ñ' => 'n', 'ò' => 'o', 'ó' => + 'o', 'ô' => 'o', 'õ' => 'o', 'ö' => 'o', 'ő' => 'o', 'ø' => 'o', 'ș' => 's', 'ț' => 't', 'ù' => 'u', 'ú' => 'u', + 'û' => 'u', 'ü' => 'u', 'ű' => 'u', 'ý' => 'y', 'þ' => 'th', 'ÿ' => 'y' + ), + 'latin_symbols' => array( + '©' => '(c)', + '®' => '(r)' + ), + 'el' => array(/* Greek */ + 'α' => 'a', 'β' => 'b', 'γ' => 'g', 'δ' => 'd', 'ε' => 'e', 'ζ' => 'z', 'η' => 'h', 'θ' => '8', + 'ι' => 'i', 'κ' => 'k', 'λ' => 'l', 'μ' => 'm', 'ν' => 'n', 'ξ' => '3', 'ο' => 'o', 'π' => 'p', + 'ρ' => 'r', 'σ' => 's', 'τ' => 't', 'υ' => 'y', 'φ' => 'f', 'χ' => 'x', 'ψ' => 'ps', 'ω' => 'w', + 'ά' => 'a', 'έ' => 'e', 'ί' => 'i', 'ό' => 'o', 'ύ' => 'y', 'ή' => 'h', 'ώ' => 'w', 'ς' => 's', + 'ϊ' => 'i', 'ΰ' => 'y', 'ϋ' => 'y', 'ΐ' => 'i', + 'Α' => 'A', 'Β' => 'B', 'Γ' => 'G', 'Δ' => 'D', 'Ε' => 'E', 'Ζ' => 'Z', 'Η' => 'H', 'Θ' => '8', + 'Ι' => 'I', 'Κ' => 'K', 'Λ' => 'L', 'Μ' => 'M', 'Ν' => 'N', 'Ξ' => '3', 'Ο' => 'O', 'Π' => 'P', + 'Ρ' => 'R', 'Σ' => 'S', 'Τ' => 'T', 'Υ' => 'Y', 'Φ' => 'F', 'Χ' => 'X', 'Ψ' => 'PS', 'Ω' => 'W', + 'Ά' => 'A', 'Έ' => 'E', 'Ί' => 'I', 'Ό' => 'O', 'Ύ' => 'Y', 'Ή' => 'H', 'Ώ' => 'W', 'Ϊ' => 'I', + 'Ϋ' => 'Y' + ), + 'tr' => array(/* Turkish */ + 'ş' => 's', 'Ş' => 'S', 'ı' => 'i', 'İ' => 'I', 'ç' => 'c', 'Ç' => 'C', 'ü' => 'u', 'Ü' => 'U', + 'ö' => 'o', 'Ö' => 'O', 'ğ' => 'g', 'Ğ' => 'G' + ), + 'ru' => array(/* Russian */ + 'а' => 'a', 'б' => 'b', 'в' => 'v', 'г' => 'g', 'д' => 'd', 'е' => 'e', 'ё' => 'yo', 'ж' => 'zh', + 'з' => 'z', 'и' => 'i', 'й' => 'j', 'к' => 'k', 'л' => 'l', 'м' => 'm', 'н' => 'n', 'о' => 'o', + 'п' => 'p', 'р' => 'r', 'с' => 's', 'т' => 't', 'у' => 'u', 'ф' => 'f', 'х' => 'h', 'ц' => 'c', + 'ч' => 'ch', 'ш' => 'sh', 'щ' => 'sh', 'ъ' => '', 'ы' => 'y', 'ь' => '', 'э' => 'e', 'ю' => 'yu', + 'я' => 'ya', + 'А' => 'A', 'Б' => 'B', 'В' => 'V', 'Г' => 'G', 'Д' => 'D', 'Е' => 'E', 'Ё' => 'Yo', 'Ж' => 'Zh', + 'З' => 'Z', 'И' => 'I', 'Й' => 'J', 'К' => 'K', 'Л' => 'L', 'М' => 'M', 'Н' => 'N', 'О' => 'O', + 'П' => 'P', 'Р' => 'R', 'С' => 'S', 'Т' => 'T', 'У' => 'U', 'Ф' => 'F', 'Х' => 'H', 'Ц' => 'C', + 'Ч' => 'Ch', 'Ш' => 'Sh', 'Щ' => 'Sh', 'Ъ' => '', 'Ы' => 'Y', 'Ь' => '', 'Э' => 'E', 'Ю' => 'Yu', + 'Я' => 'Ya', + '№' => '' + ), + 'uk' => array(/* Ukrainian */ + 'Є' => 'Ye', 'І' => 'I', 'Ї' => 'Yi', 'Ґ' => 'G', 'є' => 'ye', 'і' => 'i', 'ї' => 'yi', 'ґ' => 'g' + ), + 'cs' => array(/* Czech */ + 'č' => 'c', 'ď' => 'd', 'ě' => 'e', 'ň' => 'n', 'ř' => 'r', 'š' => 's', 'ť' => 't', 'ů' => 'u', + 'ž' => 'z', 'Č' => 'C', 'Ď' => 'D', 'Ě' => 'E', 'Ň' => 'N', 'Ř' => 'R', 'Š' => 'S', 'Ť' => 'T', 
+ 'Ů' => 'U', 'Ž' => 'Z' + ), + 'pl' => array(/* Polish */ + 'ą' => 'a', 'ć' => 'c', 'ę' => 'e', 'ł' => 'l', 'ń' => 'n', 'ó' => 'o', 'ś' => 's', 'ź' => 'z', + 'ż' => 'z', 'Ą' => 'A', 'Ć' => 'C', 'Ę' => 'e', 'Ł' => 'L', 'Ń' => 'N', 'Ó' => 'O', 'Ś' => 'S', + 'Ź' => 'Z', 'Ż' => 'Z' + ), + 'ro' => array(/* Romanian */ + 'ă' => 'a', 'â' => 'a', 'î' => 'i', 'ș' => 's', 'ț' => 't', 'Ţ' => 'T', 'ţ' => 't' + ), + 'lv' => array(/* Latvian */ + 'ā' => 'a', 'č' => 'c', 'ē' => 'e', 'ģ' => 'g', 'ī' => 'i', 'ķ' => 'k', 'ļ' => 'l', 'ņ' => 'n', + 'š' => 's', 'ū' => 'u', 'ž' => 'z', 'Ā' => 'A', 'Č' => 'C', 'Ē' => 'E', 'Ģ' => 'G', 'Ī' => 'i', + 'Ķ' => 'k', 'Ļ' => 'L', 'Ņ' => 'N', 'Š' => 'S', 'Ū' => 'u', 'Ž' => 'Z' + ), + 'lt' => array(/* Lithuanian */ + 'ą' => 'a', 'č' => 'c', 'ę' => 'e', 'ė' => 'e', 'į' => 'i', 'š' => 's', 'ų' => 'u', 'ū' => 'u', 'ž' => 'z', + 'Ą' => 'A', 'Č' => 'C', 'Ę' => 'E', 'Ė' => 'E', 'Į' => 'I', 'Š' => 'S', 'Ų' => 'U', 'Ū' => 'U', 'Ž' => 'Z' + ), + 'vn' => array(/* Vietnamese */ + 'Á' => 'A', 'À' => 'A', 'Ả' => 'A', 'Ã' => 'A', 'Ạ' => 'A', 'Ă' => 'A', 'Ắ' => 'A', 'Ằ' => 'A', 'Ẳ' => 'A', 'Ẵ' => 'A', 'Ặ' => 'A', 'Â' => 'A', 'Ấ' => 'A', 'Ầ' => 'A', 'Ẩ' => 'A', 'Ẫ' => 'A', 'Ậ' => 'A', + 'á' => 'a', 'à' => 'a', 'ả' => 'a', 'ã' => 'a', 'ạ' => 'a', 'ă' => 'a', 'ắ' => 'a', 'ằ' => 'a', 'ẳ' => 'a', 'ẵ' => 'a', 'ặ' => 'a', 'â' => 'a', 'ấ' => 'a', 'ầ' => 'a', 'ẩ' => 'a', 'ẫ' => 'a', 'ậ' => 'a', + 'É' => 'E', 'È' => 'E', 'Ẻ' => 'E', 'Ẽ' => 'E', 'Ẹ' => 'E', 'Ê' => 'E', 'Ế' => 'E', 'Ề' => 'E', 'Ể' => 'E', 'Ễ' => 'E', 'Ệ' => 'E', + 'é' => 'e', 'è' => 'e', 'ẻ' => 'e', 'ẽ' => 'e', 'ẹ' => 'e', 'ê' => 'e', 'ế' => 'e', 'ề' => 'e', 'ể' => 'e', 'ễ' => 'e', 'ệ' => 'e', + 'Í' => 'I', 'Ì' => 'I', 'Ỉ' => 'I', 'Ĩ' => 'I', 'Ị' => 'I', 'í' => 'i', 'ì' => 'i', 'ỉ' => 'i', 'ĩ' => 'i', 'ị' => 'i', + 'Ó' => 'O', 'Ò' => 'O', 'Ỏ' => 'O', 'Õ' => 'O', 'Ọ' => 'O', 'Ô' => 'O', 'Ố' => 'O', 'Ồ' => 'O', 'Ổ' => 'O', 'Ỗ' => 'O', 'Ộ' => 'O', 'Ơ' => 'O', 'Ớ' => 'O', 'Ờ' => 'O', 'Ở' => 'O', 'Ỡ' => 'O', 'Ợ' => 'O', + 'ó' => 'o', 'ò' => 'o', 'ỏ' => 'o', 'õ' => 'o', 'ọ' => 'o', 'ô' => 'o', 'ố' => 'o', 'ồ' => 'o', 'ổ' => 'o', 'ỗ' => 'o', 'ộ' => 'o', 'ơ' => 'o', 'ớ' => 'o', 'ờ' => 'o', 'ở' => 'o', 'ỡ' => 'o', 'ợ' => 'o', + 'Ú' => 'U', 'Ù' => 'U', 'Ủ' => 'U', 'Ũ' => 'U', 'Ụ' => 'U', 'Ư' => 'U', 'Ứ' => 'U', 'Ừ' => 'U', 'Ử' => 'U', 'Ữ' => 'U', 'Ự' => 'U', + 'ú' => 'u', 'ù' => 'u', 'ủ' => 'u', 'ũ' => 'u', 'ụ' => 'u', 'ư' => 'u', 'ứ' => 'u', 'ừ' => 'u', 'ử' => 'u', 'ữ' => 'u', 'ự' => 'u', + 'Ý' => 'Y', 'Ỳ' => 'Y', 'Ỷ' => 'Y', 'Ỹ' => 'Y', 'Ỵ' => 'Y', 'ý' => 'y', 'ỳ' => 'y', 'ỷ' => 'y', 'ỹ' => 'y', 'ỵ' => 'y', + 'Đ' => 'D', 'đ' => 'd' + ), + 'ar' => array(/* Arabic */ + 'أ' => 'a', 'ب' => 'b', 'ت' => 't', 'ث' => 'th', 'ج' => 'g', 'ح' => 'h', 'خ' => 'kh', 'د' => 'd', + 'ذ' => 'th', 'ر' => 'r', 'ز' => 'z', 'س' => 's', 'ش' => 'sh', 'ص' => 's', 'ض' => 'd', 'ط' => 't', + 'ظ' => 'th', 'ع' => 'aa', 'غ' => 'gh', 'ف' => 'f', 'ق' => 'k', 'ك' => 'k', 'ل' => 'l', 'م' => 'm', + 'ن' => 'n', 'ه' => 'h', 'و' => 'o', 'ي' => 'y' + ), + 'sr' => array(/* Serbian */ + 'ђ' => 'dj', 'ј' => 'j', 'љ' => 'lj', 'њ' => 'nj', 'ћ' => 'c', 'џ' => 'dz', 'đ' => 'dj', + 'Ђ' => 'Dj', 'Ј' => 'j', 'Љ' => 'Lj', 'Њ' => 'Nj', 'Ћ' => 'C', 'Џ' => 'Dz', 'Đ' => 'Dj' + ), + 'az' => array(/* Azerbaijani */ + 'ç' => 'c', 'ə' => 'e', 'ğ' => 'g', 'ı' => 'i', 'ö' => 'o', 'ş' => 's', 'ü' => 'u', + 'Ç' => 'C', 'Ə' => 'E', 'Ğ' => 'G', 'İ' => 'I', 'Ö' => 'O', 'Ş' => 'S', 'Ü' => 'U' + ), + 'fi' => array(/* Finnish */ + 'ä' => 'a', + 'ö' => 'o' + ), + ); + + /** + * The character map for the 
designated language + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $map = array(); + + /** + * The character list as a string. + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $chars = ''; + + /** + * The character list as a regular expression. + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $regex = ''; + + /** + * The current language + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static $language = ''; + + /** + * Initializes the character map. + * + * Part of the URLify.php Project + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + */ + private static function initLanguageMap($language = '') + { + if (count(self::$map) > 0 && (($language == '') || ($language == self::$language))) { + return; + } + + // Is a specific map associated with $language? + if (isset(self::$maps[$language]) && is_array(self::$maps[$language])) { + // Move this map to end. This means it will have priority over others + $m = self::$maps[$language]; + unset(self::$maps[$language]); + self::$maps[$language] = $m; + } + + // Reset static vars + self::$language = $language; + self::$map = array(); + self::$chars = ''; + + foreach (self::$maps as $map) { + foreach ($map as $orig => $conv) { + self::$map[$orig] = $conv; + self::$chars .= $orig; + } + } + + self::$regex = '/[' . self::$chars . ']/u'; + } + + /** + * Remove the duplicates from an array. + * + * This is faster version than the builtin array_unique(). + * + * Notes on time requirements: + * array_unique -> O(n log n) + * array_flip -> O(n) + * + * http://stackoverflow.com/questions/8321620/array-unique-vs-array-flip + * http://php.net/manual/en/function.array-unique.php + * + * @param $array + * @return $array + */ + public static function fast_array_unique($array) + { + $array = array_keys(array_flip($array)); + + return $array; + } + + /** + * Access an array index, retrieving the value stored there if it + * exists or a default if it does not. This function allows you to + * concisely access an index which may or may not exist without + * raising a warning. 
+ * + * @param array $var Array value to access + * @param mixed $default Default value to return if the key is not + * present in the array + * @return mixed + */ + public static function array_get(&$var, $default = null) + { + if (isset($var)) { + return $var; + } + + return $default; + } + + /** + * Display a variable's contents using nice HTML formatting and will + * properly display the value of booleans as true or false + * + * @see recursiveVarDumpHelper() + * + * @param mixed $var The variable to dump + * @return string + */ + public static function var_dump($var, $return = false, $expandLevel = 1) + { + self::$hasArray = false; + $toggScript = 'var colToggle = function(toggID) {var img = document.getElementById(toggID);if (document.getElementById(toggID + "-collapsable").style.display == "none") {document.getElementById(toggID + "-collapsable").style.display = "inline";setImg(toggID, 0);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling;while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}} else {document.getElementById(toggID + "-collapsable").style.display = "none";setImg(toggID, 1);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling; while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}}};'; + $imgScript = 'var setImg = function(objID,imgID,addStyle) {var imgStore = ["data:image/png;base64,' . self::$icon_collapse . '", "data:image/png;base64,' . self::$icon_expand . '"];if (objID) {document.getElementById(objID).setAttribute("src", imgStore[imgID]);if (addStyle){document.getElementById(objID).setAttribute("style", "position:relative;left:-5px;top:-1px;cursor:pointer;");}}};'; + $jsCode = preg_replace('/ +/', ' ', ''); + $html = '
';
+        $done  = array();
+        $html .= self::recursiveVarDumpHelper($var, intval($expandLevel), 0, $done);
+        $html .= '
'; + + if (self::$hasArray) { + $html = $jsCode . $html; + } + + if (!$return) { + echo $html; + } + + return $html; + } + + /** + * Display a variable's contents using nice HTML formatting (Without + * the
<pre> tag) and will properly display the values of variables
+     * like booleans and resources. Supports collapsable arrays and objects
+     * as well.
+     *
+     * @param  mixed $var The variable to dump
+     * @return string
+     */
+    protected static function recursiveVarDumpHelper($var, $expLevel, $depth = 0, $done = array())
+    {
+        $html = '';
+
+        if ($expLevel > 0) {
+            $expLevel--;
+            $setImg = 0;
+            $setStyle = 'display:inline;';
+        } elseif ($expLevel == 0) {
+            $setImg = 1;
+            $setStyle = 'display:none;';
+        } elseif ($expLevel < 0) {
+            $setImg = 0;
+            $setStyle = 'display:inline;';
+        }
+
+        if (is_bool($var)) {
+            $html .= 'bool(' . (($var) ? 'true' : 'false') . ')';
+        } elseif (is_int($var)) {
+            $html .= 'int(' . $var . ')';
+        } elseif (is_float($var)) {
+            $html .= 'float(' . $var . ')';
+        } elseif (is_string($var)) {
+            $html .= 'string(' . strlen($var) . ') "' . self::htmlentities($var) . '"';
+        } elseif (is_null($var)) {
+            $html .= 'NULL';
+        } elseif (is_resource($var)) {
+            $html .= 'resource("' . get_resource_type($var) . '") "' . $var . '"';
+        } elseif (is_array($var)) {
+            // Check for recursion
+            if ($depth > 0) {
+                foreach ($done as $prev) {
+                    if ($prev === $var) {
+                        $html .= 'array(' . count($var) . ') *RECURSION DETECTED*';
+                        return $html;
+                    }
+                }
+
+                // Keep track of variables we have already processed to detect recursion
+                $done[] = &$var;
+            }
+
+            self::$hasArray = true;
+            $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000);
+
+            $html .= (!empty($var) ? ' ' : '') . 'array(' . count($var) . ')';
+            if (!empty($var)) {
+                $html .= ' 
[
'; + + $indent = 4; + $longest_key = 0; + + foreach ($var as $key => $value) { + if (is_string($key)) { + $longest_key = max($longest_key, strlen($key) + 2); + } else { + $longest_key = max($longest_key, strlen($key)); + } + } + + foreach ($var as $key => $value) { + if (is_numeric($key)) { + $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); + } else { + $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); + } + + $html .= ' => '; + + $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); + + foreach ($value as $line => $val) { + if ($line != 0) { + $value[$line] = str_repeat(' ', $indent * 2) . $val; + } + } + + $html .= implode('
', $value) . '
'; + } + + $html .= ']
'; + } + } elseif (is_object($var)) { + // Check for recursion + foreach ($done as $prev) { + if ($prev === $var) { + $html .= 'object(' . get_class($var) . ') *RECURSION DETECTED*'; + return $html; + } + } + + // Keep track of variables we have already processed to detect recursion + $done[] = &$var; + + self::$hasArray=true; + $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000); + + $html .= ' object(' . get_class($var) . ')
[
'; + + $varArray = (array) $var; + + $indent = 4; + $longest_key = 0; + + foreach ($varArray as $key => $value) { + if (substr($key, 0, 2) == "\0*") { + unset($varArray[$key]); + $key = 'protected:' . substr($key, 3); + $varArray[$key] = $value; + } elseif (substr($key, 0, 1) == "\0") { + unset($varArray[$key]); + $key = 'private:' . substr($key, 1, strpos(substr($key, 1), "\0")) . ':' . substr($key, strpos(substr($key, 1), "\0") + 2); + $varArray[$key] = $value; + } + + if (is_string($key)) { + $longest_key = max($longest_key, strlen($key) + 2); + } else { + $longest_key = max($longest_key, strlen($key)); + } + } + + foreach ($varArray as $key => $value) { + if (is_numeric($key)) { + $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); + } else { + $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); + } + + $html .= ' => '; + + $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); + + foreach ($value as $line => $val) { + if ($line != 0) { + $value[$line] = str_repeat(' ', $indent * 2) . $val; + } + } + + $html .= implode('
', $value) . '
'; + } + + $html .= ']
'; + } + + return $html; + } + + /** + * Converts any accent characters to their equivalent normal characters + * and converts any other non-alphanumeric characters to dashes, then + * converts any sequence of two or more dashes to a single dash. This + * function generates slugs safe for use as URLs, and if you pass true + * as the second parameter, it will create strings safe for use as CSS + * classes or IDs. + * + * @param string $string A string to convert to a slug + * @param string $separator The string to separate words with + * @param boolean $css_mode Whether or not to generate strings safe for + * CSS classes/IDs (Default to false) + * @return string + */ + public static function slugify($string, $separator = '-', $css_mode = false) + { + // Compatibility with 1.0.* parameter ordering for semver + if ($separator === true || $separator === false) { + $css_mode = $separator; + $separator = '-'; + + // Raise deprecation error + trigger_error( + 'util::slugify() now takes $css_mode as the third parameter, please update your code', + E_USER_DEPRECATED + ); + } + + $slug = preg_replace('/([^a-z0-9]+)/', $separator, strtolower(self::remove_accents($string))); + + if ($css_mode) { + $digits = array('zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine'); + + if (is_numeric(substr($slug, 0, 1))) { + $slug = $digits[substr($slug, 0, 1)] . substr($slug, 1); + } + } + + return $slug; + } + + /** + * Checks to see if a string is utf8 encoded. + * + * NOTE: This function checks for 5-Byte sequences, UTF8 + * has Bytes Sequences with a maximum length of 4. + * + * Written by Tony Ferrara + * + * @param string $string The string to be checked + * @return boolean + */ + public static function seems_utf8($string) + { + if (function_exists('mb_check_encoding')) { + // If mbstring is available, this is significantly faster than + // using PHP regexps. + return mb_check_encoding($string, 'UTF-8'); + } + + // @codeCoverageIgnoreStart + return self::seemsUtf8Regex($string); + // @codeCoverageIgnoreEnd + } + + /** + * A non-Mbstring UTF-8 checker. + * + * @param $string + * @return bool + */ + protected static function seemsUtf8Regex($string) + { + // Obtained from http://stackoverflow.com/a/11709412/430062 with permission. + $regex = '/( + [\xC0-\xC1] # Invalid UTF-8 Bytes + | [\xF5-\xFF] # Invalid UTF-8 Bytes + | \xE0[\x80-\x9F] # Overlong encoding of prior code point + | \xF0[\x80-\x8F] # Overlong encoding of prior code point + | [\xC2-\xDF](?![\x80-\xBF]) # Invalid UTF-8 Sequence Start + | [\xE0-\xEF](?![\x80-\xBF]{2}) # Invalid UTF-8 Sequence Start + | [\xF0-\xF4](?![\x80-\xBF]{3}) # Invalid UTF-8 Sequence Start + | (?<=[\x0-\x7F\xF5-\xFF])[\x80-\xBF] # Invalid UTF-8 Sequence Middle + | (? + * + * @param string $brokenSerializedData + * @return string + */ + public static function fix_broken_serialization($brokenSerializedData) + { + $fixdSerializedData = preg_replace_callback('!s:(\d+):"(.*?)";!', function($matches) { + $snip = $matches[2]; + return 's:' . strlen($snip) . ':"' . $snip . '";'; + }, $brokenSerializedData); + + return $fixdSerializedData; + } + + /** + * Checks to see if the page is being server over SSL or not + * + * @return boolean + */ + public static function is_https() + { + return isset($_SERVER['HTTPS']) && !empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] != 'off'; + } + + /** + * Add or remove query arguments to the URL. 
+ * + * @param mixed $newKey Either newkey or an associative array + * @param mixed $newValue Either newvalue or oldquery or uri + * @param mixed $uri URI or URL to append the queru/queries to. + * @return string + */ + public static function add_query_arg($newKey, $newValue = null, $uri = null) + { + // Was an associative array of key => value pairs passed? + if (is_array($newKey)) { + $newParams = $newKey; + + // Was the URL passed as an argument? + if (!is_null($newValue)) { + $uri = $newValue; + } elseif (!is_null($uri)) { + $uri = $uri; + } else { + $uri = self::array_get($_SERVER['REQUEST_URI'], ''); + } + } else { + $newParams = array($newKey => $newValue); + + // Was the URL passed as an argument? + $uri = is_null($uri) ? self::array_get($_SERVER['REQUEST_URI'], '') : $uri; + } + + // Parse the URI into it's components + $puri = parse_url($uri); + + if (isset($puri['query'])) { + parse_str($puri['query'], $queryParams); + $queryParams = array_merge($queryParams, $newParams); + } elseif (isset($puri['path']) && strstr($puri['path'], '=') !== false) { + $puri['query'] = $puri['path']; + unset($puri['path']); + parse_str($puri['query'], $queryParams); + $queryParams = array_merge($queryParams, $newParams); + } else { + $queryParams = $newParams; + } + + // Strip out any query params that are set to false. + // Properly handle valueless parameters. + foreach ($queryParams as $param => $value) { + if ($value === false) { + unset($queryParams[$param]); + } elseif ($value === null) { + $queryParams[$param] = ''; + } + } + + // Re-construct the query string + $puri['query'] = http_build_query($queryParams); + + // Strip = from valueless parameters. + $puri['query'] = preg_replace('/=(?=&|$)/', '', $puri['query']); + + + // Re-construct the entire URL + $nuri = self::http_build_url($puri); + + // Make the URI consistent with our input + if ($nuri[0] === '/' && strstr($uri, '/') === false) { + $nuri = substr($nuri, 1); + } + + if ($nuri[0] === '?' && strstr($uri, '?') === false) { + $nuri = substr($nuri, 1); + } + + return rtrim($nuri, '?'); + } + + /** + * Removes an item or list from the query string. + * + * @param string|array $keys Query key or keys to remove. + * @param bool $uri When false uses the $_SERVER value + * @return string + */ + public static function remove_query_arg($keys, $uri = null) + { + if (is_array($keys)) { + return self::add_query_arg(array_combine($keys, array_fill(0, count($keys), false)), $uri); + } + + return self::add_query_arg(array($keys => false), $uri); + } + + /** + * Build a URL. + * + * The parts of the second URL will be merged into the first according to + * the flags argument. 
+ * + * @author Jake Smith + * @see https://github.com/jakeasmith/http_build_url/ + * + * @param mixed $url (part(s) of) an URL in form of a string or + * associative array like parse_url() returns + * @param mixed $parts same as the first argument + * @param int $flags a bitmask of binary or'ed HTTP_URL constants; + * HTTP_URL_REPLACE is the default + * @param array $new_url if set, it will be filled with the parts of the + * composed url like parse_url() would return + * @return string + */ + public static function http_build_url($url, $parts = array(), $flags = self::HTTP_URL_REPLACE, &$new_url = array()) + { + is_array($url) || $url = parse_url($url); + is_array($parts) || $parts = parse_url($parts); + + isset($url['query']) && is_string($url['query']) || $url['query'] = null; + isset($parts['query']) && is_string($parts['query']) || $parts['query'] = null; + + $keys = array('user', 'pass', 'port', 'path', 'query', 'fragment'); + + // HTTP_URL_STRIP_ALL and HTTP_URL_STRIP_AUTH cover several other flags. + if ($flags & self::HTTP_URL_STRIP_ALL) { + $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS + | self::HTTP_URL_STRIP_PORT | self::HTTP_URL_STRIP_PATH + | self::HTTP_URL_STRIP_QUERY | self::HTTP_URL_STRIP_FRAGMENT; + } elseif ($flags & self::HTTP_URL_STRIP_AUTH) { + $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS; + } + + // Schema and host are alwasy replaced + foreach (array('scheme', 'host') as $part) { + if (isset($parts[$part])) { + $url[$part] = $parts[$part]; + } + } + + if ($flags & self::HTTP_URL_REPLACE) { + foreach ($keys as $key) { + if (isset($parts[$key])) { + $url[$key] = $parts[$key]; + } + } + } else { + if (isset($parts['path']) && ($flags & self::HTTP_URL_JOIN_PATH)) { + if (isset($url['path']) && substr($parts['path'], 0, 1) !== '/') { + $url['path'] = rtrim( + str_replace(basename($url['path']), '', $url['path']), + '/' + ) . '/' . ltrim($parts['path'], '/'); + } else { + $url['path'] = $parts['path']; + } + } + + if (isset($parts['query']) && ($flags & self::HTTP_URL_JOIN_QUERY)) { + if (isset($url['query'])) { + parse_str($url['query'], $url_query); + parse_str($parts['query'], $parts_query); + + $url['query'] = http_build_query( + array_replace_recursive( + $url_query, + $parts_query + ) + ); + } else { + $url['query'] = $parts['query']; + } + } + } + + if (isset($url['path']) && substr($url['path'], 0, 1) !== '/') { + $url['path'] = '/' . $url['path']; + } + + foreach ($keys as $key) { + $strip = 'HTTP_URL_STRIP_' . strtoupper($key); + if ($flags & constant('utilphp\\util::' . $strip)) { + unset($url[$key]); + } + } + + $parsed_string = ''; + + if (isset($url['scheme'])) { + $parsed_string .= $url['scheme'] . '://'; + } + + if (isset($url['user'])) { + $parsed_string .= $url['user']; + + if (isset($url['pass'])) { + $parsed_string .= ':' . $url['pass']; + } + + $parsed_string .= '@'; + } + + if (isset($url['host'])) { + $parsed_string .= $url['host']; + } + + if (isset($url['port'])) { + $parsed_string .= ':' . $url['port']; + } + + if (!empty($url['path'])) { + $parsed_string .= $url['path']; + } else { + $parsed_string .= '/'; + } + + if (isset($url['query'])) { + $parsed_string .= '?' . $url['query']; + } + + if (isset($url['fragment'])) { + $parsed_string .= '#' . $url['fragment']; + } + + $new_url = $url; + + return $parsed_string; + } + + /** + * Converts many english words that equate to true or false to boolean. + * + * Supports 'y', 'n', 'yes', 'no' and a few other variations. 
+ * + * @param string $string The string to convert to boolean + * @param bool $default The value to return if we can't match any + * yes/no words + * @return boolean + */ + public static function str_to_bool($string, $default = false) + { + $yes_words = 'affirmative|all right|aye|indubitably|most assuredly|ok|of course|okay|sure thing|y|yes+|yea|yep|sure|yeah|true|t|on|1|oui|vrai'; + $no_words = 'no*|no way|nope|nah|na|never|absolutely not|by no means|negative|never ever|false|f|off|0|non|faux'; + + if (preg_match('/^(' . $yes_words . ')$/i', $string)) { + return true; + } elseif (preg_match('/^(' . $no_words . ')$/i', $string)) { + return false; + } + + return $default; + } + + /** + * Check if a string starts with the given string. + * + * @param string $string + * @param string $starts_with + * @return boolean + */ + public static function starts_with($string, $starts_with) + { + return strpos($string, $starts_with) === 0; + } + + /** + * Check if a string ends with the given string. + * + * @param string $string + * @param string $starts_with + * @return boolean + */ + public static function ends_with($string, $ends_with) + { + return substr($string, -strlen($ends_with)) === $ends_with; + } + + /** + * Check if a string contains another string. + * + * @param string $haystack + * @param string $needle + * @return boolean + */ + public static function str_contains($haystack, $needle) + { + return strpos($haystack, $needle) !== false; + } + + /** + * Check if a string contains another string. This version is case + * insensitive. + * + * @param string $haystack + * @param string $needle + * @return boolean + */ + public static function str_icontains($haystack, $needle) + { + return stripos($haystack, $needle) !== false; + } + + /** + * Return the file extension of the given filename. + * + * @param string $filename + * @return string + */ + public static function get_file_ext($filename) + { + return pathinfo($filename, PATHINFO_EXTENSION); + } + + /** + * Removes a directory (and its contents) recursively. + * + * Contributed by Askar (ARACOOL) + * + * @param string $dir The directory to be deleted recursively + * @param bool $traverseSymlinks Delete contents of symlinks recursively + * @return bool + * @throws \RuntimeException + */ + public static function rmdir($dir, $traverseSymlinks = false) + { + if (!file_exists($dir)) { + return true; + } elseif (!is_dir($dir)) { + throw new \RuntimeException('Given path is not a directory'); + } + + if (!is_link($dir) || $traverseSymlinks) { + foreach (scandir($dir) as $file) { + if ($file === '.' || $file === '..') { + continue; + } + + $currentPath = $dir . '/' . $file; + + if (is_dir($currentPath)) { + self::rmdir($currentPath, $traverseSymlinks); + } elseif (!unlink($currentPath)) { + // @codeCoverageIgnoreStart + throw new \RuntimeException('Unable to delete ' . $currentPath); + // @codeCoverageIgnoreEnd + } + } + } + + // Windows treats removing directory symlinks identically to removing directories. + if (is_link($dir) && !defined('PHP_WINDOWS_VERSION_MAJOR')) { + if (!unlink($dir)) { + // @codeCoverageIgnoreStart + throw new \RuntimeException('Unable to delete ' . $dir); + // @codeCoverageIgnoreEnd + } + } else { + if (!rmdir($dir)) { + // @codeCoverageIgnoreStart + throw new \RuntimeException('Unable to delete ' . $dir); + // @codeCoverageIgnoreEnd + } + } + + return true; + } + + /** + * Convert entities, while preserving already-encoded entities. 
+ * + * @param string $string The text to be converted + * @return string + */ + public static function htmlentities($string, $preserve_encoded_entities = false) + { + if ($preserve_encoded_entities) { + // @codeCoverageIgnoreStart + if (defined('HHVM_VERSION')) { + $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES); + } else { + $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES, self::mbInternalEncoding()); + } + // @codeCoverageIgnoreEnd + + $translation_table[chr(38)] = '&'; + return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); + } + + return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); + } + + /** + * Convert >, <, ', " and & to html entities, but preserves entities that + * are already encoded. + * + * @param string $string The text to be converted + * @return string + */ + public static function htmlspecialchars($string, $preserve_encoded_entities = false) + { + if ($preserve_encoded_entities) { + // @codeCoverageIgnoreStart + if (defined('HHVM_VERSION')) { + $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES); + } else { + $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES, self::mbInternalEncoding()); + } + // @codeCoverageIgnoreEnd + + $translation_table[chr(38)] = '&'; + + return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); + } + + return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); + } + + /** + * Transliterates characters to their ASCII equivalents. + * + * Part of the URLify.php Project + * + * @see https://github.com/jbroadway/urlify/blob/master/URLify.php + * + * @param string $text Text that might have not-ASCII characters + * @param string $language Specifies a priority for a specific language. + * @return string Filtered string with replaced "nice" characters + */ + public static function downcode($text, $language = '') + { + self::initLanguageMap($language); + + if (self::seems_utf8($text)) { + if (preg_match_all(self::$regex, $text, $matches)) { + for ($i = 0; $i < count($matches[0]); $i++) { + $char = $matches[0][$i]; + if (isset(self::$map[$char])) { + $text = str_replace($char, self::$map[$char], $text); + } + } + } + } else { + // Not a UTF-8 string so we assume its ISO-8859-1 + $search = "\x80\x83\x8a\x8e\x9a\x9e\x9f\xa2\xa5\xb5\xc0\xc1\xc2\xc3\xc4\xc5\xc7\xc8\xc9\xca\xcb\xcc\xcd"; + $search .= "\xce\xcf\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xe0\xe1\xe2\xe3\xe4\xe5\xe7\xe8\xe9"; + $search .= "\xea\xeb\xec\xed\xee\xef\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xff"; + $text = strtr($text, $search, 'EfSZszYcYuAAAAAACEEEEIIIINOOOOOOUUUUYaaaaaaceeeeiiiinoooooouuuuyy'); + + // These latin characters should be represented by two characters so + // we can't use strtr + $complexSearch = array("\x8c", "\x9c", "\xc6", "\xd0", "\xde", "\xdf", "\xe6", "\xf0", "\xfe"); + $complexReplace = array('OE', 'oe', 'AE', 'DH', 'TH', 'ss', 'ae', 'dh', 'th'); + $text = str_replace($complexSearch, $complexReplace, $text); + } + + return $text; + } + + /** + * Converts all accent characters to ASCII characters. + * + * If there are no accent characters, then the string given is just + * returned. + * + * @param string $string Text that might have accent characters + * @param string $language Specifies a priority for a specific language. 
+ * @return string Filtered string with replaced "nice" characters + */ + public static function remove_accents($string, $language = '') + { + if (!preg_match('/[\x80-\xff]/', $string)) { + return $string; + } + + return self::downcode($string, $language); + } + + /** + * Strip all witespaces from the given string. + * + * @param string $string The string to strip + * @return string + */ + public static function strip_space($string) + { + return preg_replace('/\s+/', '', $string); + } + + /** + * Sanitize a string by performing the following operation : + * - Remove accents + * - Lower the string + * - Remove punctuation characters + * - Strip whitespaces + * + * @param string $string the string to sanitize + * @return string + */ + public static function sanitize_string($string) + { + $string = self::remove_accents($string); + $string = strtolower($string); + $string = preg_replace('/[^a-zA-Z 0-9]+/', '', $string); + $string = self::strip_space($string); + + return $string; + } + + /** + * Pads a given string with zeroes on the left. + * + * @param int $number The number to pad + * @param int $length The total length of the desired string + * @return string + */ + public static function zero_pad($number, $length) + { + return str_pad($number, $length, '0', STR_PAD_LEFT); + } + + /** + * Converts a unix timestamp to a relative time string, such as "3 days ago" + * or "2 weeks ago". + * + * @param int $from The date to use as a starting point + * @param int $to The date to compare to, defaults to now + * @param string $suffix The string to add to the end, defaults to " ago" + * @return string + */ + public static function human_time_diff($from, $to = '', $as_text = false, $suffix = ' ago') + { + if ($to == '') { + $to = time(); + } + + $from = new \DateTime(date('Y-m-d H:i:s', $from)); + $to = new \DateTime(date('Y-m-d H:i:s', $to)); + $diff = $from->diff($to); + + if ($diff->y > 1) { + $text = $diff->y . ' years'; + } elseif ($diff->y == 1) { + $text = '1 year'; + } elseif ($diff->m > 1) { + $text = $diff->m . ' months'; + } elseif ($diff->m == 1) { + $text = '1 month'; + } elseif ($diff->d > 7) { + $text = ceil($diff->d / 7) . ' weeks'; + } elseif ($diff->d == 7) { + $text = '1 week'; + } elseif ($diff->d > 1) { + $text = $diff->d . ' days'; + } elseif ($diff->d == 1) { + $text = '1 day'; + } elseif ($diff->h > 1) { + $text = $diff->h . ' hours'; + } elseif ($diff->h == 1) { + $text = ' 1 hour'; + } elseif ($diff->i > 1) { + $text = $diff->i . ' minutes'; + } elseif ($diff->i == 1) { + $text = '1 minute'; + } elseif ($diff->s > 1) { + $text = $diff->s . ' seconds'; + } else { + $text = '1 second'; + } + + if ($as_text) { + $text = explode(' ', $text, 2); + $text = self::number_to_word($text[0]) . ' ' . $text[1]; + } + + return trim($text) . $suffix; + } + + /** + * Converts a number into the text equivalent. For example, 456 becomes four + * hundred and fifty-six. + * + * Part of the IntToWords Project. 
+ * + * @param int|float $number The number to convert into text + * @return string + */ + public static function number_to_word($number) + { + $number = (string) $number; + + if (strpos($number, '.') !== false) { + list($number, $decimal) = explode('.', $number); + } else { + $decimal = false; + } + + $output = ''; + + if ($number[0] == '-') { + $output = 'negative '; + $number = ltrim($number, '-'); + } elseif ($number[0] == '+') { + $output = 'positive '; + $number = ltrim($number, '+'); + } + + if ($number[0] == '0') { + $output .= 'zero'; + } else { + $length = 19; + $number = str_pad($number, 60, '0', STR_PAD_LEFT); + $group = rtrim(chunk_split($number, 3, ' '), ' '); + $groups = explode(' ', $group); + $groups2 = array(); + + foreach ($groups as $group) { + $group[1] = isset($group[1]) ? $group[1] : null; + $group[2] = isset($group[2]) ? $group[2] : null; + $groups2[] = self::numberToWordThreeDigits($group[0], $group[1], $group[2]); + } + + for ($z = 0; $z < count($groups2); $z++) { + if ($groups2[$z] != '') { + $output .= $groups2[$z] . self::numberToWordConvertGroup($length - $z); + $output .= ($z < $length && ! array_search('', array_slice($groups2, $z + 1, -1)) && $groups2[$length] != '' && $groups[$length][0] == '0' ? ' and ' : ', '); + } + } + + $output = rtrim($output, ', '); + } + + if ($decimal > 0) { + $output .= ' point'; + + for ($i = 0; $i < strlen($decimal); $i++) { + $output .= ' ' . self::numberToWordConvertDigit($decimal[$i]); + } + } + + return $output; + } + + protected static function numberToWordConvertGroup($index) + { + switch($index) { + case 11: + return ' decillion'; + case 10: + return ' nonillion'; + case 9: + return ' octillion'; + case 8: + return ' septillion'; + case 7: + return ' sextillion'; + case 6: + return ' quintrillion'; + case 5: + return ' quadrillion'; + case 4: + return ' trillion'; + case 3: + return ' billion'; + case 2: + return ' million'; + case 1: + return ' thousand'; + case 0: + return ''; + } + + return ''; + } + + protected static function numberToWordThreeDigits($digit1, $digit2, $digit3) + { + $output = ''; + + if ($digit1 == '0' && $digit2 == '0' && $digit3 == '0') { + return ''; + } + + if ($digit1 != '0') { + $output .= self::numberToWordConvertDigit($digit1) . 
' hundred'; + + if ($digit2 != '0' || $digit3 != '0') { + $output .= ' and '; + } + } + if ($digit2 != '0') { + $output .= self::numberToWordTwoDigits($digit2, $digit3); + } elseif ($digit3 != '0') { + $output .= self::numberToWordConvertDigit($digit3); + } + + return $output; + } + + protected static function numberToWordTwoDigits($digit1, $digit2) + { + if ($digit2 == '0') { + switch ($digit1) { + case '1': + return 'ten'; + case '2': + return 'twenty'; + case '3': + return 'thirty'; + case '4': + return 'forty'; + case '5': + return 'fifty'; + case '6': + return 'sixty'; + case '7': + return 'seventy'; + case '8': + return 'eighty'; + case '9': + return 'ninety'; + } + } elseif ($digit1 == '1') { + switch ($digit2) { + case '1': + return 'eleven'; + case '2': + return 'twelve'; + case '3': + return 'thirteen'; + case '4': + return 'fourteen'; + case '5': + return 'fifteen'; + case '6': + return 'sixteen'; + case '7': + return 'seventeen'; + case '8': + return 'eighteen'; + case '9': + return 'nineteen'; + } + } else { + $second_digit = self::numberToWordConvertDigit($digit2); + + switch ($digit1) { + case '2': + return "twenty-{$second_digit}"; + case '3': + return "thirty-{$second_digit}"; + case '4': + return "forty-{$second_digit}"; + case '5': + return "fifty-{$second_digit}"; + case '6': + return "sixty-{$second_digit}"; + case '7': + return "seventy-{$second_digit}"; + case '8': + return "eighty-{$second_digit}"; + case '9': + return "ninety-{$second_digit}"; + } + } + } + + /** + * @param $digit + * @return string + * @throws \LogicException + */ + protected static function numberToWordConvertDigit($digit) + { + switch ($digit) { + case '0': + return 'zero'; + case '1': + return 'one'; + case '2': + return 'two'; + case '3': + return 'three'; + case '4': + return 'four'; + case '5': + return 'five'; + case '6': + return 'six'; + case '7': + return 'seven'; + case '8': + return 'eight'; + case '9': + return 'nine'; + default: + throw new \LogicException('Not a number'); + } + } + + /** + * Calculates percentage of numerator and denominator. + * + * @param int|float $numerator + * @param int|float $denominator + * @param int $decimals + * @param string $dec_point + * @param string $thousands_sep + * @return int|float + */ + public static function calculate_percentage($numerator, $denominator, $decimals = 2, $dec_point = '.', $thousands_sep = ',') + { + return number_format(($numerator / $denominator) * 100, $decimals, $dec_point, $thousands_sep); + } + + /** + * Transmit UTF-8 content headers if the headers haven't already been sent. + * + * @param string $content_type The content type to send out + * @return boolean + */ + public static function utf8_headers($content_type = 'text/html') + { + // @codeCoverageIgnoreStart + if (!headers_sent()) { + header('Content-type: ' . $content_type . '; charset=utf-8'); + + return true; + } + + return false; + // @codeCoverageIgnoreEnd + } + + /** + * Transmit headers that force a browser to display the download file + * dialog. Cross browser compatible. Only fires if headers have not + * already been sent. + * + * @param string $filename The name of the filename to display to + * browsers + * @param string $content The content to output for the download. 
+ * If you don't specify this, just the + * headers will be sent + * @return boolean + */ + public static function force_download($filename, $content = false) + { + // @codeCoverageIgnoreStart + if (!headers_sent()) { + // Required for some browsers + if (ini_get('zlib.output_compression')) { + @ini_set('zlib.output_compression', 'Off'); + } + + header('Pragma: public'); + header('Expires: 0'); + header('Cache-Control: must-revalidate, post-check=0, pre-check=0'); + + // Required for certain browsers + header('Cache-Control: private', false); + + header('Content-Disposition: attachment; filename="' . basename(str_replace('"', '', $filename)) . '";'); + header('Content-Type: application/force-download'); + header('Content-Transfer-Encoding: binary'); + + if ($content) { + header('Content-Length: ' . strlen($content)); + } + + ob_clean(); + flush(); + + if ($content) { + echo $content; + } + + return true; + } + + return false; + // @codeCoverageIgnoreEnd + } + + /** + * Sets the headers to prevent caching for the different browsers. + * + * Different browsers support different nocache headers, so several + * headers must be sent so that all of them get the point that no + * caching should occur + * + * @return boolean + */ + public static function nocache_headers() + { + // @codeCoverageIgnoreStart + if (!headers_sent()) { + header('Expires: Wed, 11 Jan 1984 05:00:00 GMT'); + header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); + header('Cache-Control: no-cache, must-revalidate, max-age=0'); + header('Pragma: no-cache'); + + return true; + } + + return false; + // @codeCoverageIgnoreEnd + } + + /** + * Generates a string of random characters. + * + * @throws LengthException If $length is bigger than the available + * character pool and $no_duplicate_chars is + * enabled + * + * @param integer $length The length of the string to + * generate + * @param boolean $human_friendly Whether or not to make the + * string human friendly by + * removing characters that can be + * confused with other characters ( + * O and 0, l and 1, etc) + * @param boolean $include_symbols Whether or not to include + * symbols in the string. Can not + * be enabled if $human_friendly is + * true + * @param boolean $no_duplicate_chars Whether or not to only use + * characters once in the string. 
+ * @return string + */ + public static function random_string($length = 16, $human_friendly = true, $include_symbols = false, $no_duplicate_chars = false) + { + $nice_chars = 'ABCDEFGHJKLMNPQRSTUVWXYZabcdefhjkmnprstuvwxyz23456789'; + $all_an = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890'; + $symbols = '!@#$%^&*()~_-=+{}[]|:;<>,.?/"\'\\`'; + $string = ''; + + // Determine the pool of available characters based on the given parameters + if ($human_friendly) { + $pool = $nice_chars; + } else { + $pool = $all_an; + + if ($include_symbols) { + $pool .= $symbols; + } + } + + if (!$no_duplicate_chars) { + return substr(str_shuffle(str_repeat($pool, $length)), 0, $length); + } + + // Don't allow duplicate letters to be disabled if the length is + // longer than the available characters + if ($no_duplicate_chars && strlen($pool) < $length) { + throw new \LengthException('$length exceeds the size of the pool and $no_duplicate_chars is enabled'); + } + + // Convert the pool of characters into an array of characters and + // shuffle the array + $pool = str_split($pool); + $poolLength = count($pool); + $rand = mt_rand(0, $poolLength - 1); + + // Generate our string + for ($i = 0; $i < $length; $i++) { + $string .= $pool[$rand]; + + // Remove the character from the array to avoid duplicates + array_splice($pool, $rand, 1); + + // Generate a new number + if (($poolLength - 2 - $i) > 0) { + $rand = mt_rand(0, $poolLength - 2 - $i); + } else { + $rand = 0; + } + } + + return $string; + } + + /** + * Generate secure random string of given length + * If 'openssl_random_pseudo_bytes' is not available + * then generate random string using default function + * + * Part of the Laravel Project + * + * @param int $length length of string + * @return bool + */ + public static function secure_random_string($length = 16) + { + if (function_exists('openssl_random_pseudo_bytes')) { + $bytes = openssl_random_pseudo_bytes($length * 2); + + if ($bytes === false) { + throw new \LengthException('$length is not accurate, unable to generate random string'); + } + + return substr(str_replace(array('/', '+', '='), '', base64_encode($bytes)), 0, $length); + } + + // @codeCoverageIgnoreStart + return static::random_string($length); + // @codeCoverageIgnoreEnd + } + + /** + * Check if a given string matches a given pattern. + * + * Contributed by Abhimanyu Sharma + * + * @param string $pattern Parttern of string exptected + * @param string $string String that need to be matched + * @return bool + */ + public static function match_string($pattern, $string, $caseSensitive = true) + { + if ($pattern == $string) { + return true; + } + + // Preg flags + $flags = $caseSensitive ? '' : 'i'; + + // Escape any regex special characters + $pattern = preg_quote($pattern, '#'); + + // Unescape * which is our wildcard character and change it to .* + $pattern = str_replace('\*', '.*', $pattern); + + return (bool) preg_match('#^' . $pattern . '$#' . $flags, $string); + } + + /** + * Validate an email address. + * + * @param string $possible_email An email address to validate + * @return bool + */ + public static function validate_email($possible_email) + { + return (bool) filter_var($possible_email, FILTER_VALIDATE_EMAIL); + } + + /** + * Return the URL to a user's gravatar. 
+ * + * @param string $email The email of the user + * @param integer $size The size of the gravatar + * @return string + */ + public static function get_gravatar($email, $size = 32) + { + if (self::is_https()) { + $url = 'https://secure.gravatar.com/'; + } else { + $url = 'http://www.gravatar.com/'; + } + + $url .= 'avatar/' . md5($email) . '?s=' . (int) abs($size); + + return $url; + } + + /** + * Turns all of the links in a string into HTML links. + * + * Part of the LinkifyURL Project + * + * @param string $text The string to parse + * @return string + */ + public static function linkify($text) + { + $text = preg_replace('/'/', ''', $text); // IE does not handle ' entity! + $section_html_pattern = '%# Rev:20100913_0900 github.com/jmrware/LinkifyURL + # Section text into HTML tags and everything else. + ( # $1: Everything not HTML tag. + [^<]+(?:(?!... tag. + ]*> # opening tag. + [^<]*(?:(?! # closing tag. + ) # End $2: + %ix'; + + return preg_replace_callback($section_html_pattern, array(__CLASS__, 'linkifyCallback'), $text); + } + + /** + * Callback for the preg_replace in the linkify() method. + * + * Part of the LinkifyURL Project + * + * @param array $matches Matches from the preg_ function + * @return string + */ + protected static function linkifyRegex($text) + { + $url_pattern = '/# Rev:20100913_0900 github.com\/jmrware\/LinkifyURL + # Match http & ftp URL that is not already linkified. + # Alternative 1: URL delimited by (parentheses). + (\() # $1 "(" start delimiter. + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $2: URL. + (\)) # $3: ")" end delimiter. + | # Alternative 2: URL delimited by [square brackets]. + (\[) # $4: "[" start delimiter. + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $5: URL. + (\]) # $6: "]" end delimiter. + | # Alternative 3: URL delimited by {curly braces}. + (\{) # $7: "{" start delimiter. + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $8: URL. + (\}) # $9: "}" end delimiter. + | # Alternative 4: URL delimited by . + (<|&(?:lt|\#60|\#x3c);) # $10: "<" start delimiter (or HTML entity). + ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $11: URL. + (>|&(?:gt|\#62|\#x3e);) # $12: ">" end delimiter (or HTML entity). + | # Alternative 5: URL not delimited by (), [], {} or <>. + (# $13: Prefix proving URL not already linked. + (?: ^ # Can be a beginning of line or string, or + | [^=\s\'"\]] # a non-"=", non-quote, non-"]", followed by + ) \s*[\'"]? # optional whitespace and optional quote; + | [^=\s]\s+ # or... a non-equals sign followed by whitespace. + ) # End $13. Non-prelinkified-proof prefix. + (\b # $14: Other non-delimited URL. + (?:ht|f)tps?:\/\/ # Required literal http, https, ftp or ftps prefix. + [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]+ # All URI chars except "&" (normal*). + (?: # Either on a "&" or at the end of URI. + (?! # Allow a "&" char only if not start of an... + &(?:gt|\#0*62|\#x0*3e); # HTML ">" entity, or + | &(?:amp|apos|quot|\#0*3[49]|\#x0*2[27]); # a [&\'"] entity if + [.!&\',:?;]? # followed by optional punctuation then + (?:[^a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]|$) # a non-URI char or EOS. + ) & # If neg-assertion true, match "&" (special). + [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]* # More non-& URI chars (normal*). + )* # Unroll-the-loop (special normal*)*. + [a-z0-9\-_~$()*+=\/#[\]@%] # Last char can\'t be [.!&\',;:?] + ) # End $14. Other non-delimited URL. 
+ /imx'; + + $url_replace = '$1$4$7$10$13$2$5$8$11$14$3$6$9$12'; + + return preg_replace($url_pattern, $url_replace, $text); + } + + /** + * Callback for the preg_replace in the linkify() method. + * + * Part of the LinkifyURL Project + * + * @param array $matches Matches from the preg_ function + * @return string + */ + protected static function linkifyCallback($matches) + { + if (isset($matches[2])) { + return $matches[2]; + } + + return self::linkifyRegex($matches[1]); + } + + /** + * Return the current URL. + * + * @return string + */ + public static function get_current_url() + { + $url = ''; + + // Check to see if it's over https + $is_https = self::is_https(); + if ($is_https) { + $url .= 'https://'; + } else { + $url .= 'http://'; + } + + // Was a username or password passed? + if (isset($_SERVER['PHP_AUTH_USER'])) { + $url .= $_SERVER['PHP_AUTH_USER']; + + if (isset($_SERVER['PHP_AUTH_PW'])) { + $url .= ':' . $_SERVER['PHP_AUTH_PW']; + } + + $url .= '@'; + } + + + // We want the user to stay on the same host they are currently on, + // but beware of security issues + // see http://shiflett.org/blog/2006/mar/server-name-versus-http-host + $url .= $_SERVER['HTTP_HOST']; + + $port = $_SERVER['SERVER_PORT']; + + // Is it on a non standard port? + if ($is_https && ($port != 443)) { + $url .= ':' . $_SERVER['SERVER_PORT']; + } elseif (!$is_https && ($port != 80)) { + $url .= ':' . $_SERVER['SERVER_PORT']; + } + + // Get the rest of the URL + if (!isset($_SERVER['REQUEST_URI'])) { + // Microsoft IIS doesn't set REQUEST_URI by default + $url .= $_SERVER['PHP_SELF']; + + if (isset($_SERVER['QUERY_STRING'])) { + $url .= '?' . $_SERVER['QUERY_STRING']; + } + } else { + $url .= $_SERVER['REQUEST_URI']; + } + + return $url; + } + + /** + * Returns the IP address of the client. + * + * @param boolean $trust_proxy_headers Whether or not to trust the + * proxy headers HTTP_CLIENT_IP + * and HTTP_X_FORWARDED_FOR. ONLY + * use if your server is behind a + * proxy that sets these values + * @return string + */ + public static function get_client_ip($trust_proxy_headers = false) + { + if (!$trust_proxy_headers) { + return $_SERVER['REMOTE_ADDR']; + } + + if (!empty($_SERVER['HTTP_CLIENT_IP'])) { + $ip = $_SERVER['HTTP_CLIENT_IP']; + } elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) { + $ip = $_SERVER['HTTP_X_FORWARDED_FOR']; + } else { + $ip = $_SERVER['REMOTE_ADDR']; + } + + return $ip; + } + + /** + * Truncate a string to a specified length without cutting a word off. + * + * @param string $string The string to truncate + * @param integer $length The length to truncate the string to + * @param string $append Text to append to the string IF it gets + * truncated, defaults to '...' + * @return string + */ + public static function safe_truncate($string, $length, $append = '...') + { + $ret = substr($string, 0, $length); + $last_space = strrpos($ret, ' '); + + if ($last_space !== false && $string != $ret) { + $ret = substr($ret, 0, $last_space); + } + + if ($ret != $string) { + $ret .= $append; + } + + return $ret; + } + + + /** + * Truncate the string to given length of characters. + * + * @param string $string The variable to truncate + * @param integer $limit The length to truncate the string to + * @param string $append Text to append to the string IF it gets + * truncated, defaults to '...' 
+ * @return string + */ + public static function limit_characters($string, $limit = 100, $append = '...') + { + if (mb_strlen($string) <= $limit) { + return $string; + } + + return rtrim(mb_substr($string, 0, $limit, 'UTF-8')) . $append; + } + + /** + * Truncate the string to given length of words. + * + * @param $string + * @param $limit + * @param string $append + * @return string + */ + public static function limit_words($string, $limit = 100, $append = '...') + { + preg_match('/^\s*+(?:\S++\s*+){1,' . $limit . '}/u', $string, $matches); + + if (!isset($matches[0]) || strlen($string) === strlen($matches[0])) { + return $string; + } + + return rtrim($matches[0]).$append; + } + + /** + * Returns the ordinal version of a number (appends th, st, nd, rd). + * + * @param string $number The number to append an ordinal suffix to + * @return string + */ + public static function ordinal($number) + { + $test_c = abs($number) % 10; + $ext = ((abs($number) % 100 < 21 && abs($number) % 100 > 4) ? 'th' : (($test_c < 4) ? ($test_c < 3) ? ($test_c < 2) ? ($test_c < 1) ? 'th' : 'st' : 'nd' : 'rd' : 'th')); + + return $number . $ext; + } + + /** + * Returns the file permissions as a nice string, like -rw-r--r-- or false + * if the file is not found. + * + * @param string $file The name of the file to get permissions form + * @param int $perms Numerical value of permissions to display as text. + * @return string + */ + public static function full_permissions($file, $perms = null) + { + if (is_null($perms)) { + if (!file_exists($file)) { + return false; + } + $perms = fileperms($file); + } + + if (($perms & 0xC000) == 0xC000) { + // Socket + $info = 's'; + } elseif (($perms & 0xA000) == 0xA000) { + // Symbolic Link + $info = 'l'; + } elseif (($perms & 0x8000) == 0x8000) { + // Regular + $info = '-'; + } elseif (($perms & 0x6000) == 0x6000) { + // Block special + $info = 'b'; + } elseif (($perms & 0x4000) == 0x4000) { + // Directory + $info = 'd'; + } elseif (($perms & 0x2000) == 0x2000) { + // Character special + $info = 'c'; + } elseif (($perms & 0x1000) == 0x1000) { + // FIFO pipe + $info = 'p'; + } else { + // Unknown + $info = 'u'; + } + + // Owner + $info .= (($perms & 0x0100) ? 'r' : '-'); + $info .= (($perms & 0x0080) ? 'w' : '-'); + $info .= (($perms & 0x0040) ? + (($perms & 0x0800) ? 's' : 'x') : + (($perms & 0x0800) ? 'S' : '-')); + + // Group + $info .= (($perms & 0x0020) ? 'r' : '-'); + $info .= (($perms & 0x0010) ? 'w' : '-'); + $info .= (($perms & 0x0008) ? + (($perms & 0x0400) ? 's' : 'x') : + (($perms & 0x0400) ? 'S' : '-')); + + // World + $info .= (($perms & 0x0004) ? 'r' : '-'); + $info .= (($perms & 0x0002) ? 'w' : '-'); + $info .= (($perms & 0x0001) ? + (($perms & 0x0200) ? 't' : 'x') : + (($perms & 0x0200) ? 'T' : '-')); + + return $info; + } + + /** + * Returns the first element in an array. + * + * @param array $array + * @return mixed + */ + public static function array_first(array $array) + { + return reset($array); + } + + /** + * Returns the last element in an array. + * + * @param array $array + * @return mixed + */ + public static function array_last(array $array) + { + return end($array); + } + + /** + * Returns the first key in an array. + * + * @param array $array + * @return int|string + */ + public static function array_first_key(array $array) + { + reset($array); + + return key($array); + } + + /** + * Returns the last key in an array. 
+ * + * @param array $array + * @return int|string + */ + public static function array_last_key(array $array) + { + end($array); + + return key($array); + } + + /** + * Flatten a multi-dimensional array into a one dimensional array. + * + * Contributed by Theodore R. Smith of PHP Experts, Inc. + * + * @param array $array The array to flatten + * @param boolean $preserve_keys Whether or not to preserve array keys. + * Keys from deeply nested arrays will + * overwrite keys from shallowy nested arrays + * @return array + */ + public static function array_flatten(array $array, $preserve_keys = true) + { + $flattened = array(); + + array_walk_recursive($array, function($value, $key) use (&$flattened, $preserve_keys) { + if ($preserve_keys && !is_int($key)) { + $flattened[$key] = $value; + } else { + $flattened[] = $value; + } + }); + + return $flattened; + } + + /** + * Accepts an array, and returns an array of values from that array as + * specified by $field. For example, if the array is full of objects + * and you call util::array_pluck($array, 'name'), the function will + * return an array of values from $array[]->name. + * + * @param array $array An array + * @param string $field The field to get values from + * @param boolean $preserve_keys Whether or not to preserve the + * array keys + * @param boolean $remove_nomatches If the field doesn't appear to be set, + * remove it from the array + * @return array + */ + public static function array_pluck(array $array, $field, $preserve_keys = true, $remove_nomatches = true) + { + $new_list = array(); + + foreach ($array as $key => $value) { + if (is_object($value)) { + if (isset($value->{$field})) { + if ($preserve_keys) { + $new_list[$key] = $value->{$field}; + } else { + $new_list[] = $value->{$field}; + } + } elseif (!$remove_nomatches) { + $new_list[$key] = $value; + } + } else { + if (isset($value[$field])) { + if ($preserve_keys) { + $new_list[$key] = $value[$field]; + } else { + $new_list[] = $value[$field]; + } + } elseif (!$remove_nomatches) { + $new_list[$key] = $value; + } + } + } + + return $new_list; + } + + /** + * Searches for a given value in an array of arrays, objects and scalar + * values. You can optionally specify a field of the nested arrays and + * objects to search in. + * + * @param array $array The array to search + * @param scalar $search The value to search for + * @param string $field The field to search in, if not specified + * all fields will be searched + * @return boolean|scalar False on failure or the array key on success + */ + public static function array_search_deep(array $array, $search, $field = false) + { + // *grumbles* stupid PHP type system + $search = (string) $search; + + foreach ($array as $key => $elem) { + // *grumbles* stupid PHP type system + $key = (string) $key; + + if ($field) { + if (is_object($elem) && $elem->{$field} === $search) { + return $key; + } elseif (is_array($elem) && $elem[$field] === $search) { + return $key; + } elseif (is_scalar($elem) && $elem === $search) { + return $key; + } + } else { + if (is_object($elem)) { + $elem = (array) $elem; + + if (in_array($search, $elem)) { + return $key; + } + } elseif (is_array($elem) && in_array($search, $elem)) { + return $key; + } elseif (is_scalar($elem) && $elem === $search) { + return $key; + } + } + } + + return false; + } + + /** + * Returns an array containing all the elements of arr1 after applying + * the callback function to each one. 
+ * + * @param string $callback Callback function to run for each + * element in each array + * @param array $array An array to run through the callback + * function + * @param boolean $on_nonscalar Whether or not to call the callback + * function on nonscalar values + * (Objects, resources, etc) + * @return array + */ + public static function array_map_deep(array $array, $callback, $on_nonscalar = false) + { + foreach ($array as $key => $value) { + if (is_array($value)) { + $args = array($value, $callback, $on_nonscalar); + $array[$key] = call_user_func_array(array(__CLASS__, __FUNCTION__), $args); + } elseif (is_scalar($value) || $on_nonscalar) { + $array[$key] = call_user_func($callback, $value); + } + } + + return $array; + } + + public static function array_clean(array $array) + { + return array_filter($array); + } + + /** + * Wrapper to prevent errors if the user doesn't have the mbstring + * extension installed. + * + * @param string $encoding + * @return string + */ + protected static function mbInternalEncoding($encoding = null) + { + if (function_exists('mb_internal_encoding')) { + return $encoding ? mb_internal_encoding($encoding) : mb_internal_encoding(); + } + + // @codeCoverageIgnoreStart + return 'UTF-8'; + // @codeCoverageIgnoreEnd + } + + /** + * Set the writable bit on a file to the minimum value that allows the user + * running PHP to write to it. + * + * @param string $filename The filename to set the writable bit on + * @param boolean $writable Whether to make the file writable or not + * @return boolean + */ + public static function set_writable($filename, $writable = true) + { + $stat = @stat($filename); + + if ($stat === false) { + return false; + } + + // We're on Windows + if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { + return true; + } + + list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); + + if ($writable) { + // Set only the user writable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, fileperms($filename) | 0200); + } + + // Set only the group writable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, fileperms($filename) | 0220); + } + + // Set the world writable bit (file isn't owned or grouped by us) + return chmod($filename, fileperms($filename) | 0222); + } else { + // Set only the user writable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, (fileperms($filename) | 0222) ^ 0222); + } + + // Set only the group writable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, (fileperms($filename) | 0222) ^ 0022); + } + + // Set the world writable bit (file isn't owned or grouped by us) + return chmod($filename, (fileperms($filename) | 0222) ^ 0002); + } + } + + /** + * Set the readable bit on a file to the minimum value that allows the user + * running PHP to read to it. 
+ * + * @param string $filename The filename to set the readable bit on + * @param boolean $readable Whether to make the file readable or not + * @return boolean + */ + public static function set_readable($filename, $readable = true) + { + $stat = @stat($filename); + + if ($stat === false) { + return false; + } + + // We're on Windows + if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { + return true; + } + + list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); + + if ($readable) { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, fileperms($filename) | 0400); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, fileperms($filename) | 0440); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, fileperms($filename) | 0444); + } else { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, (fileperms($filename) | 0444) ^ 0444); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, (fileperms($filename) | 0444) ^ 0044); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, (fileperms($filename) | 0444) ^ 0004); + } + } + + /** + * Set the executable bit on a file to the minimum value that allows the + * user running PHP to read to it. + * + * @param string $filename The filename to set the executable bit on + * @param boolean $executable Whether to make the file executable or not + * @return boolean + */ + public static function set_executable($filename, $executable = true) + { + $stat = @stat($filename); + + if ($stat === false) { + return false; + } + + // We're on Windows + if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { + return true; + } + + list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); + + if ($executable) { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, fileperms($filename) | 0100); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, fileperms($filename) | 0110); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, fileperms($filename) | 0111); + } else { + // Set only the user readable bit (file is owned by us) + if ($stat['uid'] == $myuid) { + return chmod($filename, (fileperms($filename) | 0111) ^ 0111); + } + + // Set only the group readable bit (file group is the same as us) + if ($stat['gid'] == $mygid) { + return chmod($filename, (fileperms($filename) | 0111) ^ 0011); + } + + // Set the world readable bit (file isn't owned or grouped by us) + return chmod($filename, (fileperms($filename) | 0111) ^ 0001); + } + } + + /** + * Returns size of a given directory in bytes. + * + * @param string $dir + * @return integer + */ + public static function directory_size($dir) + { + $size = 0; + foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $file => $key) { + if ($key->isFile()) { + $size += $key->getSize(); + } + } + return $size; + } + + /** + * Returns a home directory of current user. 
+ * + * @return string + */ + public static function get_user_directory() + { + if (isset($_SERVER['HOMEDRIVE'])) return $_SERVER['HOMEDRIVE'] . $_SERVER['HOMEPATH']; + else return $_SERVER['HOME']; + } + + /** + * Returns all paths inside a directory. + * + * @param string $dir + * @return array + */ + public static function directory_contents($dir) + { + $contents = array(); + foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::KEY_AS_PATHNAME | \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $pathname => $fi) { + $contents[] = $pathname; + } + natsort($contents); + return $contents; + } +} diff --git a/node_modules/npm-run-path/index.d.ts b/node_modules/npm-run-path/index.d.ts new file mode 100644 index 0000000..af10d41 --- /dev/null +++ b/node_modules/npm-run-path/index.d.ts @@ -0,0 +1,89 @@ +declare namespace npmRunPath { + interface RunPathOptions { + /** + Working directory. + + @default process.cwd() + */ + readonly cwd?: string; + + /** + PATH to be appended. Default: [`PATH`](https://github.com/sindresorhus/path-key). + + Set it to an empty string to exclude the default PATH. + */ + readonly path?: string; + + /** + Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. + + This can be either an absolute path or a path relative to the `cwd` option. + + @default process.execPath + */ + readonly execPath?: string; + } + + interface ProcessEnv { + [key: string]: string | undefined; + } + + interface EnvOptions { + /** + Working directory. + + @default process.cwd() + */ + readonly cwd?: string; + + /** + Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. + */ + readonly env?: ProcessEnv; + + /** + Path to the current Node.js executable. Its directory is pushed to the front of PATH. + + This can be either an absolute path or a path relative to the `cwd` option. + + @default process.execPath + */ + readonly execPath?: string; + } +} + +declare const npmRunPath: { + /** + Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries. + + @returns The augmented path string. + + @example + ``` + import * as childProcess from 'child_process'; + import npmRunPath = require('npm-run-path'); + + console.log(process.env.PATH); + //=> '/usr/local/bin' + + console.log(npmRunPath()); + //=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' + + // `foo` is a locally installed binary + childProcess.execFileSync('foo', { + env: npmRunPath.env() + }); + ``` + */ + (options?: npmRunPath.RunPathOptions): string; + + /** + @returns The augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. 
+ */ + env(options?: npmRunPath.EnvOptions): npmRunPath.ProcessEnv; + + // TODO: Remove this for the next major release + default: typeof npmRunPath; +}; + +export = npmRunPath; diff --git a/node_modules/npm-run-path/index.js b/node_modules/npm-run-path/index.js new file mode 100644 index 0000000..8c94abc --- /dev/null +++ b/node_modules/npm-run-path/index.js @@ -0,0 +1,47 @@ +'use strict'; +const path = require('path'); +const pathKey = require('path-key'); + +const npmRunPath = options => { + options = { + cwd: process.cwd(), + path: process.env[pathKey()], + execPath: process.execPath, + ...options + }; + + let previous; + let cwdPath = path.resolve(options.cwd); + const result = []; + + while (previous !== cwdPath) { + result.push(path.join(cwdPath, 'node_modules/.bin')); + previous = cwdPath; + cwdPath = path.resolve(cwdPath, '..'); + } + + // Ensure the running `node` binary is used + const execPathDir = path.resolve(options.cwd, options.execPath, '..'); + result.push(execPathDir); + + return result.concat(options.path).join(path.delimiter); +}; + +module.exports = npmRunPath; +// TODO: Remove this for the next major release +module.exports.default = npmRunPath; + +module.exports.env = options => { + options = { + env: process.env, + ...options + }; + + const env = {...options.env}; + const path = pathKey({env}); + + options.path = env[path]; + env[path] = module.exports(options); + + return env; +}; diff --git a/node_modules/npm-run-path/license b/node_modules/npm-run-path/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/npm-run-path/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
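The npm-run-path `index.js` shown above walks upward from `cwd`, collecting every ancestor `node_modules/.bin` directory before appending the node executable's directory and the original PATH. A minimal sketch of the result, assuming a POSIX system and a made-up `cwd`:

```js
const path = require('path');
const npmRunPath = require('npm-run-path');

// '/tmp/project' is only an illustrative working directory.
const augmented = npmRunPath({ cwd: '/tmp/project' });

// The front of the returned string lists each ancestor's node_modules/.bin.
console.log(augmented.split(path.delimiter).slice(0, 3));
//=> [ '/tmp/project/node_modules/.bin', '/tmp/node_modules/.bin', '/node_modules/.bin' ]
```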
diff --git a/node_modules/npm-run-path/package.json b/node_modules/npm-run-path/package.json new file mode 100644 index 0000000..feb8c00 --- /dev/null +++ b/node_modules/npm-run-path/package.json @@ -0,0 +1,44 @@ +{ + "name": "npm-run-path", + "version": "4.0.1", + "description": "Get your PATH prepended with locally installed binaries", + "license": "MIT", + "repository": "sindresorhus/npm-run-path", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "npm", + "run", + "path", + "package", + "bin", + "binary", + "binaries", + "script", + "cli", + "command-line", + "execute", + "executable" + ], + "dependencies": { + "path-key": "^3.0.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/npm-run-path/readme.md b/node_modules/npm-run-path/readme.md new file mode 100644 index 0000000..557fbeb --- /dev/null +++ b/node_modules/npm-run-path/readme.md @@ -0,0 +1,115 @@ +# npm-run-path [![Build Status](https://travis-ci.org/sindresorhus/npm-run-path.svg?branch=master)](https://travis-ci.org/sindresorhus/npm-run-path) + +> Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries + +In [npm run scripts](https://docs.npmjs.com/cli/run-script) you can execute locally installed binaries by name. This enables the same outside npm. + + +## Install + +``` +$ npm install npm-run-path +``` + + +## Usage + +```js +const childProcess = require('child_process'); +const npmRunPath = require('npm-run-path'); + +console.log(process.env.PATH); +//=> '/usr/local/bin' + +console.log(npmRunPath()); +//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' + +// `foo` is a locally installed binary +childProcess.execFileSync('foo', { + env: npmRunPath.env() +}); +``` + + +## API + +### npmRunPath(options?) + +Returns the augmented path string. + +#### options + +Type: `object` + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Working directory. + +##### path + +Type: `string`
+Default: [`PATH`](https://github.com/sindresorhus/path-key) + +PATH to be appended.
+Set it to an empty string to exclude the default PATH. + +##### execPath + +Type: `string`
+Default: `process.execPath` + +Path to the current Node.js executable. Its directory is pushed to the front of PATH. + +This can be either an absolute path or a path relative to the [`cwd` option](#cwd). + +### npmRunPath.env(options?) + +Returns the augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. + +#### options + +Type: `object` + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Working directory. + +##### env + +Type: `Object` + +Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. + +##### execPath + +Type: `string`
+Default: `process.execPath` + +Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. + +This can be either an absolute path or a path relative to the [`cwd` option](#cwd). + + +## Related + +- [npm-run-path-cli](https://github.com/sindresorhus/npm-run-path-cli) - CLI for this module +- [execa](https://github.com/sindresorhus/execa) - Execute a locally installed binary + + +--- + +
+Get professional support for this package with a Tidelift subscription + +Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. +
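As a usage sketch for the options documented above, the snippet below spawns a locally installed binary while pointing child processes at a specific Node.js build; the binary name `jest` and the `execPath` value are placeholders, not anything this package provides:

```js
const childProcess = require('child_process');
const npmRunPath = require('npm-run-path');

// Augment the environment so `jest` resolves from node_modules/.bin,
// and child processes use the Node.js binary at the given execPath.
childProcess.execFileSync('jest', ['--ci'], {
	env: npmRunPath.env({ execPath: '/opt/node-14/bin/node' }),
	stdio: 'inherit'
});
```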
diff --git a/node_modules/onetime/index.d.ts b/node_modules/onetime/index.d.ts new file mode 100644 index 0000000..ea84cab --- /dev/null +++ b/node_modules/onetime/index.d.ts @@ -0,0 +1,64 @@ +declare namespace onetime { + interface Options { + /** + Throw an error when called more than once. + + @default false + */ + throw?: boolean; + } +} + +declare const onetime: { + /** + Ensure a function is only called once. When called multiple times it will return the return value from the first call. + + @param fn - Function that should only be called once. + @returns A function that only calls `fn` once. + + @example + ``` + import onetime = require('onetime'); + + let i = 0; + + const foo = onetime(() => ++i); + + foo(); //=> 1 + foo(); //=> 1 + foo(); //=> 1 + + onetime.callCount(foo); //=> 3 + ``` + */ + ( + fn: (...arguments: ArgumentsType) => ReturnType, + options?: onetime.Options + ): (...arguments: ArgumentsType) => ReturnType; + + /** + Get the number of times `fn` has been called. + + @param fn - Function to get call count from. + @returns A number representing how many times `fn` has been called. + + @example + ``` + import onetime = require('onetime'); + + const foo = onetime(() => {}); + foo(); + foo(); + foo(); + + console.log(onetime.callCount(foo)); + //=> 3 + ``` + */ + callCount(fn: (...arguments: any[]) => unknown): number; + + // TODO: Remove this for the next major release + default: typeof onetime; +}; + +export = onetime; diff --git a/node_modules/onetime/index.js b/node_modules/onetime/index.js new file mode 100644 index 0000000..99c5fc1 --- /dev/null +++ b/node_modules/onetime/index.js @@ -0,0 +1,44 @@ +'use strict'; +const mimicFn = require('mimic-fn'); + +const calledFunctions = new WeakMap(); + +const onetime = (function_, options = {}) => { + if (typeof function_ !== 'function') { + throw new TypeError('Expected a function'); + } + + let returnValue; + let callCount = 0; + const functionName = function_.displayName || function_.name || ''; + + const onetime = function (...arguments_) { + calledFunctions.set(onetime, ++callCount); + + if (callCount === 1) { + returnValue = function_.apply(this, arguments_); + function_ = null; + } else if (options.throw === true) { + throw new Error(`Function \`${functionName}\` can only be called once`); + } + + return returnValue; + }; + + mimicFn(onetime, function_); + calledFunctions.set(onetime, callCount); + + return onetime; +}; + +module.exports = onetime; +// TODO: Remove this for the next major release +module.exports.default = onetime; + +module.exports.callCount = function_ => { + if (!calledFunctions.has(function_)) { + throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`); + } + + return calledFunctions.get(function_); +}; diff --git a/node_modules/onetime/license b/node_modules/onetime/license new file mode 100644 index 0000000..fa7ceba --- /dev/null +++ b/node_modules/onetime/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included 
in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/onetime/package.json b/node_modules/onetime/package.json new file mode 100644 index 0000000..54caea5 --- /dev/null +++ b/node_modules/onetime/package.json @@ -0,0 +1,43 @@ +{ + "name": "onetime", + "version": "5.1.2", + "description": "Ensure a function is only called once", + "license": "MIT", + "repository": "sindresorhus/onetime", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "once", + "function", + "one", + "onetime", + "func", + "fn", + "single", + "call", + "called", + "prevent" + ], + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/onetime/readme.md b/node_modules/onetime/readme.md new file mode 100644 index 0000000..2d133d3 --- /dev/null +++ b/node_modules/onetime/readme.md @@ -0,0 +1,94 @@ +# onetime [![Build Status](https://travis-ci.com/sindresorhus/onetime.svg?branch=master)](https://travis-ci.com/github/sindresorhus/onetime) + +> Ensure a function is only called once + +When called multiple times it will return the return value from the first call. + +*Unlike the module [once](https://github.com/isaacs/once), this one isn't naughty and extending `Function.prototype`.* + +## Install + +``` +$ npm install onetime +``` + +## Usage + +```js +const onetime = require('onetime'); + +let i = 0; + +const foo = onetime(() => ++i); + +foo(); //=> 1 +foo(); //=> 1 +foo(); //=> 1 + +onetime.callCount(foo); //=> 3 +``` + +```js +const onetime = require('onetime'); + +const foo = onetime(() => {}, {throw: true}); + +foo(); + +foo(); +//=> Error: Function `foo` can only be called once +``` + +## API + +### onetime(fn, options?) + +Returns a function that only calls `fn` once. + +#### fn + +Type: `Function` + +Function that should only be called once. + +#### options + +Type: `object` + +##### throw + +Type: `boolean`\ +Default: `false` + +Throw an error when called more than once. + +### onetime.callCount(fn) + +Returns a number representing how many times `fn` has been called. + +Note: It throws an error if you pass in a function that is not wrapped by `onetime`. + +```js +const onetime = require('onetime'); + +const foo = onetime(() => {}); + +foo(); +foo(); +foo(); + +console.log(onetime.callCount(foo)); +//=> 3 +``` + +#### fn + +Type: `Function` + +Function to get call count from. + +## onetime for enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of onetime and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. 
[Learn more.](https://tidelift.com/subscription/pkg/npm-onetime?utm_source=npm-onetime&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/path-key/index.d.ts b/node_modules/path-key/index.d.ts new file mode 100644 index 0000000..7c575d1 --- /dev/null +++ b/node_modules/path-key/index.d.ts @@ -0,0 +1,40 @@ +/// + +declare namespace pathKey { + interface Options { + /** + Use a custom environment variables object. Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env). + */ + readonly env?: {[key: string]: string | undefined}; + + /** + Get the PATH key for a specific platform. Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform). + */ + readonly platform?: NodeJS.Platform; + } +} + +declare const pathKey: { + /** + Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform. + + @example + ``` + import pathKey = require('path-key'); + + const key = pathKey(); + //=> 'PATH' + + const PATH = process.env[key]; + //=> '/usr/local/bin:/usr/bin:/bin' + ``` + */ + (options?: pathKey.Options): string; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function pathKey(options?: pathKey.Options): string; + // export = pathKey; + default: typeof pathKey; +}; + +export = pathKey; diff --git a/node_modules/path-key/index.js b/node_modules/path-key/index.js new file mode 100644 index 0000000..0cf6415 --- /dev/null +++ b/node_modules/path-key/index.js @@ -0,0 +1,16 @@ +'use strict'; + +const pathKey = (options = {}) => { + const environment = options.env || process.env; + const platform = options.platform || process.platform; + + if (platform !== 'win32') { + return 'PATH'; + } + + return Object.keys(environment).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path'; +}; + +module.exports = pathKey; +// TODO: Remove this for the next major release +module.exports.default = pathKey; diff --git a/node_modules/path-key/license b/node_modules/path-key/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/path-key/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/path-key/package.json b/node_modules/path-key/package.json new file mode 100644 index 0000000..c8cbd38 --- /dev/null +++ b/node_modules/path-key/package.json @@ -0,0 +1,39 @@ +{ + "name": "path-key", + "version": "3.1.1", + "description": "Get the PATH environment variable key cross-platform", + "license": "MIT", + "repository": "sindresorhus/path-key", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "path", + "key", + "environment", + "env", + "variable", + "var", + "get", + "cross-platform", + "windows" + ], + "devDependencies": { + "@types/node": "^11.13.0", + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/path-key/readme.md b/node_modules/path-key/readme.md new file mode 100644 index 0000000..a9052d7 --- /dev/null +++ b/node_modules/path-key/readme.md @@ -0,0 +1,61 @@ +# path-key [![Build Status](https://travis-ci.org/sindresorhus/path-key.svg?branch=master)](https://travis-ci.org/sindresorhus/path-key) + +> Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform + +It's usually `PATH`, but on Windows it can be any casing like `Path`... + + +## Install + +``` +$ npm install path-key +``` + + +## Usage + +```js +const pathKey = require('path-key'); + +const key = pathKey(); +//=> 'PATH' + +const PATH = process.env[key]; +//=> '/usr/local/bin:/usr/bin:/bin' +``` + + +## API + +### pathKey(options?) + +#### options + +Type: `object` + +##### env + +Type: `object`
+Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env) + +Use a custom environment variables object. + +##### platform + +Type: `string`
+Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform) + +Get the PATH key for a specific platform. + + +--- + +
+Get professional support for this package with a Tidelift subscription + +Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. +
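A short sketch combining the `env` and `platform` options described above; the environment object is fabricated for illustration:

```js
const pathKey = require('path-key');

// On Windows the PATH variable can be cased as `Path`, `PATH`, etc.
const key = pathKey({
	env: {Path: 'C:\\Windows\\system32'},
	platform: 'win32'
});

console.log(key);
//=> 'Path'
```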
diff --git a/node_modules/picomatch/CHANGELOG.md b/node_modules/picomatch/CHANGELOG.md new file mode 100644 index 0000000..8ccc6c1 --- /dev/null +++ b/node_modules/picomatch/CHANGELOG.md @@ -0,0 +1,136 @@ +# Release history + +**All notable changes to this project will be documented in this file.** + +The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +
+**Guiding Principles** + +- Changelogs are for humans, not machines. +- There should be an entry for every single version. +- The same types of changes should be grouped. +- Versions and sections should be linkable. +- The latest version comes first. +- The release date of each version is displayed. +- Mention whether you follow Semantic Versioning. + +**Types of changes** + +Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_: + +- `Added` for new features. +- `Changed` for changes in existing functionality. +- `Deprecated` for soon-to-be removed features. +- `Removed` for now removed features. +- `Fixed` for any bug fixes. +- `Security` in case of vulnerabilities. +
+ +## 2.3.1 (2022-01-02) + +### Fixed + +* Fixes bug when a pattern containing an expression after the closing parenthesis (`/!(*.d).{ts,tsx}`) was incorrectly converted to regexp ([9f241ef](https://github.com/micromatch/picomatch/commit/9f241ef)). + +### Changed + +* Some documentation improvements ([f81d236](https://github.com/micromatch/picomatch/commit/f81d236), [421e0e7](https://github.com/micromatch/picomatch/commit/421e0e7)). + +## 2.3.0 (2021-05-21) + +### Fixed + +* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) + +## 2.2.3 (2021-04-10) + +### Fixed + +* Do not skip pattern seperator for square brackets ([fb08a30](https://github.com/micromatch/picomatch/commit/fb08a30)). +* Set negatedExtGlob also if it does not span the whole pattern ([032e3f5](https://github.com/micromatch/picomatch/commit/032e3f5)). + +## 2.2.2 (2020-03-21) + +### Fixed + +* Correctly handle parts of the pattern after parentheses in the `scan` method ([e15b920](https://github.com/micromatch/picomatch/commit/e15b920)). + +## 2.2.1 (2020-01-04) + +* Fixes [#49](https://github.com/micromatch/picomatch/issues/49), so that braces with no sets or ranges are now propertly treated as literals. + +## 2.2.0 (2020-01-04) + +* Disable fastpaths mode for the parse method ([5b8d33f](https://github.com/micromatch/picomatch/commit/5b8d33f)) +* Add `tokens`, `slashes`, and `parts` to the object returned by `picomatch.scan()`. + +## 2.1.0 (2019-10-31) + +* add benchmarks for scan ([4793b92](https://github.com/micromatch/picomatch/commit/4793b92)) +* Add eslint object-curly-spacing rule ([707c650](https://github.com/micromatch/picomatch/commit/707c650)) +* Add prefer-const eslint rule ([5c7501c](https://github.com/micromatch/picomatch/commit/5c7501c)) +* Add support for nonegate in scan API ([275c9b9](https://github.com/micromatch/picomatch/commit/275c9b9)) +* Change lets to consts. Move root import up. ([4840625](https://github.com/micromatch/picomatch/commit/4840625)) +* closes https://github.com/micromatch/picomatch/issues/21 ([766bcb0](https://github.com/micromatch/picomatch/commit/766bcb0)) +* Fix "Extglobs" table in readme ([eb19da8](https://github.com/micromatch/picomatch/commit/eb19da8)) +* fixes https://github.com/micromatch/picomatch/issues/20 ([9caca07](https://github.com/micromatch/picomatch/commit/9caca07)) +* fixes https://github.com/micromatch/picomatch/issues/26 ([fa58f45](https://github.com/micromatch/picomatch/commit/fa58f45)) +* Lint test ([d433a34](https://github.com/micromatch/picomatch/commit/d433a34)) +* lint unit tests ([0159b55](https://github.com/micromatch/picomatch/commit/0159b55)) +* Make scan work with noext ([6c02e03](https://github.com/micromatch/picomatch/commit/6c02e03)) +* minor linting ([c2a2b87](https://github.com/micromatch/picomatch/commit/c2a2b87)) +* minor parser improvements ([197671d](https://github.com/micromatch/picomatch/commit/197671d)) +* remove eslint since it... 
([07876fa](https://github.com/micromatch/picomatch/commit/07876fa)) +* remove funding file ([8ebe96d](https://github.com/micromatch/picomatch/commit/8ebe96d)) +* Remove unused funks ([cbc6d54](https://github.com/micromatch/picomatch/commit/cbc6d54)) +* Run eslint during pretest, fix existing eslint findings ([0682367](https://github.com/micromatch/picomatch/commit/0682367)) +* support `noparen` in scan ([3d37569](https://github.com/micromatch/picomatch/commit/3d37569)) +* update changelog ([7b34e77](https://github.com/micromatch/picomatch/commit/7b34e77)) +* update travis ([777f038](https://github.com/micromatch/picomatch/commit/777f038)) +* Use eslint-disable-next-line instead of eslint-disable ([4e7c1fd](https://github.com/micromatch/picomatch/commit/4e7c1fd)) + +## 2.0.7 (2019-05-14) + +* 2.0.7 ([9eb9a71](https://github.com/micromatch/picomatch/commit/9eb9a71)) +* supports lookbehinds ([1f63f7e](https://github.com/micromatch/picomatch/commit/1f63f7e)) +* update .verb.md file with typo change ([2741279](https://github.com/micromatch/picomatch/commit/2741279)) +* fix: typo in README ([0753e44](https://github.com/micromatch/picomatch/commit/0753e44)) + +## 2.0.4 (2019-04-10) + +### Fixed + +- Readme link [fixed](https://github.com/micromatch/picomatch/pull/13/commits/a96ab3aa2b11b6861c23289964613d85563b05df) by @danez. +- `options.capture` now works as expected when fastpaths are enabled. See https://github.com/micromatch/picomatch/pull/12/commits/26aefd71f1cfaf95c37f1c1fcab68a693b037304. Thanks to @DrPizza. + +## 2.0.0 (2019-04-10) + +### Added + +- Adds support for `options.onIgnore`. See the readme for details +- Adds support for `options.onResult`. See the readme for details + +### Breaking changes + +- The unixify option was renamed to `windows` +- caching and all related options and methods have been removed + +## 1.0.0 (2018-11-05) + +- adds `.onMatch` option +- improvements to `.scan` method +- numerous improvements and optimizations for matching and parsing +- better windows path handling + +## 0.1.0 - 2017-04-13 + +First release. + + +[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE new file mode 100644 index 0000000..3608dca --- /dev/null +++ b/node_modules/picomatch/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/picomatch/README.md b/node_modules/picomatch/README.md new file mode 100644 index 0000000..b0526e2 --- /dev/null +++ b/node_modules/picomatch/README.md @@ -0,0 +1,708 @@ +# Picomatch + +Blazing fast and accurate glob matcher written in JavaScript. +No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions. +
+ +## Why picomatch? + +* **Lightweight** - No dependencies +* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. +* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) +* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) +* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. +* **Well tested** - Thousands of unit tests + +See the [library comparison](#library-comparisons) to other libraries. + +
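Regarding the "use the returned matcher function to speed up repeat matching" point above, a small sketch of compiling the glob once and reusing the matcher (file names are arbitrary):

```js
const pm = require('picomatch');

// Compile the glob once, then reuse the matcher for many paths,
// e.g. inside a file watcher callback.
const isMatch = pm('src/**/*.ts');

for (const file of ['src/a.ts', 'src/lib/b.ts', 'test/c.ts']) {
  console.log(file, isMatch(file));
}
```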
+
+ +## Table of Contents + +
Click to expand + +- [Install](#install) +- [Usage](#usage) +- [API](#api) + * [picomatch](#picomatch) + * [.test](#test) + * [.matchBase](#matchbase) + * [.isMatch](#ismatch) + * [.parse](#parse) + * [.scan](#scan) + * [.compileRe](#compilere) + * [.makeRe](#makere) + * [.toRegex](#toregex) +- [Options](#options) + * [Picomatch options](#picomatch-options) + * [Scan Options](#scan-options) + * [Options Examples](#options-examples) +- [Globbing features](#globbing-features) + * [Basic globbing](#basic-globbing) + * [Advanced globbing](#advanced-globbing) + * [Braces](#braces) + * [Matching special characters as literals](#matching-special-characters-as-literals) +- [Library Comparisons](#library-comparisons) +- [Benchmarks](#benchmarks) +- [Philosophies](#philosophies) +- [About](#about) + * [Author](#author) + * [License](#license) + +_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ + +
+ +
+
+ +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +npm install --save picomatch +``` + +
+ +## Usage + +The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. + +```js +const pm = require('picomatch'); +const isMatch = pm('*.js'); + +console.log(isMatch('abcd')); //=> false +console.log(isMatch('a.js')); //=> true +console.log(isMatch('a.md')); //=> false +console.log(isMatch('a/b.js')); //=> false +``` + +
+ +## API + +### [picomatch](lib/picomatch.js#L32) + +Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. + +**Params** + +* `globs` **{String|Array}**: One or more glob patterns. +* `options` **{Object=}** +* `returns` **{Function=}**: Returns a matcher function. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch(glob[, options]); + +const isMatch = picomatch('*.!(*a)'); +console.log(isMatch('a.a')); //=> false +console.log(isMatch('a.b')); //=> true +``` + +### [.test](lib/picomatch.js#L117) + +Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. + +**Params** + +* `input` **{String}**: String to test. +* `regex` **{RegExp}** +* `returns` **{Object}**: Returns an object with matching info. + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.test(input, regex[, options]); + +console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); +// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } +``` + +### [.matchBase](lib/picomatch.js#L161) + +Match the basename of a filepath. + +**Params** + +* `input` **{String}**: String to test. +* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). +* `returns` **{Boolean}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.matchBase(input, glob[, options]); +console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true +``` + +### [.isMatch](lib/picomatch.js#L183) + +Returns true if **any** of the given glob `patterns` match the specified `string`. + +**Params** + +* **{String|Array}**: str The string to test. +* **{String|Array}**: patterns One or more glob patterns to use for matching. +* **{Object}**: See available [options](#options). +* `returns` **{Boolean}**: Returns true if any patterns match `str` + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.isMatch(string, patterns[, options]); + +console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true +console.log(picomatch.isMatch('a.a', 'b.*')); //=> false +``` + +### [.parse](lib/picomatch.js#L199) + +Parse a glob pattern to create the source string for a regular expression. + +**Params** + +* `pattern` **{String}** +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.parse(pattern[, options]); +``` + +### [.scan](lib/picomatch.js#L231) + +Scan a glob pattern to separate the pattern into segments. + +**Params** + +* `input` **{String}**: Glob pattern to scan. +* `options` **{Object}** +* `returns` **{Object}**: Returns an object with + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.scan(input[, options]); + +const result = picomatch.scan('!./foo/*.js'); +console.log(result); +{ prefix: '!./', + input: '!./foo/*.js', + start: 3, + base: 'foo', + glob: '*.js', + isBrace: false, + isBracket: false, + isGlob: true, + isExtglob: false, + isGlobstar: false, + negated: true } +``` + +### [.compileRe](lib/picomatch.js#L245) + +Compile a regular expression from the `state` object returned by the +[parse()](#parse) method. 
+ +**Params** + +* `state` **{Object}** +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. +* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. +* `returns` **{RegExp}** + +### [.makeRe](lib/picomatch.js#L286) + +Create a regular expression from a parsed glob pattern. + +**Params** + +* `state` **{String}**: The object returned from the `.parse` method. +* `options` **{Object}** +* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. +* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. +* `returns` **{RegExp}**: Returns a regex created from the given pattern. + +**Example** + +```js +const picomatch = require('picomatch'); +const state = picomatch.parse('*.js'); +// picomatch.compileRe(state[, options]); + +console.log(picomatch.compileRe(state)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +### [.toRegex](lib/picomatch.js#L321) + +Create a regular expression from the given regex source string. + +**Params** + +* `source` **{String}**: Regular expression source string. +* `options` **{Object}** +* `returns` **{RegExp}** + +**Example** + +```js +const picomatch = require('picomatch'); +// picomatch.toRegex(source[, options]); + +const { output } = picomatch.parse('*.js'); +console.log(picomatch.toRegex(output)); +//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ +``` + +
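The `.makeRe` example above actually demonstrates `compileRe` on a parsed state; for comparison, a minimal sketch of calling `makeRe` directly with a glob pattern:

```js
const picomatch = require('picomatch');

// Build the regular expression straight from the glob.
const re = picomatch.makeRe('*.js');

console.log(re.test('index.js'));     //=> true
console.log(re.test('lib/index.js')); //=> false (`*` does not match `/`)
```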
+ +## Options + +### Picomatch options + +The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | +| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | +| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | +| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | +| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | +| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | +| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | +| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | +| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | +| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | +| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | +| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | +| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | +| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | +| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | +| `matchBase` | `boolean` | `false` | Alias for `basename` | +| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | +| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | +| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | +| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | +| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| +| `noext` | `boolean` | `false` | Alias for `noextglob` | +| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | +| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | +| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | +| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | +| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | +| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | +| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | +| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | +| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | +| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | +| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | +| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | +| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | +| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | +| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | + +picomatch has automatic detection for regex positive and negative lookbehinds. If the pattern contains a negative lookbehind, you must be using Node.js >= 8.10 or else picomatch will throw an error. + +### Scan Options + +In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. + +| **Option** | **Type** | **Default value** | **Description** | +| --- | --- | --- | --- | +| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | +| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | + +**Example** + +```js +const picomatch = require('picomatch'); +const result = picomatch.scan('!./foo/*.js', { tokens: true }); +console.log(result); +// { +// prefix: '!./', +// input: '!./foo/*.js', +// start: 3, +// base: 'foo', +// glob: '*.js', +// isBrace: false, +// isBracket: false, +// isGlob: true, +// isExtglob: false, +// isGlobstar: false, +// negated: true, +// maxDepth: 2, +// tokens: [ +// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, +// { value: 'foo', depth: 1, isGlob: false }, +// { value: '*.js', depth: 1, isGlob: true } +// ], +// slashes: [ 2, 6 ], +// parts: [ 'foo', '*.js' ] +// } +``` + +
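To illustrate a couple of the options from the table above (`nocase` and `ignore`), a brief sketch with arbitrary file names:

```js
const picomatch = require('picomatch');

// Case-insensitive matching, with one family of names excluded via `ignore`.
const isMatch = picomatch('*.js', { nocase: true, ignore: 'vendor*.js' });

console.log(isMatch('APP.JS'));    //=> true
console.log(isMatch('vendor.js')); //=> false (excluded by `ignore`)
console.log(isMatch('readme.md')); //=> false (does not match the glob)
```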
+ +### Options Examples + +#### options.expandRange + +**Type**: `function` + +**Default**: `undefined` + +Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. + +**Example** + +The following example shows how to create a glob that matches a folder + +```js +const fill = require('fill-range'); +const regex = pm.makeRe('foo/{01..25}/bar', { + expandRange(a, b) { + return `(${fill(a, b, { toRegex: true })})`; + } +}); + +console.log(regex); +//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ + +console.log(regex.test('foo/00/bar')) // false +console.log(regex.test('foo/01/bar')) // true +console.log(regex.test('foo/10/bar')) // true +console.log(regex.test('foo/22/bar')) // true +console.log(regex.test('foo/25/bar')) // true +console.log(regex.test('foo/26/bar')) // false +``` + +#### options.format + +**Type**: `function` + +**Default**: `undefined` + +Custom function for formatting strings before they're matched. + +**Example** + +```js +// strip leading './' from strings +const format = str => str.replace(/^\.\//, ''); +const isMatch = picomatch('foo/*.js', { format }); +console.log(isMatch('./foo/bar.js')); //=> true +``` + +#### options.onMatch + +```js +const onMatch = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onMatch }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onIgnore + +```js +const onIgnore = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +#### options.onResult + +```js +const onResult = ({ glob, regex, input, output }) => { + console.log({ glob, regex, input, output }); +}; + +const isMatch = picomatch('*', { onResult, ignore: 'f*' }); +isMatch('foo'); +isMatch('bar'); +isMatch('baz'); +``` + +
+
+
+## Globbing features
+
+* [Basic globbing](#basic-globbing) (Wildcard matching)
+* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching)
+
+### Basic globbing
+
+| **Character** | **Description** |
+| --- | --- |
+| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. |
+| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` on Windows) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. |
+| `?` | Matches any character one time, excluding path separators. Does _not match_ path separators or leading dots. |
+| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. |
+
+#### Matching behavior vs. Bash
+
+Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions:
+
+* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`.
+* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`.
+
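+A few examples of the wildcard behavior described in the table above (a quick sketch with arbitrary paths):
+
+```js
+const pm = require('picomatch');
+
+console.log(pm.isMatch('foo/bar.js', '*'));          // false - `*` does not cross path separators
+console.log(pm.isMatch('foo/bar.js', '**'));         // true
+console.log(pm.isMatch('foo/bar.js', 'foo/*.js'));   // true
+console.log(pm.isMatch('a.js', '?.js'));             // true
+console.log(pm.isMatch('ab.js', '?.js'));            // false - `?` matches exactly one character
+console.log(pm.isMatch('b', '[abc]'));               // true
+console.log(pm.isMatch('.env', '*'));                // false - dotfiles require the `dot` option
+console.log(pm.isMatch('.env', '*', { dot: true })); // true
+```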
+
+### Advanced globbing
+
+* [extglobs](#extglobs)
+* [POSIX brackets](#posix-brackets)
+* [Braces](#brace-expansion)
+
+#### Extglobs
+
+| **Pattern** | **Description** |
+| --- | --- |
+| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` |
+| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` |
+| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` |
+| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` |
+| `!(pattern)` | Match _anything but_ `pattern` |
+
+**Examples**
+
+```js
+const pm = require('picomatch');
+
+// *(pattern) matches ZERO or more of "pattern"
+console.log(pm.isMatch('a', 'a*(z)')); // true
+console.log(pm.isMatch('az', 'a*(z)')); // true
+console.log(pm.isMatch('azzz', 'a*(z)')); // true
+
+// +(pattern) matches ONE or more of "pattern"
+console.log(pm.isMatch('a', 'a+(z)')); // false
+console.log(pm.isMatch('az', 'a+(z)')); // true
+console.log(pm.isMatch('azzz', 'a+(z)')); // true
+
+// supports multiple extglobs
+console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false
+
+// supports nested extglobs
+console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true
+```
+
+#### POSIX brackets
+
+POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true.
+
+**Enable POSIX bracket support**
+
+```js
+console.log(pm.makeRe('[[:word:]]+', { posix: true }));
+//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/
+```
+
+**Supported POSIX classes**
+
+The following named POSIX bracket expressions are supported:
+
+* `[:alnum:]` - Alphanumeric characters, equivalent to `[a-zA-Z0-9]`.
+* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`.
+* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`.
+* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`.
+* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`.
+* `[:digit:]` - Numerical digits, equivalent to `[0-9]`.
+* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`.
+* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`.
+* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`.
+* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`.
+* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`.
+* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`.
+* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`.
+* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`.
+
+See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information.
+
+### Braces
+
+Picomatch has only very basic support for braces and does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead.
+
+### Matching special characters as literals
+
+If you wish to match any of the following special characters in a file path, they must be escaped with backslashes or quotes in your glob pattern:
+
+**Special Characters**
+
+Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms.
+
+To match any of the following characters as literals, escape them in the glob pattern: `$^*+?()[]`
+
+Examples:
+
+```js
+console.log(pm.makeRe('foo/bar \\(1\\)'));
+console.log(pm.makeRe('foo/bar \\[1\\]'));
+```
+
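+As a quick sketch of what those escapes do when matching (the file names are made-up examples):
+
+```js
+const pm = require('picomatch');
+
+console.log(pm.isMatch('foo/bar (1)', 'foo/bar \\(1\\)')); // true - escaped parens are matched literally
+console.log(pm.isMatch('foo/bar [1]', 'foo/bar \\[1\\]')); // true - escaped brackets are matched literally
+```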
+
+
+## Library Comparisons
+
+The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets).
+
+| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - |
+| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - |
+| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - |
+| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - |
+| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - |
+| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ |
+| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ |
+| File system operations | - | - | - | - | - | - | - |
+
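+One row worth spelling out: picomatch supports brace _matching_ but not brace _expansion_, i.e. it can match against a brace pattern but will not expand the pattern into a list of globs. A small sketch:
+
+```js
+const pm = require('picomatch');
+
+// Brace *matching* - supported by picomatch:
+console.log(pm.isMatch('foo.js', '*.{js,ts}')); // true
+console.log(pm.isMatch('foo.rb', '*.{js,ts}')); // false
+
+// Brace *expansion* (turning '*.{js,ts}' into ['*.js', '*.ts']) is not done by
+// picomatch; use micromatch or braces for that.
+```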
+
+
+## Benchmarks
+
+Performance comparison of picomatch and minimatch.
+
+```
+# .makeRe star
+  picomatch x 1,993,050 ops/sec ±0.51% (91 runs sampled)
+  minimatch x 627,206 ops/sec ±1.96% (87 runs sampled)
+
+# .makeRe star; dot=true
+  picomatch x 1,436,640 ops/sec ±0.62% (91 runs sampled)
+  minimatch x 525,876 ops/sec ±0.60% (88 runs sampled)
+
+# .makeRe globstar
+  picomatch x 1,592,742 ops/sec ±0.42% (90 runs sampled)
+  minimatch x 962,043 ops/sec ±1.76% (91 runs sampled)
+
+# .makeRe globstars
+  picomatch x 1,615,199 ops/sec ±0.35% (94 runs sampled)
+  minimatch x 477,179 ops/sec ±1.33% (91 runs sampled)
+
+# .makeRe with leading star
+  picomatch x 1,220,856 ops/sec ±0.40% (92 runs sampled)
+  minimatch x 453,564 ops/sec ±1.43% (94 runs sampled)
+
+# .makeRe - basic braces
+  picomatch x 392,067 ops/sec ±0.70% (90 runs sampled)
+  minimatch x 99,532 ops/sec ±2.03% (87 runs sampled)
+```
+
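+These numbers come from the project's benchmark suite. As a rough sketch of how a similar comparison could be reproduced with a plain timing loop (not the harness used for the published numbers; assumes `minimatch` is installed alongside picomatch):
+
+```js
+const picomatch = require('picomatch');
+const minimatch = require('minimatch');
+
+// Time `iterations` calls of `fn` and report the elapsed milliseconds.
+const time = (label, fn, iterations = 100000) => {
+  const start = process.hrtime.bigint();
+  for (let i = 0; i < iterations; i++) fn();
+  const ms = Number(process.hrtime.bigint() - start) / 1e6;
+  console.log(`${label}: ${ms.toFixed(1)} ms for ${iterations} calls`);
+};
+
+time('picomatch.makeRe star', () => picomatch.makeRe('*'));
+time('minimatch.makeRe star', () => minimatch.makeRe('*'));
+```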
+
+
+## Philosophies
+
+The goal of this library is to be blazing fast, without compromising on accuracy.
+
+**Accuracy**
+
+The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly complicated when different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`.
+
+Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to use our best judgement and rely on feedback from users to make improvements.
+
+**Performance**
+
+Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer.
+
+
+ +## About + +
+
+### Contributing
+
+Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
+
+Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards.
+
+
+ +
+
+### Running Tests
+
+Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
+
+```sh
+npm install && npm test
+```
+
+ +
+
+### Building docs
+
+_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
+
+To generate the readme, run the following command:
+
+```sh
+npm install -g verbose/verb#dev verb-generate-readme && verb
+```
+
+ +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +### License + +Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js new file mode 100644 index 0000000..d2f2bc5 --- /dev/null +++ b/node_modules/picomatch/index.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./lib/picomatch'); diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js new file mode 100644 index 0000000..a62ef38 --- /dev/null +++ b/node_modules/picomatch/lib/constants.js @@ -0,0 +1,179 @@ +'use strict'; + +const path = require('path'); +const WIN_SLASH = '\\\\/'; +const WIN_NO_SLASH = `[^${WIN_SLASH}]`; + +/** + * Posix glob regex + */ + +const DOT_LITERAL = '\\.'; +const PLUS_LITERAL = '\\+'; +const QMARK_LITERAL = '\\?'; +const SLASH_LITERAL = '\\/'; +const ONE_CHAR = '(?=.)'; +const QMARK = '[^/]'; +const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; +const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; +const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; +const NO_DOT = `(?!${DOT_LITERAL})`; +const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; +const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; +const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; +const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; +const STAR = `${QMARK}*?`; + +const POSIX_CHARS = { + DOT_LITERAL, + PLUS_LITERAL, + QMARK_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + QMARK, + END_ANCHOR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK_NO_DOT, + STAR, + START_ANCHOR +}; + +/** + * Windows glob regex + */ + +const WINDOWS_CHARS = { + ...POSIX_CHARS, + + SLASH_LITERAL: `[${WIN_SLASH}]`, + QMARK: WIN_NO_SLASH, + STAR: `${WIN_NO_SLASH}*?`, + DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, + NO_DOT: `(?!${DOT_LITERAL})`, + NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, + NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, + QMARK_NO_DOT: `[^.${WIN_SLASH}]`, + START_ANCHOR: `(?:^|[${WIN_SLASH}])`, + END_ANCHOR: `(?:[${WIN_SLASH}]|$)` +}; + +/** + * POSIX Bracket Regex + */ + +const POSIX_REGEX_SOURCE = { + alnum: 'a-zA-Z0-9', + alpha: 'a-zA-Z', + ascii: '\\x00-\\x7F', + blank: ' \\t', + cntrl: '\\x00-\\x1F\\x7F', + digit: '0-9', + graph: '\\x21-\\x7E', + lower: 'a-z', + print: '\\x20-\\x7E ', + punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', + space: ' \\t\\r\\n\\v\\f', + upper: 'A-Z', + word: 'A-Za-z0-9_', + xdigit: 'A-Fa-f0-9' +}; + +module.exports = { + MAX_LENGTH: 1024 * 64, + POSIX_REGEX_SOURCE, + + // regular expressions + REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, + REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, + REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, + REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, + REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, + REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, + + // Replace globs with equivalent patterns to reduce parsing time. + REPLACEMENTS: { + '***': '*', + '**/**': '**', + '**/**/**': '**' + }, + + // Digits + CHAR_0: 48, /* 0 */ + CHAR_9: 57, /* 9 */ + + // Alphabet chars. 
+ CHAR_UPPERCASE_A: 65, /* A */ + CHAR_LOWERCASE_A: 97, /* a */ + CHAR_UPPERCASE_Z: 90, /* Z */ + CHAR_LOWERCASE_Z: 122, /* z */ + + CHAR_LEFT_PARENTHESES: 40, /* ( */ + CHAR_RIGHT_PARENTHESES: 41, /* ) */ + + CHAR_ASTERISK: 42, /* * */ + + // Non-alphabetic chars. + CHAR_AMPERSAND: 38, /* & */ + CHAR_AT: 64, /* @ */ + CHAR_BACKWARD_SLASH: 92, /* \ */ + CHAR_CARRIAGE_RETURN: 13, /* \r */ + CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ + CHAR_COLON: 58, /* : */ + CHAR_COMMA: 44, /* , */ + CHAR_DOT: 46, /* . */ + CHAR_DOUBLE_QUOTE: 34, /* " */ + CHAR_EQUAL: 61, /* = */ + CHAR_EXCLAMATION_MARK: 33, /* ! */ + CHAR_FORM_FEED: 12, /* \f */ + CHAR_FORWARD_SLASH: 47, /* / */ + CHAR_GRAVE_ACCENT: 96, /* ` */ + CHAR_HASH: 35, /* # */ + CHAR_HYPHEN_MINUS: 45, /* - */ + CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ + CHAR_LEFT_CURLY_BRACE: 123, /* { */ + CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ + CHAR_LINE_FEED: 10, /* \n */ + CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ + CHAR_PERCENT: 37, /* % */ + CHAR_PLUS: 43, /* + */ + CHAR_QUESTION_MARK: 63, /* ? */ + CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ + CHAR_RIGHT_CURLY_BRACE: 125, /* } */ + CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ + CHAR_SEMICOLON: 59, /* ; */ + CHAR_SINGLE_QUOTE: 39, /* ' */ + CHAR_SPACE: 32, /* */ + CHAR_TAB: 9, /* \t */ + CHAR_UNDERSCORE: 95, /* _ */ + CHAR_VERTICAL_LINE: 124, /* | */ + CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ + + SEP: path.sep, + + /** + * Create EXTGLOB_CHARS + */ + + extglobChars(chars) { + return { + '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, + '?': { type: 'qmark', open: '(?:', close: ')?' }, + '+': { type: 'plus', open: '(?:', close: ')+' }, + '*': { type: 'star', open: '(?:', close: ')*' }, + '@': { type: 'at', open: '(?:', close: ')' } + }; + }, + + /** + * Create GLOB_CHARS + */ + + globChars(win32) { + return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; + } +}; diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js new file mode 100644 index 0000000..58269d0 --- /dev/null +++ b/node_modules/picomatch/lib/parse.js @@ -0,0 +1,1091 @@ +'use strict'; + +const constants = require('./constants'); +const utils = require('./utils'); + +/** + * Constants + */ + +const { + MAX_LENGTH, + POSIX_REGEX_SOURCE, + REGEX_NON_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_BACKREF, + REPLACEMENTS +} = constants; + +/** + * Helpers + */ + +const expandRange = (args, options) => { + if (typeof options.expandRange === 'function') { + return options.expandRange(...args, options); + } + + args.sort(); + const value = `[${args.join('-')}]`; + + try { + /* eslint-disable-next-line no-new */ + new RegExp(value); + } catch (ex) { + return args.map(v => utils.escapeRegex(v)).join('..'); + } + + return value; +}; + +/** + * Create the message for a syntax error + */ + +const syntaxError = (type, char) => { + return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; +}; + +/** + * Parse the given input string. + * @param {String} input + * @param {Object} options + * @return {Object} + */ + +const parse = (input, options) => { + if (typeof input !== 'string') { + throw new TypeError('Expected a string'); + } + + input = REPLACEMENTS[input] || input; + + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + + let len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + const bos = { type: 'bos', value: '', output: opts.prepend || '' }; + const tokens = [bos]; + + const capture = opts.capture ? '' : '?:'; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const PLATFORM_CHARS = constants.globChars(win32); + const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); + + const { + DOT_LITERAL, + PLUS_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOT_SLASH, + NO_DOTS_SLASH, + QMARK, + QMARK_NO_DOT, + STAR, + START_ANCHOR + } = PLATFORM_CHARS; + + const globstar = opts => { + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const nodot = opts.dot ? '' : NO_DOT; + const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; + let star = opts.bash === true ? globstar(opts) : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + // minimatch options support + if (typeof opts.noext === 'boolean') { + opts.noextglob = opts.noext; + } + + const state = { + input, + index: -1, + start: 0, + dot: opts.dot === true, + consumed: '', + output: '', + prefix: '', + backtrack: false, + negated: false, + brackets: 0, + braces: 0, + parens: 0, + quotes: 0, + globstar: false, + tokens + }; + + input = utils.removePrefix(input, state); + len = input.length; + + const extglobs = []; + const braces = []; + const stack = []; + let prev = bos; + let value; + + /** + * Tokenizing helpers + */ + + const eos = () => state.index === len - 1; + const peek = state.peek = (n = 1) => input[state.index + n]; + const advance = state.advance = () => input[++state.index] || ''; + const remaining = () => input.slice(state.index + 1); + const consume = (value = '', num = 0) => { + state.consumed += value; + state.index += num; + }; + + const append = token => { + state.output += token.output != null ? token.output : token.value; + consume(token.value); + }; + + const negate = () => { + let count = 1; + + while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { + advance(); + state.start++; + count++; + } + + if (count % 2 === 0) { + return false; + } + + state.negated = true; + state.start++; + return true; + }; + + const increment = type => { + state[type]++; + stack.push(type); + }; + + const decrement = type => { + state[type]--; + stack.pop(); + }; + + /** + * Push tokens onto the tokens array. This helper speeds up + * tokenizing by 1) helping us avoid backtracking as much as possible, + * and 2) helping us avoid creating extra tokens when consecutive + * characters are plain text. This improves performance and simplifies + * lookbehinds. 
+ */ + + const push = tok => { + if (prev.type === 'globstar') { + const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); + const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); + + if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { + state.output = state.output.slice(0, -prev.output.length); + prev.type = 'star'; + prev.value = '*'; + prev.output = star; + state.output += prev.output; + } + } + + if (extglobs.length && tok.type !== 'paren') { + extglobs[extglobs.length - 1].inner += tok.value; + } + + if (tok.value || tok.output) append(tok); + if (prev && prev.type === 'text' && tok.type === 'text') { + prev.value += tok.value; + prev.output = (prev.output || '') + tok.value; + return; + } + + tok.prev = prev; + tokens.push(tok); + prev = tok; + }; + + const extglobOpen = (type, value) => { + const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; + + token.prev = prev; + token.parens = state.parens; + token.output = state.output; + const output = (opts.capture ? '(' : '') + token.open; + + increment('parens'); + push({ type, value, output: state.output ? '' : ONE_CHAR }); + push({ type: 'paren', extglob: true, value: advance(), output }); + extglobs.push(token); + }; + + const extglobClose = token => { + let output = token.close + (opts.capture ? ')' : ''); + let rest; + + if (token.type === 'negate') { + let extglobStar = star; + + if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { + extglobStar = globstar(opts); + } + + if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { + output = token.close = `)$))${extglobStar}`; + } + + if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { + // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. + // In this case, we need to parse the string and use it in the output of the original pattern. + // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. + // + // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. + const expression = parse(rest, { ...options, fastpaths: false }).output; + + output = token.close = `)${expression})${extglobStar})`; + } + + if (token.prev.type === 'bos') { + state.negatedExtglob = true; + } + } + + push({ type: 'paren', extglob: true, value, output }); + decrement('parens'); + }; + + /** + * Fast paths + */ + + if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { + let backslashes = false; + + let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { + if (first === '\\') { + backslashes = true; + return m; + } + + if (first === '?') { + if (esc) { + return esc + first + (rest ? QMARK.repeat(rest.length) : ''); + } + if (index === 0) { + return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); + } + return QMARK.repeat(chars.length); + } + + if (first === '.') { + return DOT_LITERAL.repeat(chars.length); + } + + if (first === '*') { + if (esc) { + return esc + first + (rest ? star : ''); + } + return star; + } + return esc ? m : `\\${m}`; + }); + + if (backslashes === true) { + if (opts.unescape === true) { + output = output.replace(/\\/g, ''); + } else { + output = output.replace(/\\+/g, m => { + return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); + }); + } + } + + if (output === input && opts.contains === true) { + state.output = input; + return state; + } + + state.output = utils.wrapOutput(output, state, options); + return state; + } + + /** + * Tokenize input until we reach end-of-string + */ + + while (!eos()) { + value = advance(); + + if (value === '\u0000') { + continue; + } + + /** + * Escaped characters + */ + + if (value === '\\') { + const next = peek(); + + if (next === '/' && opts.bash !== true) { + continue; + } + + if (next === '.' || next === ';') { + continue; + } + + if (!next) { + value += '\\'; + push({ type: 'text', value }); + continue; + } + + // collapse slashes to reduce potential for exploits + const match = /^\\+/.exec(remaining()); + let slashes = 0; + + if (match && match[0].length > 2) { + slashes = match[0].length; + state.index += slashes; + if (slashes % 2 !== 0) { + value += '\\'; + } + } + + if (opts.unescape === true) { + value = advance(); + } else { + value += advance(); + } + + if (state.brackets === 0) { + push({ type: 'text', value }); + continue; + } + } + + /** + * If we're inside a regex character class, continue + * until we reach the closing bracket. + */ + + if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { + if (opts.posix !== false && value === ':') { + const inner = prev.value.slice(1); + if (inner.includes('[')) { + prev.posix = true; + + if (inner.includes(':')) { + const idx = prev.value.lastIndexOf('['); + const pre = prev.value.slice(0, idx); + const rest = prev.value.slice(idx + 2); + const posix = POSIX_REGEX_SOURCE[rest]; + if (posix) { + prev.value = pre + posix; + state.backtrack = true; + advance(); + + if (!bos.output && tokens.indexOf(prev) === 1) { + bos.output = ONE_CHAR; + } + continue; + } + } + } + } + + if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { + value = `\\${value}`; + } + + if (value === ']' && (prev.value === '[' || prev.value === '[^')) { + value = `\\${value}`; + } + + if (opts.posix === true && value === '!' && prev.value === '[') { + value = '^'; + } + + prev.value += value; + append({ value }); + continue; + } + + /** + * If we're inside a quoted string, continue + * until we reach the closing double quote. + */ + + if (state.quotes === 1 && value !== '"') { + value = utils.escapeRegex(value); + prev.value += value; + append({ value }); + continue; + } + + /** + * Double quotes + */ + + if (value === '"') { + state.quotes = state.quotes === 1 ? 0 : 1; + if (opts.keepQuotes === true) { + push({ type: 'text', value }); + } + continue; + } + + /** + * Parentheses + */ + + if (value === '(') { + increment('parens'); + push({ type: 'paren', value }); + continue; + } + + if (value === ')') { + if (state.parens === 0 && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '(')); + } + + const extglob = extglobs[extglobs.length - 1]; + if (extglob && state.parens === extglob.parens + 1) { + extglobClose(extglobs.pop()); + continue; + } + + push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); + decrement('parens'); + continue; + } + + /** + * Square brackets + */ + + if (value === '[') { + if (opts.nobracket === true || !remaining().includes(']')) { + if (opts.nobracket !== true && opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('closing', ']')); + } + + value = `\\${value}`; + } else { + increment('brackets'); + } + + push({ type: 'bracket', value }); + continue; + } + + if (value === ']') { + if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + if (state.brackets === 0) { + if (opts.strictBrackets === true) { + throw new SyntaxError(syntaxError('opening', '[')); + } + + push({ type: 'text', value, output: `\\${value}` }); + continue; + } + + decrement('brackets'); + + const prevValue = prev.value.slice(1); + if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { + value = `/${value}`; + } + + prev.value += value; + append({ value }); + + // when literal brackets are explicitly disabled + // assume we should match with a regex character class + if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { + continue; + } + + const escaped = utils.escapeRegex(prev.value); + state.output = state.output.slice(0, -prev.value.length); + + // when literal brackets are explicitly enabled + // assume we should escape the brackets to match literal characters + if (opts.literalBrackets === true) { + state.output += escaped; + prev.value = escaped; + continue; + } + + // when the user specifies nothing, try to match both + prev.value = `(${capture}${escaped}|${prev.value})`; + state.output += prev.value; + continue; + } + + /** + * Braces + */ + + if (value === '{' && opts.nobrace !== true) { + increment('braces'); + + const open = { + type: 'brace', + value, + output: '(', + outputIndex: state.output.length, + tokensIndex: state.tokens.length + }; + + braces.push(open); + push(open); + continue; + } + + if (value === '}') { + const brace = braces[braces.length - 1]; + + if (opts.nobrace === true || !brace) { + push({ type: 'text', value, output: value }); + continue; + } + + let output = ')'; + + if (brace.dots === true) { + const arr = tokens.slice(); + const range = []; + + for (let i = arr.length - 1; i >= 0; i--) { + tokens.pop(); + if (arr[i].type === 'brace') { + break; + } + if (arr[i].type !== 'dots') { + range.unshift(arr[i].value); + } + } + + output = expandRange(range, opts); + state.backtrack = true; + } + + if (brace.comma !== true && brace.dots !== true) { + const out = state.output.slice(0, brace.outputIndex); + const toks = state.tokens.slice(brace.tokensIndex); + brace.value = brace.output = '\\{'; + value = output = '\\}'; + state.output = out; + for (const t of toks) { + state.output += (t.output || t.value); + } + } + + push({ type: 'brace', value, output }); + decrement('braces'); + braces.pop(); + continue; + } + + /** + * Pipes + */ + + if (value === '|') { + if (extglobs.length > 0) { + extglobs[extglobs.length - 1].conditions++; + } + push({ type: 'text', value }); + continue; + } + + /** + * Commas + */ + + if (value === ',') { + let output = value; + + const brace = braces[braces.length - 1]; + if (brace && stack[stack.length - 1] === 'braces') { + brace.comma = true; + output = '|'; + } + + push({ type: 'comma', value, output }); + continue; + } + + /** + * Slashes + */ + + if (value === '/') { + // if the beginning of the glob is "./", advance the start + // to the current 
index, and don't add the "./" characters + // to the state. This greatly simplifies lookbehinds when + // checking for BOS characters like "!" and "." (not "./") + if (prev.type === 'dot' && state.index === state.start + 1) { + state.start = state.index + 1; + state.consumed = ''; + state.output = ''; + tokens.pop(); + prev = bos; // reset "prev" to the first token + continue; + } + + push({ type: 'slash', value, output: SLASH_LITERAL }); + continue; + } + + /** + * Dots + */ + + if (value === '.') { + if (state.braces > 0 && prev.type === 'dot') { + if (prev.value === '.') prev.output = DOT_LITERAL; + const brace = braces[braces.length - 1]; + prev.type = 'dots'; + prev.output += value; + prev.value += value; + brace.dots = true; + continue; + } + + if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { + push({ type: 'text', value, output: DOT_LITERAL }); + continue; + } + + push({ type: 'dot', value, output: DOT_LITERAL }); + continue; + } + + /** + * Question marks + */ + + if (value === '?') { + const isGroup = prev && prev.value === '('; + if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('qmark', value); + continue; + } + + if (prev && prev.type === 'paren') { + const next = peek(); + let output = value; + + if (next === '<' && !utils.supportsLookbehinds()) { + throw new Error('Node.js v10 or higher is required for regex lookbehinds'); + } + + if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { + output = `\\${value}`; + } + + push({ type: 'text', value, output }); + continue; + } + + if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { + push({ type: 'qmark', value, output: QMARK_NO_DOT }); + continue; + } + + push({ type: 'qmark', value, output: QMARK }); + continue; + } + + /** + * Exclamation + */ + + if (value === '!') { + if (opts.noextglob !== true && peek() === '(') { + if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { + extglobOpen('negate', value); + continue; + } + } + + if (opts.nonegate !== true && state.index === 0) { + negate(); + continue; + } + } + + /** + * Plus + */ + + if (value === '+') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + extglobOpen('plus', value); + continue; + } + + if ((prev && prev.value === '(') || opts.regex === false) { + push({ type: 'plus', value, output: PLUS_LITERAL }); + continue; + } + + if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { + push({ type: 'plus', value }); + continue; + } + + push({ type: 'plus', value: PLUS_LITERAL }); + continue; + } + + /** + * Plain text + */ + + if (value === '@') { + if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { + push({ type: 'at', extglob: true, value, output: '' }); + continue; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Plain text + */ + + if (value !== '*') { + if (value === '$' || value === '^') { + value = `\\${value}`; + } + + const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); + if (match) { + value += match[0]; + state.index += match[0].length; + } + + push({ type: 'text', value }); + continue; + } + + /** + * Stars + */ + + if (prev && (prev.type === 'globstar' || prev.star === true)) { + prev.type = 'star'; + prev.star = true; + prev.value += value; + prev.output = star; + state.backtrack = true; + state.globstar = true; + consume(value); + continue; + } + + let rest = remaining(); + if (opts.noextglob !== true && /^\([^?]/.test(rest)) { + extglobOpen('star', value); + continue; + } + + if (prev.type === 'star') { + if (opts.noglobstar === true) { + consume(value); + continue; + } + + const prior = prev.prev; + const before = prior.prev; + const isStart = prior.type === 'slash' || prior.type === 'bos'; + const afterStar = before && (before.type === 'star' || before.type === 'globstar'); + + if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { + push({ type: 'star', value, output: '' }); + continue; + } + + const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); + const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); + if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { + push({ type: 'star', value, output: '' }); + continue; + } + + // strip consecutive `/**/` + while (rest.slice(0, 3) === '/**') { + const after = input[state.index + 4]; + if (after && after !== '/') { + break; + } + rest = rest.slice(3); + consume('/**', 3); + } + + if (prior.type === 'bos' && eos()) { + prev.type = 'globstar'; + prev.value += value; + prev.output = globstar(opts); + state.output = prev.output; + state.globstar = true; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); + prev.value += value; + state.globstar = true; + state.output += prior.output + prev.output; + consume(value); + continue; + } + + if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { + const end = rest[1] !== void 0 ? 
'|$' : ''; + + state.output = state.output.slice(0, -(prior.output + prev.output).length); + prior.output = `(?:${prior.output}`; + + prev.type = 'globstar'; + prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; + prev.value += value; + + state.output += prior.output + prev.output; + state.globstar = true; + + consume(value + advance()); + + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + if (prior.type === 'bos' && rest[0] === '/') { + prev.type = 'globstar'; + prev.value += value; + prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; + state.output = prev.output; + state.globstar = true; + consume(value + advance()); + push({ type: 'slash', value: '/', output: '' }); + continue; + } + + // remove single star from output + state.output = state.output.slice(0, -prev.output.length); + + // reset previous token to globstar + prev.type = 'globstar'; + prev.output = globstar(opts); + prev.value += value; + + // reset output with globstar + state.output += prev.output; + state.globstar = true; + consume(value); + continue; + } + + const token = { type: 'star', value, output: star }; + + if (opts.bash === true) { + token.output = '.*?'; + if (prev.type === 'bos' || prev.type === 'slash') { + token.output = nodot + token.output; + } + push(token); + continue; + } + + if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { + token.output = value; + push(token); + continue; + } + + if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { + if (prev.type === 'dot') { + state.output += NO_DOT_SLASH; + prev.output += NO_DOT_SLASH; + + } else if (opts.dot === true) { + state.output += NO_DOTS_SLASH; + prev.output += NO_DOTS_SLASH; + + } else { + state.output += nodot; + prev.output += nodot; + } + + if (peek() !== '*') { + state.output += ONE_CHAR; + prev.output += ONE_CHAR; + } + } + + push(token); + } + + while (state.brackets > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); + state.output = utils.escapeLast(state.output, '['); + decrement('brackets'); + } + + while (state.parens > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); + state.output = utils.escapeLast(state.output, '('); + decrement('parens'); + } + + while (state.braces > 0) { + if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); + state.output = utils.escapeLast(state.output, '{'); + decrement('braces'); + } + + if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { + push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); + } + + // rebuild the output if we had to backtrack at any point + if (state.backtrack === true) { + state.output = ''; + + for (const token of state.tokens) { + state.output += token.output != null ? token.output : token.value; + + if (token.suffix) { + state.output += token.suffix; + } + } + } + + return state; +}; + +/** + * Fast paths for creating regular expressions for common glob patterns. + * This can significantly speed up processing and has very little downside + * impact when none of the fast paths match. + */ + +parse.fastpaths = (input, options) => { + const opts = { ...options }; + const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; + const len = input.length; + if (len > max) { + throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); + } + + input = REPLACEMENTS[input] || input; + const win32 = utils.isWindows(options); + + // create constants based on platform, for windows or posix + const { + DOT_LITERAL, + SLASH_LITERAL, + ONE_CHAR, + DOTS_SLASH, + NO_DOT, + NO_DOTS, + NO_DOTS_SLASH, + STAR, + START_ANCHOR + } = constants.globChars(win32); + + const nodot = opts.dot ? NO_DOTS : NO_DOT; + const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; + const capture = opts.capture ? '' : '?:'; + const state = { negated: false, prefix: '' }; + let star = opts.bash === true ? '.*?' : STAR; + + if (opts.capture) { + star = `(${star})`; + } + + const globstar = opts => { + if (opts.noglobstar === true) return star; + return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; + }; + + const create = str => { + switch (str) { + case '*': + return `${nodot}${ONE_CHAR}${star}`; + + case '.*': + return `${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*.*': + return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '*/*': + return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; + + case '**': + return nodot + globstar(opts); + + case '**/*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; + + case '**/*.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; + + case '**/.*': + return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; + + default: { + const match = /^(.*?)\.(\w+)$/.exec(str); + if (!match) return; + + const source = create(match[1]); + if (!source) return; + + return source + DOT_LITERAL + match[2]; + } + } + }; + + const output = utils.removePrefix(input, state); + let source = create(output); + + if (source && opts.strictSlashes !== true) { + source += `${SLASH_LITERAL}?`; + } + + return source; +}; + +module.exports = parse; diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js new file mode 100644 index 0000000..782d809 --- /dev/null +++ b/node_modules/picomatch/lib/picomatch.js @@ -0,0 +1,342 @@ +'use strict'; + +const path = require('path'); +const scan = require('./scan'); +const parse = require('./parse'); +const utils = require('./utils'); +const constants = require('./constants'); +const isObject = val => val && typeof val === 'object' && !Array.isArray(val); + +/** + * Creates a matcher function from one or more glob patterns. The + * returned function takes a string to match as its first argument, + * and returns true if the string is a match. The returned matcher + * function also takes a boolean as the second argument that, when true, + * returns an object with additional information. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch(glob[, options]); + * + * const isMatch = picomatch('*.!(*a)'); + * console.log(isMatch('a.a')); //=> false + * console.log(isMatch('a.b')); //=> true + * ``` + * @name picomatch + * @param {String|Array} `globs` One or more glob patterns. + * @param {Object=} `options` + * @return {Function=} Returns a matcher function. 
+ * @api public + */ + +const picomatch = (glob, options, returnState = false) => { + if (Array.isArray(glob)) { + const fns = glob.map(input => picomatch(input, options, returnState)); + const arrayMatcher = str => { + for (const isMatch of fns) { + const state = isMatch(str); + if (state) return state; + } + return false; + }; + return arrayMatcher; + } + + const isState = isObject(glob) && glob.tokens && glob.input; + + if (glob === '' || (typeof glob !== 'string' && !isState)) { + throw new TypeError('Expected pattern to be a non-empty string'); + } + + const opts = options || {}; + const posix = utils.isWindows(options); + const regex = isState + ? picomatch.compileRe(glob, options) + : picomatch.makeRe(glob, options, false, true); + + const state = regex.state; + delete regex.state; + + let isIgnored = () => false; + if (opts.ignore) { + const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; + isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); + } + + const matcher = (input, returnObject = false) => { + const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); + const result = { glob, state, regex, posix, input, output, match, isMatch }; + + if (typeof opts.onResult === 'function') { + opts.onResult(result); + } + + if (isMatch === false) { + result.isMatch = false; + return returnObject ? result : false; + } + + if (isIgnored(input)) { + if (typeof opts.onIgnore === 'function') { + opts.onIgnore(result); + } + result.isMatch = false; + return returnObject ? result : false; + } + + if (typeof opts.onMatch === 'function') { + opts.onMatch(result); + } + return returnObject ? result : true; + }; + + if (returnState) { + matcher.state = state; + } + + return matcher; +}; + +/** + * Test `input` with the given `regex`. This is used by the main + * `picomatch()` function to test the input string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.test(input, regex[, options]); + * + * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); + * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } + * ``` + * @param {String} `input` String to test. + * @param {RegExp} `regex` + * @return {Object} Returns an object with matching info. + * @api public + */ + +picomatch.test = (input, regex, options, { glob, posix } = {}) => { + if (typeof input !== 'string') { + throw new TypeError('Expected input to be a string'); + } + + if (input === '') { + return { isMatch: false, output: '' }; + } + + const opts = options || {}; + const format = opts.format || (posix ? utils.toPosixSlashes : null); + let match = input === glob; + let output = (match && format) ? format(input) : input; + + if (match === false) { + output = format ? format(input) : input; + match = output === glob; + } + + if (match === false || opts.capture === true) { + if (opts.matchBase === true || opts.basename === true) { + match = picomatch.matchBase(input, regex, options, posix); + } else { + match = regex.exec(output); + } + } + + return { isMatch: Boolean(match), match, output }; +}; + +/** + * Match the basename of a filepath. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.matchBase(input, glob[, options]); + * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true + * ``` + * @param {String} `input` String to test. + * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
+ * @return {Boolean} + * @api public + */ + +picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { + const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); + return regex.test(path.basename(input)); +}; + +/** + * Returns true if **any** of the given glob `patterns` match the specified `string`. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.isMatch(string, patterns[, options]); + * + * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true + * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false + * ``` + * @param {String|Array} str The string to test. + * @param {String|Array} patterns One or more glob patterns to use for matching. + * @param {Object} [options] See available [options](#options). + * @return {Boolean} Returns true if any patterns match `str` + * @api public + */ + +picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); + +/** + * Parse a glob pattern to create the source string for a regular + * expression. + * + * ```js + * const picomatch = require('picomatch'); + * const result = picomatch.parse(pattern[, options]); + * ``` + * @param {String} `pattern` + * @param {Object} `options` + * @return {Object} Returns an object with useful properties and output to be used as a regex source string. + * @api public + */ + +picomatch.parse = (pattern, options) => { + if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); + return parse(pattern, { ...options, fastpaths: false }); +}; + +/** + * Scan a glob pattern to separate the pattern into segments. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.scan(input[, options]); + * + * const result = picomatch.scan('!./foo/*.js'); + * console.log(result); + * { prefix: '!./', + * input: '!./foo/*.js', + * start: 3, + * base: 'foo', + * glob: '*.js', + * isBrace: false, + * isBracket: false, + * isGlob: true, + * isExtglob: false, + * isGlobstar: false, + * negated: true } + * ``` + * @param {String} `input` Glob pattern to scan. + * @param {Object} `options` + * @return {Object} Returns an object with + * @api public + */ + +picomatch.scan = (input, options) => scan(input, options); + +/** + * Compile a regular expression from the `state` object returned by the + * [parse()](#parse) method. + * + * @param {Object} `state` + * @param {Object} `options` + * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. + * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. + * @return {RegExp} + * @api public + */ + +picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { + if (returnOutput === true) { + return state.output; + } + + const opts = options || {}; + const prepend = opts.contains ? '' : '^'; + const append = opts.contains ? '' : '$'; + + let source = `${prepend}(?:${state.output})${append}`; + if (state && state.negated === true) { + source = `^(?!${source}).*$`; + } + + const regex = picomatch.toRegex(source, options); + if (returnState === true) { + regex.state = state; + } + + return regex; +}; + +/** + * Create a regular expression from a parsed glob pattern. 
+ * + * ```js + * const picomatch = require('picomatch'); + * const state = picomatch.parse('*.js'); + * // picomatch.compileRe(state[, options]); + * + * console.log(picomatch.compileRe(state)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `state` The object returned from the `.parse` method. + * @param {Object} `options` + * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. + * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. + * @return {RegExp} Returns a regex created from the given pattern. + * @api public + */ + +picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { + if (!input || typeof input !== 'string') { + throw new TypeError('Expected a non-empty string'); + } + + let parsed = { negated: false, fastpaths: true }; + + if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { + parsed.output = parse.fastpaths(input, options); + } + + if (!parsed.output) { + parsed = parse(input, options); + } + + return picomatch.compileRe(parsed, options, returnOutput, returnState); +}; + +/** + * Create a regular expression from the given regex source string. + * + * ```js + * const picomatch = require('picomatch'); + * // picomatch.toRegex(source[, options]); + * + * const { output } = picomatch.parse('*.js'); + * console.log(picomatch.toRegex(output)); + * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ + * ``` + * @param {String} `source` Regular expression source string. + * @param {Object} `options` + * @return {RegExp} + * @api public + */ + +picomatch.toRegex = (source, options) => { + try { + const opts = options || {}; + return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); + } catch (err) { + if (options && options.debug === true) throw err; + return /$^/; + } +}; + +/** + * Picomatch constants. + * @return {Object} + */ + +picomatch.constants = constants; + +/** + * Expose "picomatch" + */ + +module.exports = picomatch; diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js new file mode 100644 index 0000000..e59cd7a --- /dev/null +++ b/node_modules/picomatch/lib/scan.js @@ -0,0 +1,391 @@ +'use strict'; + +const utils = require('./utils'); +const { + CHAR_ASTERISK, /* * */ + CHAR_AT, /* @ */ + CHAR_BACKWARD_SLASH, /* \ */ + CHAR_COMMA, /* , */ + CHAR_DOT, /* . */ + CHAR_EXCLAMATION_MARK, /* ! */ + CHAR_FORWARD_SLASH, /* / */ + CHAR_LEFT_CURLY_BRACE, /* { */ + CHAR_LEFT_PARENTHESES, /* ( */ + CHAR_LEFT_SQUARE_BRACKET, /* [ */ + CHAR_PLUS, /* + */ + CHAR_QUESTION_MARK, /* ? */ + CHAR_RIGHT_CURLY_BRACE, /* } */ + CHAR_RIGHT_PARENTHESES, /* ) */ + CHAR_RIGHT_SQUARE_BRACKET /* ] */ +} = require('./constants'); + +const isPathSeparator = code => { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +}; + +const depth = token => { + if (token.isPrefix !== true) { + token.depth = token.isGlobstar ? Infinity : 1; + } +}; + +/** + * Quickly scans a glob pattern and returns an object with a handful of + * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), + * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not + * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
+ * + * ```js + * const pm = require('picomatch'); + * console.log(pm.scan('foo/bar/*.js')); + * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } + * ``` + * @param {String} `str` + * @param {Object} `options` + * @return {Object} Returns an object with tokens and regex source string. + * @api public + */ + +const scan = (input, options) => { + const opts = options || {}; + + const length = input.length - 1; + const scanToEnd = opts.parts === true || opts.scanToEnd === true; + const slashes = []; + const tokens = []; + const parts = []; + + let str = input; + let index = -1; + let start = 0; + let lastIndex = 0; + let isBrace = false; + let isBracket = false; + let isGlob = false; + let isExtglob = false; + let isGlobstar = false; + let braceEscaped = false; + let backslashes = false; + let negated = false; + let negatedExtglob = false; + let finished = false; + let braces = 0; + let prev; + let code; + let token = { value: '', depth: 0, isGlob: false }; + + const eos = () => index >= length; + const peek = () => str.charCodeAt(index + 1); + const advance = () => { + prev = code; + return str.charCodeAt(++index); + }; + + while (index < length) { + code = advance(); + let next; + + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + + if (code === CHAR_LEFT_CURLY_BRACE) { + braceEscaped = true; + } + continue; + } + + if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { + braces++; + + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (code === CHAR_LEFT_CURLY_BRACE) { + braces++; + continue; + } + + if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (braceEscaped !== true && code === CHAR_COMMA) { + isBrace = token.isBrace = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_RIGHT_CURLY_BRACE) { + braces--; + + if (braces === 0) { + braceEscaped = false; + isBrace = token.isBrace = true; + finished = true; + break; + } + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (code === CHAR_FORWARD_SLASH) { + slashes.push(index); + tokens.push(token); + token = { value: '', depth: 0, isGlob: false }; + + if (finished === true) continue; + if (prev === CHAR_DOT && index === (start + 1)) { + start += 2; + continue; + } + + lastIndex = index + 1; + continue; + } + + if (opts.noext !== true) { + const isExtglobChar = code === CHAR_PLUS + || code === CHAR_AT + || code === CHAR_ASTERISK + || code === CHAR_QUESTION_MARK + || code === CHAR_EXCLAMATION_MARK; + + if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + isExtglob = token.isExtglob = true; + finished = true; + if (code === CHAR_EXCLAMATION_MARK && index === start) { + negatedExtglob = true; + } + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + isGlob = token.isGlob = true; + finished = true; + break; + } + } + continue; + } + break; + } + } + + if (code === CHAR_ASTERISK) { + if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_QUESTION_MARK) { + isGlob = token.isGlob = true; + finished = true; + + if (scanToEnd === true) { + continue; + } + break; + } + + if (code === CHAR_LEFT_SQUARE_BRACKET) { + while (eos() !== true && (next = advance())) { + if (next === CHAR_BACKWARD_SLASH) { + backslashes = token.backslashes = true; + advance(); + continue; + } + + if (next === CHAR_RIGHT_SQUARE_BRACKET) { + isBracket = token.isBracket = true; + isGlob = token.isGlob = true; + finished = true; + break; + } + } + + if (scanToEnd === true) { + continue; + } + + break; + } + + if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { + negated = token.negated = true; + start++; + continue; + } + + if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { + isGlob = token.isGlob = true; + + if (scanToEnd === true) { + while (eos() !== true && (code = advance())) { + if (code === CHAR_LEFT_PARENTHESES) { + backslashes = token.backslashes = true; + code = advance(); + continue; + } + + if (code === CHAR_RIGHT_PARENTHESES) { + finished = true; + break; + } + } + continue; + } + break; + } + + if (isGlob === true) { + finished = true; + + if (scanToEnd === true) { + continue; + } + + break; + } + } + + if (opts.noext === true) { + isExtglob = false; + isGlob = false; + } + + let base = str; + let prefix = ''; + let glob = ''; + + if (start > 0) { + prefix = str.slice(0, start); + str = str.slice(start); + lastIndex -= start; + } + + if (base && isGlob === true && lastIndex > 0) { + base = str.slice(0, lastIndex); + glob = str.slice(lastIndex); + } else if (isGlob === true) { + base = ''; + glob = str; + } else { + base = str; + } + + if (base && base !== '' && base !== '/' && base !== str) { + if (isPathSeparator(base.charCodeAt(base.length - 1))) { + base = base.slice(0, -1); + } + } + + if (opts.unescape === true) { + if (glob) glob = utils.removeBackslashes(glob); + + if (base && backslashes === true) { + base = utils.removeBackslashes(base); + } + } + + const state = { + prefix, + input, + start, + base, + glob, + isBrace, + isBracket, + isGlob, + isExtglob, + isGlobstar, + negated, + negatedExtglob + }; + + if (opts.tokens === true) { + state.maxDepth = 0; + if (!isPathSeparator(code)) { + tokens.push(token); + } + state.tokens = tokens; + } + + if (opts.parts === true || opts.tokens === true) { + let prevIndex; + + for (let idx = 0; idx < slashes.length; idx++) { + const n = prevIndex ? 
prevIndex + 1 : start; + const i = slashes[idx]; + const value = input.slice(n, i); + if (opts.tokens) { + if (idx === 0 && start !== 0) { + tokens[idx].isPrefix = true; + tokens[idx].value = prefix; + } else { + tokens[idx].value = value; + } + depth(tokens[idx]); + state.maxDepth += tokens[idx].depth; + } + if (idx !== 0 || value !== '') { + parts.push(value); + } + prevIndex = i; + } + + if (prevIndex && prevIndex + 1 < input.length) { + const value = input.slice(prevIndex + 1); + parts.push(value); + + if (opts.tokens) { + tokens[tokens.length - 1].value = value; + depth(tokens[tokens.length - 1]); + state.maxDepth += tokens[tokens.length - 1].depth; + } + } + + state.slashes = slashes; + state.parts = parts; + } + + return state; +}; + +module.exports = scan; diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js new file mode 100644 index 0000000..c3ca766 --- /dev/null +++ b/node_modules/picomatch/lib/utils.js @@ -0,0 +1,64 @@ +'use strict'; + +const path = require('path'); +const win32 = process.platform === 'win32'; +const { + REGEX_BACKSLASH, + REGEX_REMOVE_BACKSLASH, + REGEX_SPECIAL_CHARS, + REGEX_SPECIAL_CHARS_GLOBAL +} = require('./constants'); + +exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); +exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); +exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); +exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); +exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); + +exports.removeBackslashes = str => { + return str.replace(REGEX_REMOVE_BACKSLASH, match => { + return match === '\\' ? '' : match; + }); +}; + +exports.supportsLookbehinds = () => { + const segs = process.version.slice(1).split('.').map(Number); + if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { + return true; + } + return false; +}; + +exports.isWindows = options => { + if (options && typeof options.windows === 'boolean') { + return options.windows; + } + return win32 === true || path.sep === '\\'; +}; + +exports.escapeLast = (input, char, lastIdx) => { + const idx = input.lastIndexOf(char, lastIdx); + if (idx === -1) return input; + if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); + return `${input.slice(0, idx)}\\${input.slice(idx)}`; +}; + +exports.removePrefix = (input, state = {}) => { + let output = input; + if (output.startsWith('./')) { + output = output.slice(2); + state.prefix = './'; + } + return output; +}; + +exports.wrapOutput = (input, state = {}, options = {}) => { + const prepend = options.contains ? '' : '^'; + const append = options.contains ? 
'' : '$'; + + let output = `${prepend}(?:${input})${append}`; + if (state.negated === true) { + output = `(?:^(?!${output}).*$)`; + } + return output; +}; diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json new file mode 100644 index 0000000..3db22d4 --- /dev/null +++ b/node_modules/picomatch/package.json @@ -0,0 +1,81 @@ +{ + "name": "picomatch", + "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", + "version": "2.3.1", + "homepage": "https://github.com/micromatch/picomatch", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "funding": "https://github.com/sponsors/jonschlinkert", + "repository": "micromatch/picomatch", + "bugs": { + "url": "https://github.com/micromatch/picomatch/issues" + }, + "license": "MIT", + "files": [ + "index.js", + "lib" + ], + "main": "index.js", + "engines": { + "node": ">=8.6" + }, + "scripts": { + "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", + "mocha": "mocha --reporter dot", + "test": "npm run lint && npm run mocha", + "test:ci": "npm run test:cover", + "test:cover": "nyc npm run mocha" + }, + "devDependencies": { + "eslint": "^6.8.0", + "fill-range": "^7.0.1", + "gulp-format-md": "^2.0.0", + "mocha": "^6.2.2", + "nyc": "^15.0.0", + "time-require": "github:jonschlinkert/time-require" + }, + "keywords": [ + "glob", + "match", + "picomatch" + ], + "nyc": { + "reporter": [ + "html", + "lcov", + "text-summary" + ] + }, + "verb": { + "toc": { + "render": true, + "method": "preWrite", + "maxdepth": 3 + }, + "layout": "empty", + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "related": { + "list": [ + "braces", + "micromatch" + ] + }, + "reflinks": [ + "braces", + "expand-brackets", + "extglob", + "fill-range", + "micromatch", + "minimatch", + "nanomatch", + "picomatch" + ] + } +} diff --git a/node_modules/queue-microtask/LICENSE b/node_modules/queue-microtask/LICENSE new file mode 100755 index 0000000..c7e6852 --- /dev/null +++ b/node_modules/queue-microtask/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/queue-microtask/README.md b/node_modules/queue-microtask/README.md new file mode 100644 index 0000000..0be05a6 --- /dev/null +++ b/node_modules/queue-microtask/README.md @@ -0,0 +1,90 @@ +# queue-microtask [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[ci-image]: https://img.shields.io/github/workflow/status/feross/queue-microtask/ci/master +[ci-url]: https://github.com/feross/queue-microtask/actions +[npm-image]: https://img.shields.io/npm/v/queue-microtask.svg +[npm-url]: https://npmjs.org/package/queue-microtask +[downloads-image]: https://img.shields.io/npm/dm/queue-microtask.svg +[downloads-url]: https://npmjs.org/package/queue-microtask +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### fast, tiny [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) shim for modern engines + +- Use [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) in all modern JS engines. +- No dependencies. Less than 10 lines. No shims or complicated fallbacks. +- Optimal performance in all modern environments + - Uses `queueMicrotask` in modern environments + - Fallback to `Promise.resolve().then(fn)` in Node.js 10 and earlier, and old browsers (same performance as `queueMicrotask`) + +## install + +``` +npm install queue-microtask +``` + +## usage + +```js +const queueMicrotask = require('queue-microtask') + +queueMicrotask(() => { /* this will run soon */ }) +``` + +## What is `queueMicrotask` and why would one use it? + +The `queueMicrotask` function is a WHATWG standard. It queues a microtask to be executed prior to control returning to the event loop. + +A microtask is a short function which will run after the current task has completed its work and when there is no other code waiting to be run before control of the execution context is returned to the event loop. + +The code `queueMicrotask(fn)` is equivalent to the code `Promise.resolve().then(fn)`. It is also very similar to [`process.nextTick(fn)`](https://nodejs.org/api/process.html#process_process_nexttick_callback_args) in Node. + +Using microtasks lets code run without interfering with any other, potentially higher priority, code that is pending, but before the JS engine regains control over the execution context. + +See the [spec](https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#microtask-queuing) or [Node documentation](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback) for more information. + +## Who is this package for? + +This package allows you to use `queueMicrotask` safely in all modern JS engines. Use it if you prioritize small JS bundle size over support for old browsers. + +If you just need to support Node 12 and later, use `queueMicrotask` directly. If you need to support all versions of Node, use this package. + +## Why not use `process.nextTick`? + +In Node, `queueMicrotask` and `process.nextTick` are [essentially equivalent](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback), though there are [subtle differences](https://github.com/YuzuJS/setImmediate#macrotasks-and-microtasks) that don't matter in most situations. + +You can think of `queueMicrotask` as a standardized version of `process.nextTick` that works in the browser. 
No need to rely on your browser bundler to shim `process` for the browser environment. + +## Why not use `setTimeout(fn, 0)`? + +This approach is the most compatible, but it has problems. Modern browsers throttle timers severely, so `setTimeout(…, 0)` usually takes at least 4ms to run. Furthermore, the throttling gets even worse if the page is backgrounded. If you have many `setTimeout` calls, then this can severely limit the performance of your program. + +## Why not use a microtask library like [`immediate`](https://www.npmjs.com/package/immediate) or [`asap`](https://www.npmjs.com/package/asap)? + +These packages are great! However, if you prioritize small JS bundle size over optimal performance in old browsers then you may want to consider this package. + +This package (`queue-microtask`) is four times smaller than `immediate`, twice as small as `asap`, and twice as small as using `process.nextTick` and letting the browser bundler shim it automatically. + +Note: This package throws an exception in JS environments which lack `Promise` support -- which are usually very old browsers and Node.js versions. + +Since the `queueMicrotask` API is supported in Node.js, Chrome, Firefox, Safari, Opera, and Edge, **the vast majority of users will get optimal performance**. Any JS environment with `Promise`, which is almost all of them, also get optimal performance. If you need support for JS environments which lack `Promise` support, use one of the alternative packages. + +## What is a shim? + +> In computer programming, a shim is a library that transparently intercepts API calls and changes the arguments passed, handles the operation itself or redirects the operation elsewhere. – [Wikipedia](https://en.wikipedia.org/wiki/Shim_(computing)) + +This package could also be described as a "ponyfill". + +> A ponyfill is almost the same as a polyfill, but not quite. Instead of patching functionality for older browsers, a ponyfill provides that functionality as a standalone module you can use. – [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills) + +## API + +### `queueMicrotask(fn)` + +The `queueMicrotask()` method queues a microtask. + +The `fn` argument is a function to be executed after all pending tasks have completed but before yielding control to the browser's event loop. + +## license + +MIT. Copyright (c) [Feross Aboukhadijeh](https://feross.org). diff --git a/node_modules/queue-microtask/index.d.ts b/node_modules/queue-microtask/index.d.ts new file mode 100644 index 0000000..b6a8646 --- /dev/null +++ b/node_modules/queue-microtask/index.d.ts @@ -0,0 +1,2 @@ +declare const queueMicrotask: (cb: () => void) => void +export = queueMicrotask diff --git a/node_modules/queue-microtask/index.js b/node_modules/queue-microtask/index.js new file mode 100644 index 0000000..5560534 --- /dev/null +++ b/node_modules/queue-microtask/index.js @@ -0,0 +1,9 @@ +/*! queue-microtask. MIT License. Feross Aboukhadijeh */ +let promise + +module.exports = typeof queueMicrotask === 'function' + ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) + // reuse resolved promise, and allocate it lazily + : cb => (promise || (promise = Promise.resolve())) + .then(cb) + .catch(err => setTimeout(() => { throw err }, 0)) diff --git a/node_modules/queue-microtask/package.json b/node_modules/queue-microtask/package.json new file mode 100644 index 0000000..d29a401 --- /dev/null +++ b/node_modules/queue-microtask/package.json @@ -0,0 +1,55 @@ +{ + "name": "queue-microtask", + "description": "fast, tiny `queueMicrotask` shim for modern engines", + "version": "1.2.3", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/queue-microtask/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.2.2" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/queue-microtask", + "keywords": [ + "asap", + "immediate", + "micro task", + "microtask", + "nextTick", + "process.nextTick", + "queue micro task", + "queue microtask", + "queue-microtask", + "queueMicrotask", + "setImmediate", + "task" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/queue-microtask.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/reusify/.github/dependabot.yml b/node_modules/reusify/.github/dependabot.yml new file mode 100644 index 0000000..4872c5a --- /dev/null +++ b/node_modules/reusify/.github/dependabot.yml @@ -0,0 +1,7 @@ +version: 2 +updates: +- package-ecosystem: npm + directory: "/" + schedule: + interval: daily + open-pull-requests-limit: 10 diff --git a/node_modules/reusify/.github/workflows/ci.yml b/node_modules/reusify/.github/workflows/ci.yml new file mode 100644 index 0000000..1e30ad8 --- /dev/null +++ b/node_modules/reusify/.github/workflows/ci.yml @@ -0,0 +1,96 @@ +name: ci + +on: [push, pull_request] + +jobs: + legacy: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install --production && npm install tape + + - name: Run tests + run: | + npm run test + + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [18.x, 20.x, 22.x] + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + + - name: Install + run: | + npm install + + - name: Run tests + run: | + npm run test:coverage + + types: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install + run: | + npm install + + - name: Run types tests + run: | + npm run test:typescript + + lint: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Use Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + + - name: Install + run: | + npm install + + - name: Lint + run: | + npm run lint diff 
--git a/node_modules/reusify/LICENSE b/node_modules/reusify/LICENSE new file mode 100644 index 0000000..56d1590 --- /dev/null +++ b/node_modules/reusify/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015-2024 Matteo Collina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/reusify/README.md b/node_modules/reusify/README.md new file mode 100644 index 0000000..1aaee5d --- /dev/null +++ b/node_modules/reusify/README.md @@ -0,0 +1,139 @@ +# reusify + +[![npm version][npm-badge]][npm-url] + +Reuse your objects and functions for maximum speed. This technique will +make any function run ~10% faster. You call your functions a +lot, and it adds up quickly in hot code paths. + +``` +$ node benchmarks/createNoCodeFunction.js +Total time 53133 +Total iterations 100000000 +Iteration/s 1882069.5236482036 + +$ node benchmarks/reuseNoCodeFunction.js +Total time 50617 +Total iterations 100000000 +Iteration/s 1975620.838848608 +``` + +The above benchmark uses fibonacci to simulate a real high-cpu load. +The actual numbers might differ for your use case, but the difference +should not. + +The benchmark was taken using Node v6.10.0. + +This library was extracted from +[fastparallel](http://npm.im/fastparallel). + +## Example + +```js +var reusify = require('reusify') +var fib = require('reusify/benchmarks/fib') +var instance = reusify(MyObject) + +// get an object from the cache, +// or creates a new one when cache is empty +var obj = instance.get() + +// set the state +obj.num = 100 +obj.func() + +// reset the state. 
+// if the state contains any external object
+// do not use delete operator (it is slow)
+// prefer setting them to null
+obj.num = 0
+
+// store an object in the cache
+instance.release(obj)
+
+function MyObject () {
+  // you need to define this property
+  // so V8 can compile MyObject into a
+  // hidden class
+  this.next = null
+  this.num = 0
+
+  var that = this
+
+  // this function is never reallocated,
+  // so it can be optimized by V8
+  this.func = function () {
+    if (null) {
+      // do nothing
+    } else {
+      // calculates fibonacci
+      fib(that.num)
+    }
+  }
+}
+```
+
+The above example was intended for synchronous code; let's see an async version:
+```js
+var reusify = require('reusify')
+var instance = reusify(MyObject)
+
+for (var i = 0; i < 100; i++) {
+  getData(i, console.log)
+}
+
+function getData (value, cb) {
+  var obj = instance.get()
+
+  obj.value = value
+  obj.cb = cb
+  obj.run()
+}
+
+function MyObject () {
+  this.next = null
+  this.value = null
+
+  var that = this
+
+  this.run = function () {
+    asyncOperation(that.value, that.handle)
+  }
+
+  this.handle = function (err, result) {
+    that.cb(err, result)
+    that.value = null
+    that.cb = null
+    instance.release(that)
+  }
+}
+```
+
+Also note how, in the above examples, the code that consumes an instance of `MyObject`
+resets the state to its initial condition just before storing it back in the cache.
+That's needed so that every subsequent request for an instance from the cache
+gets a clean instance.
+
+## Why
+
+It is faster because V8 doesn't have to collect all the functions you
+create. On a short-lived benchmark, it is as fast as creating the
+nested function, but on a longer time frame it creates less
+pressure on the garbage collector.
+
+## Other examples
+If you want to see more complex examples, check out [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed).
+
+## Acknowledgements
+
+Thanks to [Trevor Norris](https://github.com/trevnorris) for
+getting me down the rabbit hole of performance, and thanks to [Mathias
+Buss](http://github.com/mafintosh) for suggesting that I share this
+trick.
+
+## License
+
+MIT
+
+[npm-badge]: https://badge.fury.io/js/reusify.svg
+[npm-url]: https://badge.fury.io/js/reusify
diff --git a/node_modules/reusify/SECURITY.md b/node_modules/reusify/SECURITY.md
new file mode 100644
index 0000000..dd9f1d5
--- /dev/null
+++ b/node_modules/reusify/SECURITY.md
@@ -0,0 +1,15 @@
+# Security Policy
+
+## Supported Versions
+
+The following versions of this project are currently being supported with
+security updates.
+
+| Version | Supported          |
+| ------- | ------------------ |
+| 1.x     | :white_check_mark: |
+| < 1.0   | :x:                |
+
+## Reporting a Vulnerability
+
+Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security).
diff --git a/node_modules/reusify/benchmarks/createNoCodeFunction.js b/node_modules/reusify/benchmarks/createNoCodeFunction.js new file mode 100644 index 0000000..ce1aac7 --- /dev/null +++ b/node_modules/reusify/benchmarks/createNoCodeFunction.js @@ -0,0 +1,30 @@ +'use strict' + +var fib = require('./fib') +var max = 100000000 +var start = Date.now() + +// create a funcion with the typical error +// pattern, that delegates the heavy load +// to something else +function createNoCodeFunction () { + /* eslint no-constant-condition: "off" */ + var num = 100 + + ;(function () { + if (null) { + // do nothing + } else { + fib(num) + } + })() +} + +for (var i = 0; i < max; i++) { + createNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/benchmarks/fib.js b/node_modules/reusify/benchmarks/fib.js new file mode 100644 index 0000000..e22cc48 --- /dev/null +++ b/node_modules/reusify/benchmarks/fib.js @@ -0,0 +1,13 @@ +'use strict' + +function fib (num) { + var fib = [] + + fib[0] = 0 + fib[1] = 1 + for (var i = 2; i <= num; i++) { + fib[i] = fib[i - 2] + fib[i - 1] + } +} + +module.exports = fib diff --git a/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/node_modules/reusify/benchmarks/reuseNoCodeFunction.js new file mode 100644 index 0000000..3358d6e --- /dev/null +++ b/node_modules/reusify/benchmarks/reuseNoCodeFunction.js @@ -0,0 +1,38 @@ +'use strict' + +var reusify = require('../') +var fib = require('./fib') +var instance = reusify(MyObject) +var max = 100000000 +var start = Date.now() + +function reuseNoCodeFunction () { + var obj = instance.get() + obj.num = 100 + obj.func() + obj.num = 0 + instance.release(obj) +} + +function MyObject () { + this.next = null + var that = this + this.num = 0 + this.func = function () { + /* eslint no-constant-condition: "off" */ + if (null) { + // do nothing + } else { + fib(that.num) + } + } +} + +for (var i = 0; i < max; i++) { + reuseNoCodeFunction() +} + +var time = Date.now() - start +console.log('Total time', time) +console.log('Total iterations', max) +console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/eslint.config.js b/node_modules/reusify/eslint.config.js new file mode 100644 index 0000000..d0a9af6 --- /dev/null +++ b/node_modules/reusify/eslint.config.js @@ -0,0 +1,14 @@ +'use strict' + +const base = require('neostandard')({}) + +module.exports = [ + ...base, + { + name: 'old-standard', + rules: { + 'no-var': 'off', + 'object-shorthand': 'off', + } + } +] diff --git a/node_modules/reusify/package.json b/node_modules/reusify/package.json new file mode 100644 index 0000000..e47ff11 --- /dev/null +++ b/node_modules/reusify/package.json @@ -0,0 +1,50 @@ +{ + "name": "reusify", + "version": "1.1.0", + "description": "Reuse objects and functions with style", + "main": "reusify.js", + "types": "reusify.d.ts", + "scripts": { + "lint": "eslint", + "test": "tape test.js", + "test:coverage": "c8 --100 tape test.js", + "test:typescript": "tsc" + }, + "pre-commit": [ + "lint", + "test", + "test:typescript" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/reusify.git" + }, + "keywords": [ + "reuse", + "object", + "performance", + "function", + "fast" + ], + "author": "Matteo Collina ", + "license": "MIT", + "bugs": { + "url": "https://github.com/mcollina/reusify/issues" + }, + "homepage": "https://github.com/mcollina/reusify#readme", + 
"engines": { + "node": ">=0.10.0", + "iojs": ">=1.0.0" + }, + "devDependencies": { + "@types/node": "^22.9.0", + "eslint": "^9.13.0", + "neostandard": "^0.12.0", + "pre-commit": "^1.2.2", + "tape": "^5.0.0", + "c8": "^10.1.2", + "typescript": "^5.2.2" + }, + "dependencies": { + } +} diff --git a/node_modules/reusify/reusify.d.ts b/node_modules/reusify/reusify.d.ts new file mode 100644 index 0000000..9ba277d --- /dev/null +++ b/node_modules/reusify/reusify.d.ts @@ -0,0 +1,14 @@ +interface Node { + next: Node | null; +} + +interface Constructor { + new(): T; +} + +declare function reusify(constructor: Constructor): { + get(): T; + release(node: T): void; +}; + +export = reusify; diff --git a/node_modules/reusify/reusify.js b/node_modules/reusify/reusify.js new file mode 100644 index 0000000..e6f36f3 --- /dev/null +++ b/node_modules/reusify/reusify.js @@ -0,0 +1,33 @@ +'use strict' + +function reusify (Constructor) { + var head = new Constructor() + var tail = head + + function get () { + var current = head + + if (current.next) { + head = current.next + } else { + head = new Constructor() + tail = head + } + + current.next = null + + return current + } + + function release (obj) { + tail.next = obj + tail = obj + } + + return { + get: get, + release: release + } +} + +module.exports = reusify diff --git a/node_modules/reusify/test.js b/node_modules/reusify/test.js new file mode 100644 index 0000000..929cfd7 --- /dev/null +++ b/node_modules/reusify/test.js @@ -0,0 +1,66 @@ +'use strict' + +var test = require('tape') +var reusify = require('./') + +test('reuse objects', function (t) { + t.plan(6) + + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + + t.notEqual(obj, instance.get(), 'two instance created') + t.notOk(obj.next, 'next must be null') + + instance.release(obj) + + // the internals keeps a hot copy ready for reuse + // putting this one back in the queue + instance.release(instance.get()) + + // comparing the old one with the one we got + // never do this in real code, after release you + // should never reuse that instance + t.equal(obj, instance.get(), 'instance must be reused') +}) + +test('reuse more than 2 objects', function (t) { + function MyObject () { + t.pass('constructor called') + this.next = null + } + + var instance = reusify(MyObject) + var obj = instance.get() + var obj2 = instance.get() + var obj3 = instance.get() + + t.notOk(obj.next, 'next must be null') + t.notOk(obj2.next, 'next must be null') + t.notOk(obj3.next, 'next must be null') + + t.notEqual(obj, obj2) + t.notEqual(obj, obj3) + t.notEqual(obj3, obj2) + + instance.release(obj) + instance.release(obj2) + instance.release(obj3) + + // skip one + instance.get() + + var obj4 = instance.get() + var obj5 = instance.get() + var obj6 = instance.get() + + t.equal(obj4, obj) + t.equal(obj5, obj2) + t.equal(obj6, obj3) + t.end() +}) diff --git a/node_modules/reusify/tsconfig.json b/node_modules/reusify/tsconfig.json new file mode 100644 index 0000000..dbe862b --- /dev/null +++ b/node_modules/reusify/tsconfig.json @@ -0,0 +1,11 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "noEmit": true, + "strict": true + }, + "files": [ + "./reusify.d.ts" + ] +} diff --git a/node_modules/run-parallel/LICENSE b/node_modules/run-parallel/LICENSE new file mode 100644 index 0000000..c7e6852 --- /dev/null +++ b/node_modules/run-parallel/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross 
Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/run-parallel/README.md b/node_modules/run-parallel/README.md new file mode 100644 index 0000000..edc3da4 --- /dev/null +++ b/node_modules/run-parallel/README.md @@ -0,0 +1,85 @@ +# run-parallel [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/run-parallel/master.svg +[travis-url]: https://travis-ci.org/feross/run-parallel +[npm-image]: https://img.shields.io/npm/v/run-parallel.svg +[npm-url]: https://npmjs.org/package/run-parallel +[downloads-image]: https://img.shields.io/npm/dm/run-parallel.svg +[downloads-url]: https://npmjs.org/package/run-parallel +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### Run an array of functions in parallel + +![parallel](https://raw.githubusercontent.com/feross/run-parallel/master/img.png) [![Sauce Test Status](https://saucelabs.com/browser-matrix/run-parallel.svg)](https://saucelabs.com/u/run-parallel) + +### install + +``` +npm install run-parallel +``` + +### usage + +#### parallel(tasks, [callback]) + +Run the `tasks` array of functions in parallel, without waiting until the previous +function has completed. If any of the functions pass an error to its callback, the main +`callback` is immediately called with the value of the error. Once the `tasks` have +completed, the results are passed to the final `callback` as an array. + +It is also possible to use an object instead of an array. Each property will be run as a +function and the results will be passed to the final `callback` as an object instead of +an array. This can be a more readable way of handling the results. + +##### arguments + +- `tasks` - An array or object containing functions to run. Each function is passed a +`callback(err, result)` which it must call on completion with an error `err` (which can +be `null`) and an optional `result` value. +- `callback(err, results)` - An optional callback to run once all the functions have +completed. This function gets a results array (or object) containing all the result +arguments passed to the task callbacks. 
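+
+For the object form described above, a minimal sketch might look like this
+(the `user` and `posts` task names are only illustrative); the array form
+follows in the example below:
+
+```js
+var parallel = require('run-parallel')
+
+parallel({
+  user: function (callback) {
+    setTimeout(function () { callback(null, { name: 'ada' }) }, 100)
+  },
+  posts: function (callback) {
+    setTimeout(function () { callback(null, ['post1', 'post2']) }, 50)
+  }
+}, function (err, results) {
+  // results is { user: { name: 'ada' }, posts: ['post1', 'post2'] }
+})
+```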
+
+##### example
+
+```js
+var parallel = require('run-parallel')
+
+parallel([
+  function (callback) {
+    setTimeout(function () {
+      callback(null, 'one')
+    }, 200)
+  },
+  function (callback) {
+    setTimeout(function () {
+      callback(null, 'two')
+    }, 100)
+  }
+],
+// optional callback
+function (err, results) {
+  // the results array will equal ['one','two'] even though
+  // the second function had a shorter timeout.
+})
+```
+
+This module is basically equivalent to
+[`async.parallel`](https://github.com/caolan/async#paralleltasks-callback), but it's
+handy to just have the one function you need instead of the kitchen sink. Modularity!
+Especially handy if you're serving to the browser and need to reduce your javascript
+bundle size.
+
+Works great in the browser with [browserify](http://browserify.org/)!
+
+### see also
+
+- [run-auto](https://github.com/feross/run-auto)
+- [run-parallel-limit](https://github.com/feross/run-parallel-limit)
+- [run-series](https://github.com/feross/run-series)
+- [run-waterfall](https://github.com/feross/run-waterfall)
+
+### license
+
+MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org).
diff --git a/node_modules/run-parallel/index.js b/node_modules/run-parallel/index.js
new file mode 100644
index 0000000..6307141
--- /dev/null
+++ b/node_modules/run-parallel/index.js
@@ -0,0 +1,51 @@
+/*! run-parallel. MIT License. Feross Aboukhadijeh */
+module.exports = runParallel
+
+const queueMicrotask = require('queue-microtask')
+
+function runParallel (tasks, cb) {
+  let results, pending, keys
+  let isSync = true
+
+  if (Array.isArray(tasks)) {
+    results = []
+    pending = tasks.length
+  } else {
+    keys = Object.keys(tasks)
+    results = {}
+    pending = keys.length
+  }
+
+  function done (err) {
+    function end () {
+      if (cb) cb(err, results)
+      cb = null
+    }
+    if (isSync) queueMicrotask(end)
+    else end()
+  }
+
+  function each (i, err, result) {
+    results[i] = result
+    if (--pending === 0 || err) {
+      done(err)
+    }
+  }
+
+  if (!pending) {
+    // empty
+    done(null)
+  } else if (keys) {
+    // object
+    keys.forEach(function (key) {
+      tasks[key](function (err, result) { each(key, err, result) })
+    })
+  } else {
+    // array
+    tasks.forEach(function (task, i) {
+      task(function (err, result) { each(i, err, result) })
+    })
+  }
+
+  isSync = false
+}
diff --git a/node_modules/run-parallel/package.json b/node_modules/run-parallel/package.json
new file mode 100644
index 0000000..1f14757
--- /dev/null
+++ b/node_modules/run-parallel/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "run-parallel",
+  "description": "Run an array of functions in parallel",
+  "version": "1.2.0",
+  "author": {
+    "name": "Feross Aboukhadijeh",
+    "email": "feross@feross.org",
+    "url": "https://feross.org"
+  },
+  "bugs": {
+    "url": "https://github.com/feross/run-parallel/issues"
+  },
+  "dependencies": {
+    "queue-microtask": "^1.2.2"
+  },
+  "devDependencies": {
+    "airtap": "^3.0.0",
+    "standard": "*",
+    "tape": "^5.0.1"
+  },
+  "homepage": "https://github.com/feross/run-parallel",
+  "keywords": [
+    "parallel",
+    "async",
+    "function",
+    "callback",
+    "asynchronous",
+    "run",
+    "array",
+    "run parallel"
+  ],
+  "license": "MIT",
+  "main": "index.js",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/feross/run-parallel.git"
+  },
+  "scripts": {
+    "test": "standard && npm run test-node && npm run test-browser",
+    "test-browser": "airtap -- test/*.js",
+    "test-browser-local": "airtap --local -- test/*.js",
+    "test-node": "tape test/*.js"
+  },
+  "funding": [
+    {
+      "type": "github",
+      "url":
"https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/shebang-command/index.js b/node_modules/shebang-command/index.js new file mode 100644 index 0000000..f35db30 --- /dev/null +++ b/node_modules/shebang-command/index.js @@ -0,0 +1,19 @@ +'use strict'; +const shebangRegex = require('shebang-regex'); + +module.exports = (string = '') => { + const match = string.match(shebangRegex); + + if (!match) { + return null; + } + + const [path, argument] = match[0].replace(/#! ?/, '').split(' '); + const binary = path.split('/').pop(); + + if (binary === 'env') { + return argument; + } + + return argument ? `${binary} ${argument}` : binary; +}; diff --git a/node_modules/shebang-command/license b/node_modules/shebang-command/license new file mode 100644 index 0000000..db6bc32 --- /dev/null +++ b/node_modules/shebang-command/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Kevin Mårtensson (github.com/kevva) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/shebang-command/package.json b/node_modules/shebang-command/package.json new file mode 100644 index 0000000..18e3c04 --- /dev/null +++ b/node_modules/shebang-command/package.json @@ -0,0 +1,34 @@ +{ + "name": "shebang-command", + "version": "2.0.0", + "description": "Get the command from a shebang", + "license": "MIT", + "repository": "kevva/shebang-command", + "author": { + "name": "Kevin Mårtensson", + "email": "kevinmartensson@gmail.com", + "url": "github.com/kevva" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "cmd", + "command", + "parse", + "shebang" + ], + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "devDependencies": { + "ava": "^2.3.0", + "xo": "^0.24.0" + } +} diff --git a/node_modules/shebang-command/readme.md b/node_modules/shebang-command/readme.md new file mode 100644 index 0000000..84feb44 --- /dev/null +++ b/node_modules/shebang-command/readme.md @@ -0,0 +1,34 @@ +# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command) + +> Get the command from a shebang + + +## Install + +``` +$ npm install shebang-command +``` + + +## Usage + +```js +const shebangCommand = require('shebang-command'); + +shebangCommand('#!/usr/bin/env node'); +//=> 'node' + +shebangCommand('#!/bin/bash'); +//=> 'bash' +``` + + +## API + +### shebangCommand(string) + +#### string + +Type: `string` + +String containing a shebang. diff --git a/node_modules/shebang-regex/index.d.ts b/node_modules/shebang-regex/index.d.ts new file mode 100644 index 0000000..61d034b --- /dev/null +++ b/node_modules/shebang-regex/index.d.ts @@ -0,0 +1,22 @@ +/** +Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line. + +@example +``` +import shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` +*/ +declare const shebangRegex: RegExp; + +export = shebangRegex; diff --git a/node_modules/shebang-regex/index.js b/node_modules/shebang-regex/index.js new file mode 100644 index 0000000..63fc4a0 --- /dev/null +++ b/node_modules/shebang-regex/index.js @@ -0,0 +1,2 @@ +'use strict'; +module.exports = /^#!(.*)/; diff --git a/node_modules/shebang-regex/license b/node_modules/shebang-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/shebang-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/shebang-regex/package.json b/node_modules/shebang-regex/package.json new file mode 100644 index 0000000..00ab30f --- /dev/null +++ b/node_modules/shebang-regex/package.json @@ -0,0 +1,35 @@ +{ + "name": "shebang-regex", + "version": "3.0.0", + "description": "Regular expression for matching a shebang line", + "license": "MIT", + "repository": "sindresorhus/shebang-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "regex", + "regexp", + "shebang", + "match", + "test", + "line" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/shebang-regex/readme.md b/node_modules/shebang-regex/readme.md new file mode 100644 index 0000000..5ecf863 --- /dev/null +++ b/node_modules/shebang-regex/readme.md @@ -0,0 +1,33 @@ +# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex) + +> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line + + +## Install + +``` +$ npm install shebang-regex +``` + + +## Usage + +```js +const shebangRegex = require('shebang-regex'); + +const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; + +shebangRegex.test(string); +//=> true + +shebangRegex.exec(string)[0]; +//=> '#!/usr/bin/env node' + +shebangRegex.exec(string)[1]; +//=> '/usr/bin/env node' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/shelljs/LICENSE b/node_modules/shelljs/LICENSE new file mode 100644 index 0000000..40a2bf6 --- /dev/null +++ b/node_modules/shelljs/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2012, Artur Adib +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/shelljs/README.md b/node_modules/shelljs/README.md new file mode 100644 index 0000000..a0de676 --- /dev/null +++ b/node_modules/shelljs/README.md @@ -0,0 +1,949 @@ +# ShellJS - Unix shell commands for Node.js + +[![GitHub Actions](https://img.shields.io/github/actions/workflow/status/shelljs/shelljs/main.yml?style=flat-square&logo=github)](https://github.com/shelljs/shelljs/actions/workflows/main.yml) +[![Codecov](https://img.shields.io/codecov/c/github/shelljs/shelljs/main.svg?style=flat-square&label=coverage)](https://codecov.io/gh/shelljs/shelljs) +[![npm version](https://img.shields.io/npm/v/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) +[![npm downloads](https://img.shields.io/npm/dm/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) + +ShellJS is a portable **(Windows/Linux/macOS)** implementation of Unix shell +commands on top of the Node.js API. You can use it to eliminate your shell +script's dependency on Unix while still keeping its familiar and powerful +commands. You can also install it globally so you can run it from outside Node +projects - say goodbye to those gnarly Bash scripts! + +ShellJS is proudly tested on every LTS node release since `v18`! + +The project is unit-tested and battle-tested in projects like: + ++ [Firebug](http://getfirebug.com/) - Firefox's infamous debugger ++ [JSHint](http://jshint.com) & [ESLint](http://eslint.org/) - popular JavaScript linters ++ [Zepto](http://zeptojs.com) - jQuery-compatible JavaScript library for modern browsers ++ [Yeoman](http://yeoman.io/) - Web application stack and development tool ++ [Deployd.com](http://deployd.com) - Open source PaaS for quick API backend generation ++ And [many more](https://npmjs.org/browse/depended/shelljs). + +If you have feedback, suggestions, or need help, feel free to post in our [issue +tracker](https://github.com/shelljs/shelljs/issues). + +Think ShellJS is cool? Check out some related projects in our [Wiki +page](https://github.com/shelljs/shelljs/wiki)! + +Upgrading from an older version? Check out our [breaking +changes](https://github.com/shelljs/shelljs/wiki/Breaking-Changes) page to see +what changes to watch out for while upgrading. + +## Command line use + +If you just want cross platform UNIX commands, checkout our new project +[shelljs/shx](https://github.com/shelljs/shx), a utility to expose `shelljs` to +the command line. + +For example: + +``` +$ shx mkdir -p foo +$ shx touch foo/bar.txt +$ shx rm -rf foo +``` + +## Plugin API + +ShellJS now supports third-party plugins! You can learn more about using plugins +and writing your own ShellJS commands in [the +wiki](https://github.com/shelljs/shelljs/wiki/Using-ShellJS-Plugins). + +## A quick note about the docs + +For documentation on all the latest features, check out our +[README](https://github.com/shelljs/shelljs). To read docs that are consistent +with the latest release, check out [the npm +page](https://www.npmjs.com/package/shelljs). 
+ +## Installing + +Via npm: + +```bash +$ npm install [-g] shelljs +``` + +## Examples + +```javascript +var shell = require('shelljs'); + +if (!shell.which('git')) { + shell.echo('Sorry, this script requires git'); + shell.exit(1); +} + +// Copy files to release dir +shell.rm('-rf', 'out/Release'); +shell.cp('-R', 'stuff/', 'out/Release'); + +// Replace macros in each .js file +shell.cd('lib'); +shell.ls('*.js').forEach(function (file) { + shell.sed('-i', 'BUILD_VERSION', 'v0.1.2', file); + shell.sed('-i', /^.*REMOVE_THIS_LINE.*$/, '', file); + shell.sed('-i', /.*REPLACE_LINE_WITH_MACRO.*\n/, shell.cat('macro.js'), file); +}); +shell.cd('..'); + +// Run external tool synchronously +if (shell.exec('git commit -am "Auto-commit"').code !== 0) { + shell.echo('Error: Git commit failed'); + shell.exit(1); +} +``` + +## Exclude options + +If you need to pass a parameter that looks like an option, you can do so like: + +```js +shell.grep('--', '-v', 'path/to/file'); // Search for "-v", no grep options + +shell.cp('-R', '-dir', 'outdir'); // If already using an option, you're done +``` + +## Global vs. Local + +We no longer recommend using a global-import for ShellJS (i.e. +`require('shelljs/global')`). While still supported for convenience, this +pollutes the global namespace, and should therefore only be used with caution. + +Instead, we recommend a local import (standard for npm packages): + +```javascript +var shell = require('shelljs'); +shell.echo('hello world'); +``` + +Alternatively, we also support importing as a module with: + +```javascript +import shell from 'shelljs'; +shell.echo('hello world'); +``` + + + + +## Command reference + + +All commands run synchronously, unless otherwise stated. +All commands accept standard bash globbing characters (`*`, `?`, etc.), +compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). + +For less-commonly used commands and features, please check out our [wiki +page](https://github.com/shelljs/shelljs/wiki). + + +### cat([options,] file [, file ...]) +### cat([options,] file_array) + +Available options: + ++ `-n`: number all output lines + +Examples: + +```javascript +var str = cat('file*.txt'); +var str = cat('file1', 'file2'); +var str = cat(['file1', 'file2']); // same as above +``` + +Returns a [ShellString](#shellstringstr) containing the given file, or a +concatenated string containing the files if more than one file is given (a +new line character is introduced between each file). + + +### cd([dir]) + +Changes to directory `dir` for the duration of the script. Changes to home +directory if no argument is supplied. Returns a +[ShellString](#shellstringstr) to indicate success or failure. + + +### chmod([options,] octal_mode || octal_string, file) +### chmod([options,] symbolic_mode, file) + +Available options: + ++ `-v`: output a diagnostic for every file processed ++ `-c`: like verbose, but report only when a change is made ++ `-R`: change files and directories recursively + +Examples: + +```javascript +chmod(755, '/Users/brandon'); +chmod('755', '/Users/brandon'); // same as above +chmod('u+x', '/Users/brandon'); +chmod('-R', 'a-w', '/Users/brandon'); +``` + +Alters the permissions of a file or directory by either specifying the +absolute permissions in octal form or expressing the changes in symbols. +This command tries to mimic the POSIX behavior as much as possible. +Notable exceptions: + ++ In symbolic modes, `a-r` and `-r` are identical. No consideration is + given to the `umask`. 
++ There is no "quiet" option, since default behavior is to run silent.
++ Windows OS uses a very different permission model than POSIX. `chmod()`
+  does its best on Windows, but there are limits to how file permissions can
+  be set. Note that WSL (Windows subsystem for Linux) **does** follow POSIX,
+  so cross-platform compatibility should not be a concern there.
+
+Returns a [ShellString](#shellstringstr) indicating success or failure.
+
+
+### cmd(arg1[, arg2, ...] [, options])
+
+Available options:
+
++ `cwd: directoryPath`: change the current working directory only for this
+  cmd() invocation.
++ `maxBuffer: num`: Raise or decrease the default buffer size for
+  stdout/stderr.
++ `timeout`: Change the default timeout.
+
+Examples:
+
+```javascript
+var version = cmd('node', '--version').stdout;
+cmd('git', 'commit', '-am', `Add support for node ${version}`);
+console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout)
+console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout)
+```
+
+Executes the given command synchronously. This is intended as an easier
+alternative to [exec()](#execcommand--options--callback), with better
+security around globbing, command injection, and variable expansion. This is
+guaranteed to only run one external command, and won't give special
+treatment for any shell characters (ex. this treats `|` as a literal
+character, not as a shell pipeline).
+This returns a [ShellString](#shellstringstr).
+
+By default, this performs globbing on all platforms, but you can disable
+this with `set('-f')`.
+
+This **does not** support asynchronous mode. If you need asynchronous
+command execution, check out [execa](https://www.npmjs.com/package/execa) or
+the node builtin `child_process.execFile()` instead.
+
+
+### cp([options,] source [, source ...], dest)
+### cp([options,] source_array, dest)
+
+Available options:
+
++ `-f`: force (default behavior)
++ `-n`: no-clobber
++ `-u`: only copy if `source` is newer than `dest`
++ `-r`, `-R`: recursive
++ `-L`: follow symlinks
++ `-P`: don't follow symlinks
++ `-p`: preserve file mode, ownership, and timestamps
+
+Examples:
+
+```javascript
+cp('file1', 'dir1');
+cp('-R', 'path/to/dir/', '~/newCopy/');
+cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp');
+cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above
+```
+
+Copies files. Returns a [ShellString](#shellstringstr) indicating success
+or failure.
+
+
+### pushd([options,] [dir | '-N' | '+N'])
+
+Available options:
+
++ `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated.
++ `-q`: Suppresses output to the console.
+
+Arguments:
+
++ `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`.
++ `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack.
++ `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack.
+
+Examples:
+
+```javascript
+// process.cwd() === '/usr'
+pushd('/etc'); // Returns /etc /usr
+pushd('+1'); // Returns /usr /etc
+```
+
+Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. Returns an array of paths in the stack.
+ + +### popd([options,] ['-N' | '+N']) + +Available options: + ++ `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. ++ `-q`: Suppresses output to the console. + +Arguments: + ++ `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. ++ `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. + +Examples: + +```javascript +echo(process.cwd()); // '/usr' +pushd('/etc'); // '/etc /usr' +echo(process.cwd()); // '/etc' +popd(); // '/usr' +echo(process.cwd()); // '/usr' +``` + +When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. + + +### dirs([options | '+N' | '-N']) + +Available options: + ++ `-c`: Clears the directory stack by deleting all of the elements. ++ `-q`: Suppresses output to the console. + +Arguments: + ++ `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. ++ `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. + +Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. + +See also: `pushd`, `popd` + + +### echo([options,] string [, string ...]) + +Available options: + ++ `-e`: interpret backslash escapes (default) ++ `-n`: remove trailing newline from output + +Examples: + +```javascript +echo('hello world'); +var str = echo('hello world'); +echo('-n', 'no newline at end'); +``` + +Prints `string` to stdout, and returns a [ShellString](#shellstringstr). + + +### exec(command [, options] [, callback]) + +Available options: + ++ `async`: Asynchronous execution. If a callback is provided, it will be set to + `true`, regardless of the passed value (default: `false`). ++ `fatal`: Exit upon error (default: `false`). ++ `silent`: Do not echo program output to console (default: `false`). ++ `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and + what is written to stdout and stderr when not in silent mode (default: `'utf8'`). ++ and any option available to Node.js's + [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) + +Examples: + +```javascript +var version = exec('node --version', {silent:true}).stdout; + +var child = exec('some_long_running_process', {async:true}); +child.stdout.on('data', function(data) { + /* ... do something with data ... */ +}); + +exec('some_long_running_process', function(code, stdout, stderr) { + console.log('Exit code:', code); + console.log('Program output:', stdout); + console.log('Program stderr:', stderr); +}); +``` + +Executes the given `command` _synchronously_, unless otherwise specified. +When in synchronous mode, this returns a [ShellString](#shellstringstr). +Otherwise, this returns the child process object, and the `callback` +receives the arguments `(code, stdout, stderr)`. + +Not seeing the behavior you want? `exec()` runs everything through `sh` +by default (or `cmd.exe` on Windows), which differs from `bash`. 
If you
+need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option.
+
+**Security note:** as `shell.exec()` executes an arbitrary string in the
+system shell, it is **critical** to properly sanitize user input to avoid
+**command injection**. For more context, consult the [Security
+Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines).
+
+
+### find(path [, path ...])
+### find(path_array)
+
+Examples:
+
+```javascript
+find('src', 'lib');
+find(['src', 'lib']); // same as above
+find('.').filter(function(file) { return file.match(/\.js$/); });
+```
+
+Returns a [ShellString](#shellstringstr) (with array-like properties) of all
+files (however deep) in the given paths.
+
+The main difference from `ls('-R', path)` is that the resulting file names
+include the base directories (e.g., `lib/resources/file1` instead of just `file1`).
+
+
+### grep([options,] regex_filter, file [, file ...])
+### grep([options,] regex_filter, file_array)
+
+Available options:
+
++ `-v`: Invert `regex_filter` (only print non-matching lines).
++ `-l`: Print only filenames of matching files.
++ `-i`: Ignore case.
++ `-n`: Print line numbers.
++ `-B <num>`: Show `<num>` lines before each result.
++ `-A <num>`: Show `<num>` lines after each result.
++ `-C <num>`: Show `<num>` lines before and after each result. -B and -A override this option.
+
+Examples:
+
+```javascript
+grep('-v', 'GLOBAL_VARIABLE', '*.js');
+grep('GLOBAL_VARIABLE', '*.js');
+grep('-B', 3, 'GLOBAL_VARIABLE', '*.js');
+grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js');
+grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js');
+```
+
+Reads input string from given files and returns a
+[ShellString](#shellstringstr) containing all lines of the file that match
+the given `regex_filter`.
+
+
+### head([{'-n': \<num\>},] file [, file ...])
+### head([{'-n': \<num\>},] file_array)
+
+Available options:
+
++ `-n <num>`: Show the first `<num>` lines of the files
+
+Examples:
+
+```javascript
+var str = head({'-n': 1}, 'file*.txt');
+var str = head('file1', 'file2');
+var str = head(['file1', 'file2']); // same as above
+```
+
+Read the start of a `file`. Returns a [ShellString](#shellstringstr).
+
+
+### ln([options,] source, dest)
+
+Available options:
+
++ `-s`: symlink
++ `-f`: force
+
+Examples:
+
+```javascript
+ln('file', 'newlink');
+ln('-sf', 'file', 'existing');
+```
+
+Links `source` to `dest`. Use `-f` to force the link, should `dest` already
+exist. Returns a [ShellString](#shellstringstr) indicating success or
+failure.
+
+
+### ls([options,] [path, ...])
+### ls([options,] path_array)
+
+Available options:
+
++ `-R`: recursive
++ `-A`: all files (include files beginning with `.`, except for `.` and `..`)
++ `-L`: follow symlinks
++ `-d`: list directories themselves, not their contents
++ `-l`: provides more details for each file. Specifically, each file is
+       represented by a structured object with separate fields for file
+       metadata (see
+       [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The
+       return value also overrides `.toString()` to resemble `ls -l`'s
+       output format for human readability, but programmatic usage should
+       depend on the stable object format rather than the `.toString()`
+       representation.
+ +Examples: + +```javascript +ls('projs/*.js'); +ls('projs/**/*.js'); // Find all js files recursively in projs +ls('-R', '/users/me', '/tmp'); +ls('-R', ['/users/me', '/tmp']); // same as above +ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} +``` + +Returns a [ShellString](#shellstringstr) (with array-like properties) of all +the files in the given `path`, or files in the current directory if no +`path` is provided. + + +### mkdir([options,] dir [, dir ...]) +### mkdir([options,] dir_array) + +Available options: + ++ `-p`: full path (and create intermediate directories, if necessary) + +Examples: + +```javascript +mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); +mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above +``` + +Creates directories. Returns a [ShellString](#shellstringstr) indicating +success or failure. + + +### mv([options ,] source [, source ...], dest') +### mv([options ,] source_array, dest') + +Available options: + ++ `-f`: force (default behavior) ++ `-n`: no-clobber + +Examples: + +```javascript +mv('-n', 'file', 'dir/'); +mv('file1', 'file2', 'dir/'); +mv(['file1', 'file2'], 'dir/'); // same as above +``` + +Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) +indicating success or failure. + + +### pwd() + +Returns the current directory as a [ShellString](#shellstringstr). + + +### rm([options,] file [, file ...]) +### rm([options,] file_array) + +Available options: + ++ `-f`: force ++ `-r, -R`: recursive + +Examples: + +```javascript +rm('-rf', '/tmp/*'); +rm('some_file.txt', 'another_file.txt'); +rm(['some_file.txt', 'another_file.txt']); // same as above +``` + +Removes files. Returns a [ShellString](#shellstringstr) indicating success +or failure. + + +### sed([options,] search_regex, replacement, file [, file ...]) +### sed([options,] search_regex, replacement, file_array) + +Available options: + ++ `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ + +Examples: + +```javascript +sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); +``` + +Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on +each of the lines from the input string using the given `search_regex` and `replacement` string or +function. Returns the new [ShellString](#shellstringstr) after replacement. + +Note: + +Like unix `sed`, ShellJS `sed` supports capture groups. Capture groups are specified +using the `$n` syntax: + +```javascript +sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); +``` + +Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file +(split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') +will not match anything and nothing will be replaced. + + +### set(options) + +Available options: + ++ `+/-e`: exit upon error (`config.fatal`) ++ `+/-v`: verbose: show all commands (`config.verbose`) ++ `+/-f`: disable filename expansion (globbing) + +Examples: + +```javascript +set('-e'); // exit upon first error +set('+e'); // this undoes a "set('-e')" +``` + +Sets global configuration variables. + + +### sort([options,] file [, file ...]) +### sort([options,] file_array) + +Available options: + ++ `-r`: Reverse the results ++ `-n`: Compare according to numerical value + +Examples: + +```javascript +sort('foo.txt', 'bar.txt'); +sort('-r', 'foo.txt'); +``` + +Return the contents of the `file`s, sorted line-by-line as a +[ShellString](#shellstringstr). 
Sorting multiple files mixes their content
+(just as unix `sort` does).
+
+
+### tail([{'-n': \<num\>},] file [, file ...])
+### tail([{'-n': \<num\>},] file_array)
+
+Available options:
+
++ `-n <num>`: Show the last `<num>` lines of `file`s
+
+Examples:
+
+```javascript
+var str = tail({'-n': 1}, 'file*.txt');
+var str = tail('file1', 'file2');
+var str = tail(['file1', 'file2']); // same as above
+```
+
+Read the end of a `file`. Returns a [ShellString](#shellstringstr).
+
+
+### tempdir()
+
+Examples:
+
+```javascript
+var tmp = tempdir(); // "/tmp" for most *nix platforms
+```
+
+Searches for and returns a string containing a writeable, platform-dependent temporary directory.
+Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir).
+
+
+### test(expression)
+
+Available expression primaries:
+
++ `'-b', 'path'`: true if path is a block device
++ `'-c', 'path'`: true if path is a character device
++ `'-d', 'path'`: true if path is a directory
++ `'-e', 'path'`: true if path exists
++ `'-f', 'path'`: true if path is a regular file
++ `'-L', 'path'`: true if path is a symbolic link
++ `'-p', 'path'`: true if path is a pipe (FIFO)
++ `'-S', 'path'`: true if path is a socket
+
+Examples:
+
+```javascript
+if (test('-d', path)) { /* do something with dir */ };
+if (!test('-f', path)) continue; // skip if it's not a regular file
+```
+
+Evaluates `expression` using the available primaries and returns
+corresponding boolean value.
+
+
+### ShellString.prototype.to(file)
+
+Examples:
+
+```javascript
+cat('input.txt').to('output.txt');
+```
+
+Analogous to the redirection operator `>` in Unix, but works with
+`ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix
+redirections, `to()` will overwrite any existing file!_ Returns the same
+[ShellString](#shellstringstr) this operated on, to support chaining.
+
+
+### ShellString.prototype.toEnd(file)
+
+Examples:
+
+```javascript
+cat('input.txt').toEnd('output.txt');
+```
+
+Analogous to the redirect-and-append operator `>>` in Unix, but works with
+`ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the
+same [ShellString](#shellstringstr) this operated on, to support chaining.
+
+
+### touch([options,] file [, file ...])
+### touch([options,] file_array)
+
+Available options:
+
++ `-a`: Change only the access time
++ `-c`: Do not create any files
++ `-m`: Change only the modification time
++ `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`)
+  instead of current time
++ `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current
+  time
+
+Examples:
+
+```javascript
+touch('source.js');
+touch('-c', 'path/to/file.js');
+touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js');
+touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js');
+touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js');
+```
+
+Update the access and modification times of each file to the current time.
+A file argument that does not exist is created empty, unless `-c` is supplied.
+This is a partial implementation of
+[`touch(1)`](http://linux.die.net/man/1/touch). Returns a
+[ShellString](#shellstringstr) indicating success or failure.
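+
+As an additional illustration, a small sketch that combines `test()` and
+`touch()` from above to bump timestamps only for files that already exist
+(the log file names are hypothetical):
+
+```javascript
+var shell = require('shelljs');
+
+['build.log', 'missing.log'].forEach(function (file) {
+  if (shell.test('-f', file)) {
+    shell.touch('-m', file); // update only the modification time
+  } else {
+    console.log('skipping ' + file + ': not a regular file');
+  }
+});
+```
+
+Note that plain `touch('-c', file)` achieves a similar effect in a single
+call, since `-c` skips files that do not exist.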
+ + +### uniq([options,] [input, [output]]) + +Available options: + ++ `-i`: Ignore case while comparing ++ `-c`: Prefix lines by the number of occurrences ++ `-d`: Only print duplicate lines, one for each group of identical lines + +Examples: + +```javascript +uniq('foo.txt'); +uniq('-i', 'foo.txt'); +uniq('-cd', 'foo.txt', 'bar.txt'); +``` + +Filter adjacent matching lines from `input`. Returns a +[ShellString](#shellstringstr). + + +### which(command) + +Examples: + +```javascript +var nodeExec = which('node'); +``` + +Searches for `command` in the system's `PATH`. On Windows, this uses the +`PATHEXT` variable to append the extension if it's not already executable. +Returns a [ShellString](#shellstringstr) containing the absolute path to +`command`. + + +### exit(code) + +Exits the current process with the given exit `code`. + +### error() + +Tests if error occurred in the last command. Returns a truthy value if an +error returned, or a falsy value otherwise. + +**Note**: do not rely on the +return value to be an error message. If you need the last error message, use +the `.stderr` attribute from the last command's return value instead. + + +### errorCode() + +Returns the error code from the last command. + + +### ShellString(str) + +Examples: + +```javascript +var foo = new ShellString('hello world'); +``` + +This is a dedicated type returned by most ShellJS methods, which wraps a +string (or array) value. This has all the string (or array) methods, but +also exposes extra methods: [`.to()`](#shellstringprototypetofile), +[`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods +(ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string +by calling `.toString()`. + +This type also exposes the corresponding command's stdout, stderr, and +return status code via the `.stdout` (string), `.stderr` (string), and +`.code` (number) properties respectively. + + +### env['VAR_NAME'] + +Object containing environment variables (both getter and setter). Shortcut +to `process.env`. + +### Pipes + +Examples: + +```javascript +grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); +echo("files with o's in the name:\n" + ls().grep('o')); +cat('test.js').exec('node'); // pipe to exec() call +``` + +Commands can send their output to another command in a pipe-like fashion. +`sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand +side of a pipe. Pipes can be chained. + +## Configuration + + +### config.silent + +Example: + +```javascript +var sh = require('shelljs'); +var silentState = sh.config.silent; // save old silent state +sh.config.silent = true; +/* ... */ +sh.config.silent = silentState; // restore old silent state +``` + +Suppresses all command output if `true`, except for `echo()` calls. +Default is `false`. + +### config.fatal + +Example: + +```javascript +require('shelljs/global'); +config.fatal = true; // or set('-e'); +cp('this_file_does_not_exist', '/dev/null'); // throws Error here +/* more commands... */ +``` + +If `true`, the script will throw a Javascript error when any shell.js +command encounters an error. Default is `false`. This is analogous to +Bash's `set -e`. 
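+
+A common follow-on pattern (a sketch; the file names here are hypothetical)
+is to scope `config.fatal` to a critical block and restore it afterwards,
+mirroring the save/restore example shown for `config.silent`:
+
+```javascript
+var shell = require('shelljs');
+
+var oldFatal = shell.config.fatal; // save old fatal state
+shell.config.fatal = true;         // any failing command below now throws
+try {
+  shell.cp('settings.template.json', 'settings.json');
+  shell.sed('-i', 'PLACEHOLDER', 'production', 'settings.json');
+} finally {
+  shell.config.fatal = oldFatal;   // restore old fatal state
+}
+```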
+ +### config.verbose + +Example: + +```javascript +config.verbose = true; // or set('-v'); +cd('dir/'); +rm('-rf', 'foo.txt', 'bar.txt'); +exec('echo hello'); +``` + +Will print each command as follows: + +``` +cd dir/ +rm -rf foo.txt bar.txt +exec echo hello +``` + +### config.globOptions (deprecated) + +**Deprecated**: we recommend that you do not edit `config.globOptions`. +Support for this configuration option may be changed or removed in a future +ShellJS release. + +**Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS +v0.9.x, `config.globOptions` is compatible with `fast-glob`. + +Example: + +```javascript +config.globOptions = {nodir: true}; +``` + +`config.globOptions` changes how ShellJS expands glob (wildcard) +expressions. See +[fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) +for available options. Be aware that modifying `config.globOptions` **may +break ShellJS functionality.** + +### config.reset() + +Example: + +```javascript +var shell = require('shelljs'); +// Make changes to shell.config, and do stuff... +/* ... */ +shell.config.reset(); // reset to original state +// Do more stuff, but with original settings +/* ... */ +``` + +Reset `shell.config` to the defaults: + +```javascript +{ + fatal: false, + globOptions: {}, + maxdepth: 255, + noglob: false, + silent: false, + verbose: false, +} +``` + +## Team + +| [![Nate Fischer](https://avatars.githubusercontent.com/u/5801521?s=130)](https://github.com/nfischer) | +|:---:| +| [Nate Fischer](https://github.com/nfischer) | diff --git a/node_modules/shelljs/global.js b/node_modules/shelljs/global.js new file mode 100644 index 0000000..e061f5a --- /dev/null +++ b/node_modules/shelljs/global.js @@ -0,0 +1,15 @@ +/* eslint no-extend-native: 0 */ +var shell = require('./shell'); +var common = require('./src/common'); + +Object.keys(shell).forEach(function (cmd) { + global[cmd] = shell[cmd]; +}); + +var _to = require('./src/to'); + +String.prototype.to = common.wrap('to', _to); + +var _toEnd = require('./src/toEnd'); + +String.prototype.toEnd = common.wrap('toEnd', _toEnd); diff --git a/node_modules/shelljs/make.js b/node_modules/shelljs/make.js new file mode 100644 index 0000000..a8438c8 --- /dev/null +++ b/node_modules/shelljs/make.js @@ -0,0 +1,57 @@ +require('./global'); + +global.config.fatal = true; +global.target = {}; + +var args = process.argv.slice(2), + targetArgs, + dashesLoc = args.indexOf('--'); + +// split args, everything after -- if only for targets +if (dashesLoc > -1) { + targetArgs = args.slice(dashesLoc + 1, args.length); + args = args.slice(0, dashesLoc); +} + +// This ensures we only execute the script targets after the entire script has +// been evaluated +setTimeout(function() { + var t; + + if (args.length === 1 && args[0] === '--help') { + console.log('Available targets:'); + for (t in global.target) + console.log(' ' + t); + return; + } + + // Wrap targets to prevent duplicate execution + for (t in global.target) { + (function(t, oldTarget){ + + // Wrap it + global.target[t] = function() { + if (!oldTarget.done){ + oldTarget.done = true; + oldTarget.result = oldTarget.apply(oldTarget, arguments); + } + return oldTarget.result; + }; + + })(t, global.target[t]); + } + + // Execute desired targets + if (args.length > 0) { + args.forEach(function(arg) { + if (arg in global.target) + global.target[arg](targetArgs); + else { + console.log('no such target: ' + arg); + } + }); + } else if ('all' in global.target) { + global.target.all(targetArgs); + } + +}, 
0); diff --git a/node_modules/shelljs/package.json b/node_modules/shelljs/package.json new file mode 100644 index 0000000..a5c3299 --- /dev/null +++ b/node_modules/shelljs/package.json @@ -0,0 +1,90 @@ +{ + "name": "shelljs", + "version": "0.10.0", + "description": "Portable Unix shell commands for Node.js", + "keywords": [ + "shelljs", + "bash", + "unix", + "shell", + "makefile", + "make", + "jake", + "synchronous" + ], + "contributors": [ + "Nate Fischer (https://github.com/nfischer)", + "Brandon Freitag (https://github.com/freitagbr)" + ], + "repository": { + "type": "git", + "url": "git://github.com/shelljs/shelljs.git" + }, + "license": "BSD-3-Clause", + "homepage": "http://github.com/shelljs/shelljs", + "main": "./shell.js", + "exports": { + ".": "./shell.js", + "./global": "./global.js", + "./global.js": "./global.js", + "./make": "./make.js", + "./make.js": "./make.js", + "./package": "./package.json", + "./package.json": "./package.json", + "./plugin": "./plugin.js", + "./plugin.js": "./plugin.js" + }, + "files": [ + "global.js", + "make.js", + "plugin.js", + "shell.js", + "src" + ], + "scripts": { + "check-node-support": "node scripts/check-node-support", + "posttest": "npm run lint", + "test": "ava", + "test-with-coverage": "nyc --reporter=text --reporter=lcov ava", + "gendocs": "node scripts/generate-docs", + "lint": "eslint .", + "after-travis": "travis-check-changes", + "changelog": "shelljs-changelog", + "release:major": "shelljs-release major", + "release:minor": "shelljs-release minor", + "release:patch": "shelljs-release patch" + }, + "dependencies": { + "execa": "^5.1.1", + "fast-glob": "^3.3.2" + }, + "ava": { + "serial": true, + "workerThreads": false, + "powerAssert": false, + "files": [ + "test/*.js" + ], + "helpers": [ + "test/resources/**", + "test/utils/**" + ] + }, + "devDependencies": { + "ava": "^6.2.0", + "chalk": "^4.1.2", + "coffee-script": "^1.12.7", + "eslint": "^8.2.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-plugin-import": "^2.31.0", + "js-yaml": "^4.1.0", + "nyc": "^17.1.0", + "shelljs-changelog": "^0.2.6", + "shelljs-release": "^0.5.3", + "shx": "^0.4.0", + "travis-check-changes": "^0.5.1" + }, + "engines": { + "node": ">=18" + } +} diff --git a/node_modules/shelljs/plugin.js b/node_modules/shelljs/plugin.js new file mode 100644 index 0000000..2e15850 --- /dev/null +++ b/node_modules/shelljs/plugin.js @@ -0,0 +1,16 @@ +// Various utilities exposed to plugins + +require('./shell'); // Create the ShellJS instance (mandatory) + +var common = require('./src/common'); + +var exportedAttributes = [ + 'error', // For signaling errors from within commands + 'parseOptions', // For custom option parsing + 'readFromPipe', // For commands with the .canReceivePipe attribute + 'register', // For registering plugins +]; + +exportedAttributes.forEach(function (attr) { + exports[attr] = common[attr]; +}); diff --git a/node_modules/shelljs/shell.js b/node_modules/shelljs/shell.js new file mode 100644 index 0000000..8a3a67d --- /dev/null +++ b/node_modules/shelljs/shell.js @@ -0,0 +1,216 @@ +// +// ShellJS +// Unix shell commands on top of Node's API +// +// Copyright (c) 2012 Artur Adib +// http://github.com/shelljs/shelljs +// + +var common = require('./src/common'); + +module.exports = common.shell; + +//@ +//@ All commands run synchronously, unless otherwise stated. +//@ All commands accept standard bash globbing characters (`*`, `?`, etc.), +//@ compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). 
+//@ +//@ For less-commonly used commands and features, please check out our [wiki +//@ page](https://github.com/shelljs/shelljs/wiki). +//@ + +// Include the docs for all the default commands +//@commands + +// Load all default commands. We import these for their side effect of loading +// using the plugin architecture via `common.register()`. +require('./src/cat'); +require('./src/cd'); +require('./src/chmod'); +require('./src/cmd'); +require('./src/cp'); +require('./src/dirs'); +require('./src/echo'); +require('./src/exec'); +require('./src/exec-child'); // A hint to the bundler to keep exec-child.js +require('./src/find'); +require('./src/grep'); +require('./src/head'); +require('./src/ln'); +require('./src/ls'); +require('./src/mkdir'); +require('./src/mv'); +require('./src/popd'); +require('./src/pushd'); +require('./src/pwd'); +require('./src/rm'); +require('./src/sed'); +require('./src/set'); +require('./src/sort'); +require('./src/tail'); +require('./src/tempdir'); +require('./src/test'); +require('./src/to'); +require('./src/toEnd'); +require('./src/touch'); +require('./src/uniq'); +require('./src/which'); + +//@ +//@ ### exit(code) +//@ +//@ Exits the current process with the given exit `code`. +module.exports.exit = function exit(code) { + common.state.error = null; + common.state.errorCode = 0; + if (code) { + common.error('exit', { + continue: true, + code, + prefix: '', + silent: true, + fatal: false, + }); + process.exit(code); + } else { + process.exit(); + } +}; + +//@include ./src/error.js +module.exports.error = require('./src/error'); + +//@include ./src/errorCode.js +module.exports.errorCode = require('./src/errorCode'); + +//@include ./src/common.js +module.exports.ShellString = common.ShellString; + +//@ +//@ ### env['VAR_NAME'] +//@ +//@ Object containing environment variables (both getter and setter). Shortcut +//@ to `process.env`. +module.exports.env = process.env; + +//@ +//@ ### Pipes +//@ +//@ Examples: +//@ +//@ ```javascript +//@ grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); +//@ echo("files with o's in the name:\n" + ls().grep('o')); +//@ cat('test.js').exec('node'); // pipe to exec() call +//@ ``` +//@ +//@ Commands can send their output to another command in a pipe-like fashion. +//@ `sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand +//@ side of a pipe. Pipes can be chained. + +//@ +//@ ## Configuration +//@ + +module.exports.config = common.config; + +//@ +//@ ### config.silent +//@ +//@ Example: +//@ +//@ ```javascript +//@ var sh = require('shelljs'); +//@ var silentState = sh.config.silent; // save old silent state +//@ sh.config.silent = true; +//@ /* ... */ +//@ sh.config.silent = silentState; // restore old silent state +//@ ``` +//@ +//@ Suppresses all command output if `true`, except for `echo()` calls. +//@ Default is `false`. + +//@ +//@ ### config.fatal +//@ +//@ Example: +//@ +//@ ```javascript +//@ require('shelljs/global'); +//@ config.fatal = true; // or set('-e'); +//@ cp('this_file_does_not_exist', '/dev/null'); // throws Error here +//@ /* more commands... */ +//@ ``` +//@ +//@ If `true`, the script will throw a Javascript error when any shell.js +//@ command encounters an error. Default is `false`. This is analogous to +//@ Bash's `set -e`. 
+ +//@ +//@ ### config.verbose +//@ +//@ Example: +//@ +//@ ```javascript +//@ config.verbose = true; // or set('-v'); +//@ cd('dir/'); +//@ rm('-rf', 'foo.txt', 'bar.txt'); +//@ exec('echo hello'); +//@ ``` +//@ +//@ Will print each command as follows: +//@ +//@ ``` +//@ cd dir/ +//@ rm -rf foo.txt bar.txt +//@ exec echo hello +//@ ``` + +//@ +//@ ### config.globOptions (deprecated) +//@ +//@ **Deprecated**: we recommend that you do not edit `config.globOptions`. +//@ Support for this configuration option may be changed or removed in a future +//@ ShellJS release. +//@ +//@ **Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS +//@ v0.9.x, `config.globOptions` is compatible with `fast-glob`. +//@ +//@ Example: +//@ +//@ ```javascript +//@ config.globOptions = {nodir: true}; +//@ ``` +//@ +//@ `config.globOptions` changes how ShellJS expands glob (wildcard) +//@ expressions. See +//@ [fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) +//@ for available options. Be aware that modifying `config.globOptions` **may +//@ break ShellJS functionality.** + +//@ +//@ ### config.reset() +//@ +//@ Example: +//@ +//@ ```javascript +//@ var shell = require('shelljs'); +//@ // Make changes to shell.config, and do stuff... +//@ /* ... */ +//@ shell.config.reset(); // reset to original state +//@ // Do more stuff, but with original settings +//@ /* ... */ +//@ ``` +//@ +//@ Reset `shell.config` to the defaults: +//@ +//@ ```javascript +//@ { +//@ fatal: false, +//@ globOptions: {}, +//@ maxdepth: 255, +//@ noglob: false, +//@ silent: false, +//@ verbose: false, +//@ } +//@ ``` diff --git a/node_modules/shelljs/src/cat.js b/node_modules/shelljs/src/cat.js new file mode 100644 index 0000000..ca264a9 --- /dev/null +++ b/node_modules/shelljs/src/cat.js @@ -0,0 +1,76 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('cat', _cat, { + canReceivePipe: true, + cmdOptions: { + 'n': 'number', + }, +}); + +//@ +//@ ### cat([options,] file [, file ...]) +//@ ### cat([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-n`: number all output lines +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var str = cat('file*.txt'); +//@ var str = cat('file1', 'file2'); +//@ var str = cat(['file1', 'file2']); // same as above +//@ ``` +//@ +//@ Returns a [ShellString](#shellstringstr) containing the given file, or a +//@ concatenated string containing the files if more than one file is given (a +//@ new line character is introduced between each file). +function _cat(options, files) { + var cat = common.readFromPipe(); + + if (!files && !cat) common.error('no paths given'); + + files = [].slice.call(arguments, 1); + + files.forEach(function (file) { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file); + } else if (common.statFollowLinks(file).isDirectory()) { + common.error(file + ': Is a directory'); + } + + cat += fs.readFileSync(file, 'utf8'); + }); + + if (options.number) { + cat = addNumbers(cat); + } + + return cat; +} +module.exports = _cat; + +function addNumbers(cat) { + var lines = cat.split('\n'); + var lastLine = lines.pop(); + + lines = lines.map(function (line, i) { + return numberedLine(i + 1, line); + }); + + if (lastLine.length) { + lastLine = numberedLine(lines.length + 1, lastLine); + } + lines.push(lastLine); + + return lines.join('\n'); +} + +function numberedLine(n, line) { + // GNU cat use six pad start number + tab. 
See http://lingrok.org/xref/coreutils/src/cat.c#57 + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/padStart + var number = (' ' + n).slice(-6) + '\t'; + return number + line; +} diff --git a/node_modules/shelljs/src/cd.js b/node_modules/shelljs/src/cd.js new file mode 100644 index 0000000..1c6e73f --- /dev/null +++ b/node_modules/shelljs/src/cd.js @@ -0,0 +1,40 @@ +var os = require('os'); +var common = require('./common'); + +common.register('cd', _cd, {}); + +//@ +//@ ### cd([dir]) +//@ +//@ Changes to directory `dir` for the duration of the script. Changes to home +//@ directory if no argument is supplied. Returns a +//@ [ShellString](#shellstringstr) to indicate success or failure. +function _cd(options, dir) { + if (!dir) dir = os.homedir(); + + if (dir === '-') { + if (!process.env.OLDPWD) { + common.error('could not find previous directory'); + } else { + dir = process.env.OLDPWD; + } + } + + try { + var curDir = process.cwd(); + process.chdir(dir); + process.env.OLDPWD = curDir; + } catch (e) { + // something went wrong, let's figure out the error + var err; + try { + common.statFollowLinks(dir); // if this succeeds, it must be some sort of file + err = 'not a directory: ' + dir; + } catch (e2) { + err = 'no such file or directory: ' + dir; + } + if (err) common.error(err); + } + return ''; +} +module.exports = _cd; diff --git a/node_modules/shelljs/src/chmod.js b/node_modules/shelljs/src/chmod.js new file mode 100644 index 0000000..b930cc7 --- /dev/null +++ b/node_modules/shelljs/src/chmod.js @@ -0,0 +1,222 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +var PERMS = (function (base) { + return { + OTHER_EXEC: base.EXEC, + OTHER_WRITE: base.WRITE, + OTHER_READ: base.READ, + + GROUP_EXEC: base.EXEC << 3, + GROUP_WRITE: base.WRITE << 3, + GROUP_READ: base.READ << 3, + + OWNER_EXEC: base.EXEC << 6, + OWNER_WRITE: base.WRITE << 6, + OWNER_READ: base.READ << 6, + + // Literal octal numbers are apparently not allowed in "strict" javascript. + STICKY: parseInt('01000', 8), + SETGID: parseInt('02000', 8), + SETUID: parseInt('04000', 8), + + TYPE_MASK: parseInt('0770000', 8), + }; +}({ + EXEC: 1, + WRITE: 2, + READ: 4, +})); + +common.register('chmod', _chmod, { +}); + +//@ +//@ ### chmod([options,] octal_mode || octal_string, file) +//@ ### chmod([options,] symbolic_mode, file) +//@ +//@ Available options: +//@ +//@ + `-v`: output a diagnostic for every file processed//@ +//@ + `-c`: like verbose, but report only when a change is made//@ +//@ + `-R`: change files and directories recursively//@ +//@ +//@ Examples: +//@ +//@ ```javascript +//@ chmod(755, '/Users/brandon'); +//@ chmod('755', '/Users/brandon'); // same as above +//@ chmod('u+x', '/Users/brandon'); +//@ chmod('-R', 'a-w', '/Users/brandon'); +//@ ``` +//@ +//@ Alters the permissions of a file or directory by either specifying the +//@ absolute permissions in octal form or expressing the changes in symbols. +//@ This command tries to mimic the POSIX behavior as much as possible. +//@ Notable exceptions: +//@ +//@ + In symbolic modes, `a-r` and `-r` are identical. No consideration is +//@ given to the `umask`. +//@ + There is no "quiet" option, since default behavior is to run silent. +//@ + Windows OS uses a very different permission model than POSIX. `chmod()` +//@ does its best on Windows, but there are limits to how file permissions can +//@ be set. 
Note that WSL (Windows subsystem for Linux) **does** follow POSIX, +//@ so cross-platform compatibility should not be a concern there. +//@ +//@ Returns a [ShellString](#shellstringstr) indicating success or failure. +function _chmod(options, mode, filePattern) { + if (!filePattern) { + if (options.length > 0 && options.charAt(0) === '-') { + // Special case where the specified file permissions started with - to subtract perms, which + // get picked up by the option parser as command flags. + // If we are down by one argument and options starts with -, shift everything over. + [].unshift.call(arguments, ''); + } else { + common.error('You must specify a file.'); + } + } + + options = common.parseOptions(options, { + 'R': 'recursive', + 'c': 'changes', + 'v': 'verbose', + }); + + filePattern = [].slice.call(arguments, 2); + + var files; + + // TODO: replace this with a call to common.expand() + if (options.recursive) { + files = []; + filePattern.forEach(function addFile(expandedFile) { + var stat = common.statNoFollowLinks(expandedFile); + + if (!stat.isSymbolicLink()) { + files.push(expandedFile); + + if (stat.isDirectory()) { // intentionally does not follow symlinks. + fs.readdirSync(expandedFile).forEach(function (child) { + addFile(expandedFile + '/' + child); + }); + } + } + }); + } else { + files = filePattern; + } + + files.forEach(function innerChmod(file) { + file = path.resolve(file); + if (!fs.existsSync(file)) { + common.error('File not found: ' + file); + } + + // When recursing, don't follow symlinks. + if (options.recursive && common.statNoFollowLinks(file).isSymbolicLink()) { + return; + } + + var stat = common.statFollowLinks(file); + var isDir = stat.isDirectory(); + var perms = stat.mode; + var type = perms & PERMS.TYPE_MASK; + + var newPerms = perms; + + if (Number.isNaN(parseInt(mode, 8))) { + // parse options + mode.split(',').forEach(function (symbolicMode) { + var pattern = /([ugoa]*)([=+-])([rwxXst]*)/i; + var matches = pattern.exec(symbolicMode); + + if (matches) { + var applyTo = matches[1]; + var operator = matches[2]; + var change = matches[3]; + + var changeOwner = applyTo.includes('u') || applyTo === 'a' || applyTo === ''; + var changeGroup = applyTo.includes('g') || applyTo === 'a' || applyTo === ''; + var changeOther = applyTo.includes('o') || applyTo === 'a' || applyTo === ''; + + var changeRead = change.includes('r'); + var changeWrite = change.includes('w'); + var changeExec = change.includes('x'); + var changeExecDir = change.includes('X'); + var changeSticky = change.includes('t'); + var changeSetuid = change.includes('s'); + + if (changeExecDir && isDir) { + changeExec = true; + } + + var mask = 0; + if (changeOwner) { + mask |= (changeRead ? PERMS.OWNER_READ : 0) + (changeWrite ? PERMS.OWNER_WRITE : 0) + (changeExec ? PERMS.OWNER_EXEC : 0) + (changeSetuid ? PERMS.SETUID : 0); + } + if (changeGroup) { + mask |= (changeRead ? PERMS.GROUP_READ : 0) + (changeWrite ? PERMS.GROUP_WRITE : 0) + (changeExec ? PERMS.GROUP_EXEC : 0) + (changeSetuid ? PERMS.SETGID : 0); + } + if (changeOther) { + mask |= (changeRead ? PERMS.OTHER_READ : 0) + (changeWrite ? PERMS.OTHER_WRITE : 0) + (changeExec ? PERMS.OTHER_EXEC : 0); + } + + // Sticky bit is special - it's not tied to user, group or other. 
+ if (changeSticky) { + mask |= PERMS.STICKY; + } + + switch (operator) { + case '+': + newPerms |= mask; + break; + + case '-': + newPerms &= ~mask; + break; + + case '=': + newPerms = type + mask; + + // According to POSIX, when using = to explicitly set the + // permissions, setuid and setgid can never be cleared. + if (common.statFollowLinks(file).isDirectory()) { + newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; + } + break; + default: + common.error('Could not recognize operator: `' + operator + '`'); + } + + if (options.verbose) { + console.log(file + ' -> ' + newPerms.toString(8)); + } + + if (perms !== newPerms) { + if (!options.verbose && options.changes) { + console.log(file + ' -> ' + newPerms.toString(8)); + } + fs.chmodSync(file, newPerms); + perms = newPerms; // for the next round of changes! + } + } else { + common.error('Invalid symbolic mode change: ' + symbolicMode); + } + }); + } else { + // they gave us a full number + newPerms = type + parseInt(mode, 8); + + // POSIX rules are that setuid and setgid can only be added using numeric + // form, but not cleared. + if (common.statFollowLinks(file).isDirectory()) { + newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; + } + + fs.chmodSync(file, newPerms); + } + }); + return ''; +} +module.exports = _chmod; diff --git a/node_modules/shelljs/src/cmd.js b/node_modules/shelljs/src/cmd.js new file mode 100644 index 0000000..a00d6c4 --- /dev/null +++ b/node_modules/shelljs/src/cmd.js @@ -0,0 +1,138 @@ +var execa = require('execa'); +var common = require('./common'); + +var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; +var COMMAND_NOT_FOUND_ERROR_CODE = 127; + +common.register('cmd', _cmd, { + cmdOptions: null, + globStart: 1, + canReceivePipe: true, + wrapOutput: true, +}); + +function isCommandNotFound(execaResult) { + if (process.platform === 'win32') { + var str = 'is not recognized as an internal or external command'; + return execaResult.exitCode && execaResult.stderr.includes(str); + } + return execaResult.failed && execaResult.code === 'ENOENT'; +} + +function isExecaInternalError(result) { + if (typeof result.stdout !== 'string') return true; + if (typeof result.stderr !== 'string') return true; + if (typeof result.exitCode !== 'number') return true; + if (result.exitCode === 0 && result.failed) return true; + // Otherwise assume this executed correctly. The command may still have exited + // with non-zero status, but that's not due to anything execa did. + return false; +} + +//@ +//@ ### cmd(arg1[, arg2, ...] [, options]) +//@ +//@ Available options: +//@ +//@ + `cwd: directoryPath`: change the current working directory only for this +//@ cmd() invocation. +//@ + `maxBuffer: num`: Raise or decrease the default buffer size for +//@ stdout/stderr. +//@ + `timeout`: Change the default timeout. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var version = cmd('node', '--version').stdout; +//@ cmd('git', 'commit', '-am', `Add suport for node ${version}`); +//@ console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) +//@ console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) +//@ ``` +//@ +//@ Executes the given command synchronously. This is intended as an easier +//@ alternative for [exec()](#execcommand--options--callback), with better +//@ security around globbing, comamnd injection, and variable expansion. This is +//@ guaranteed to only run one external command, and won't give special +//@ treatment for any shell characters (ex. 
this treats `|` as a literal +//@ character, not as a shell pipeline). +//@ This returns a [ShellString](#shellstringstr). +//@ +//@ By default, this performs globbing on all platforms, but you can disable +//@ this with `set('-f')`. +//@ +//@ This **does not** support asynchronous mode. If you need asynchronous +//@ command execution, check out [execa](https://www.npmjs.com/package/execa) or +//@ the node builtin `child_process.execFile()` instead. +function _cmd(options, command, commandArgs, userOptions) { + if (!command) { + common.error('Must specify a non-empty string as a command'); + } + + // `options` will usually not have a value: it's added by our commandline flag + // parsing engine. + commandArgs = [].slice.call(arguments, 2); + + // `userOptions` may or may not be provided. We need to check the last + // argument. If it's an object, assume it's meant to be passed as + // userOptions (since ShellStrings are already flattened to strings). + if (commandArgs.length === 0) { + userOptions = {}; + } else { + var lastArg = commandArgs.pop(); + if (common.isObject(lastArg)) { + userOptions = lastArg; + } else { + userOptions = {}; + commandArgs.push(lastArg); + } + } + + var pipe = common.readFromPipe(); + + // Some of our defaults differ from execa's defaults. These can be overridden + // by the user. + var defaultOptions = { + maxBuffer: DEFAULT_MAXBUFFER_SIZE, + stripFinalNewline: false, // Preserve trailing newlines for consistency with unix. + reject: false, // Use ShellJS's error handling system. + }; + + // For other options, we forbid the user from overriding them (either for + // correctness or security). + var requiredOptions = { + input: pipe, + shell: false, + }; + + var execaOptions = + Object.assign(defaultOptions, userOptions, requiredOptions); + + var result = execa.sync(command, commandArgs, execaOptions); + var stdout; + var stderr; + var code; + if (isCommandNotFound(result)) { + // This can happen if `command` is not an executable binary, or possibly + // under other conditions. + stdout = ''; + stderr = "'" + command + "': command not found"; + code = COMMAND_NOT_FOUND_ERROR_CODE; + } else if (isExecaInternalError(result)) { + // Catch-all: execa tried to run `command` but it encountered some error + // (ex. maxBuffer, timeout). + stdout = result.stdout || ''; + stderr = result.stderr || + `'${command}' encountered an error during execution`; + code = result.exitCode !== undefined && result.exitCode > 0 ? result.exitCode : 1; + } else { + // Normal exit: execa was able to execute `command` and get a return value. + stdout = result.stdout.toString(); + stderr = result.stderr.toString(); + code = result.exitCode; + } + + // Pass `continue: true` so we can specify a value for stdout. 
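+  // `silent: true` stops error() from printing stderr to the console, and
+  // `continue: true` keeps error() from throwing, so the ShellString below is
+  // still returned with the captured stdout, stderr, and exit code.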
+ if (code) common.error(stderr, code, { silent: true, continue: true }); + return new common.ShellString(stdout, stderr, code); +} +module.exports = _cmd; diff --git a/node_modules/shelljs/src/common.js b/node_modules/shelljs/src/common.js new file mode 100644 index 0000000..b9ffeda --- /dev/null +++ b/node_modules/shelljs/src/common.js @@ -0,0 +1,545 @@ +// Ignore warning about 'new String()' and use of the Buffer constructor +/* eslint no-new-wrappers: "off", + no-buffer-constructor: "off" */ + +'use strict'; + +var os = require('os'); +var fs = require('fs'); +var glob = require('fast-glob'); + +var shell = {}; +exports.shell = shell; + +var shellMethods = Object.create(shell); + +exports.extend = Object.assign; + +// Check if we're running under electron +var isElectron = Boolean(process.versions.electron); + +// Module globals (assume no execPath by default) +var DEFAULT_CONFIG = { + fatal: false, + globOptions: {}, + maxdepth: 255, + noglob: false, + silent: false, + verbose: false, + execPath: null, + bufLength: 64 * 1024, // 64KB +}; + +var config = { + reset() { + Object.assign(this, DEFAULT_CONFIG); + if (!isElectron) { + this.execPath = process.execPath; + } + }, + resetForTesting() { + this.reset(); + this.silent = true; + }, +}; + +config.reset(); +exports.config = config; + +// Note: commands should generally consider these as read-only values. +var state = { + error: null, + errorCode: 0, + currentCmd: 'shell.js', +}; +exports.state = state; + +delete process.env.OLDPWD; // initially, there's no previous directory + +// Reliably test if something is any sort of javascript object +function isObject(a) { + return typeof a === 'object' && a !== null; +} +exports.isObject = isObject; + +function log() { + /* istanbul ignore next */ + if (!config.silent) { + console.error.apply(console, arguments); + } +} +exports.log = log; + +// Converts strings to be equivalent across all platforms. Primarily responsible +// for making sure we use '/' instead of '\' as path separators, but this may be +// expanded in the future if necessary +function convertErrorOutput(msg) { + if (typeof msg !== 'string') { + throw new TypeError('input must be a string'); + } + return msg.replace(/\\/g, '/'); +} +exports.convertErrorOutput = convertErrorOutput; + +// An exception class to help propagate command errors (e.g., non-zero exit +// status) up to the top-level. {@param value} should be a ShellString. +class CommandError extends Error { + constructor(value) { + super(value.toString()); + this.returnValue = value; + } +} +exports.CommandError = CommandError; // visible for testing + +// Shows error message. Throws if fatal is true (defaults to config.fatal, overridable with options.fatal) +function error(msg, _code, options) { + // Validate input + if (typeof msg !== 'string') throw new Error('msg must be a string'); + + var DEFAULT_OPTIONS = { + continue: false, + code: 1, + prefix: state.currentCmd + ': ', + silent: false, + fatal: config.fatal, + }; + + if (typeof _code === 'number' && isObject(options)) { + options.code = _code; + } else if (isObject(_code)) { // no 'code' + options = _code; + } else if (typeof _code === 'number') { // no 'options' + options = { code: _code }; + } else if (typeof _code !== 'number') { // only 'msg' + options = {}; + } + options = Object.assign({}, DEFAULT_OPTIONS, options); + + if (!state.errorCode) state.errorCode = options.code; + + var logEntry = convertErrorOutput(options.prefix + msg); + state.error = state.error ? 
state.error + '\n' : ''; + state.error += logEntry; + + // Throw an error, or log the entry + if (options.fatal) { + var err = new Error(logEntry); + err.code = options.code; + throw err; + } + if (msg.length > 0 && !options.silent) log(logEntry); + + if (!options.continue) { + throw new CommandError(new ShellString('', state.error, state.errorCode)); + } +} +exports.error = error; + +//@ +//@ ### ShellString(str) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var foo = new ShellString('hello world'); +//@ ``` +//@ +//@ This is a dedicated type returned by most ShellJS methods, which wraps a +//@ string (or array) value. This has all the string (or array) methods, but +//@ also exposes extra methods: [`.to()`](#shellstringprototypetofile), +//@ [`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods +//@ (ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string +//@ by calling `.toString()`. +//@ +//@ This type also exposes the corresponding command's stdout, stderr, and +//@ return status code via the `.stdout` (string), `.stderr` (string), and +//@ `.code` (number) properties respectively. +function ShellString(stdout, stderr, code) { + var that; + if (stdout instanceof Array) { + that = stdout; + that.stdout = stdout.join('\n'); + if (stdout.length > 0) that.stdout += '\n'; + } else { + that = new String(stdout); + that.stdout = stdout; + } + that.stderr = stderr; + that.code = code; + // A list of all commands that can appear on the right-hand side of a pipe + // (populated by calls to common.wrap()) + pipeMethods.forEach(function (cmd) { + that[cmd] = shellMethods[cmd].bind(that); + }); + return that; +} + +exports.ShellString = ShellString; + +// Returns {'alice': true, 'bob': false} when passed a string and dictionary as follows: +// parseOptions('-a', {'a':'alice', 'b':'bob'}); +// Returns {'reference': 'string-value', 'bob': false} when passed two dictionaries of the form: +// parseOptions({'-r': 'string-value'}, {'r':'reference', 'b':'bob'}); +// Throws an error when passed a string that does not start with '-': +// parseOptions('a', {'a':'alice'}); // throws +function parseOptions(opt, map, errorOptions) { + errorOptions = errorOptions || {}; + // Validate input + if (typeof opt !== 'string' && !isObject(opt)) { + throw new TypeError('options must be strings or key-value pairs'); + } else if (!isObject(map)) { + throw new TypeError('parseOptions() internal error: map must be an object'); + } else if (!isObject(errorOptions)) { + throw new TypeError( + 'parseOptions() internal error: errorOptions must be object', + ); + } + + if (opt === '--') { + // This means there are no options. + return {}; + } + + // All options are false by default + var options = {}; + Object.keys(map).forEach(function (letter) { + var optName = map[letter]; + if (optName[0] !== '!') { + options[optName] = false; + } + }); + + if (opt === '') return options; // defaults + + if (typeof opt === 'string') { + if (opt[0] !== '-') { + throw new Error("Options string must start with a '-'"); + } + + // e.g. 
chars = ['R', 'f'] + var chars = opt.slice(1).split(''); + + chars.forEach(function (c) { + if (c in map) { + var optionName = map[c]; + if (optionName[0] === '!') { + options[optionName.slice(1)] = false; + } else { + options[optionName] = true; + } + } else { + error('option not recognized: ' + c, errorOptions); + } + }); + } else { // opt is an Object + Object.keys(opt).forEach(function (key) { + if (key[0] === '-') { + // key is a string of the form '-r', '-d', etc. + var c = key[1]; + if (c in map) { + var optionName = map[c]; + options[optionName] = opt[key]; // assign the given value + } else { + error('option not recognized: ' + c, errorOptions); + } + } else if (key in options) { + // key is a "long option", so it should be the same + options[key] = opt[key]; + } else { + error('option not recognized: {' + key + ':...}', errorOptions); + } + }); + } + return options; +} +exports.parseOptions = parseOptions; + +function globOptions() { + // These options are just to make fast-glob be compatible with POSIX (bash) + // wildcard behavior. + var defaultGlobOptions = { + onlyFiles: false, + followSymbolicLinks: false, + }; + + var newGlobOptions = Object.assign({}, config.globOptions); + var optionRenames = { + // node-glob's 'nodir' is not quote the same as fast-glob's 'onlyFiles'. + // Compatibility for this is implemented at the call site. + mark: 'markDirectories', + matchBase: 'baseNameMatch', + }; + Object.keys(optionRenames).forEach(function (oldKey) { + var newKey = optionRenames[oldKey]; + if (oldKey in config.globOptions) { + newGlobOptions[newKey] = config.globOptions[oldKey]; + } + }); + var invertedOptionRenames = { + nobrace: 'braceExpansion', + noglobstar: 'globstar', + noext: 'extglob', + nocase: 'caseSensitiveMatch', + }; + Object.keys(invertedOptionRenames).forEach(function (oldKey) { + var newKey = invertedOptionRenames[oldKey]; + if (oldKey in config.globOptions) { + newGlobOptions[newKey] = !config.globOptions[oldKey]; + } + }); + return Object.assign({}, defaultGlobOptions, newGlobOptions); +} + +// Expands wildcards with matching (ie. existing) file names. +// For example: +// expand(['file*.js']) = ['file1.js', 'file2.js', ...] +// (if the files 'file1.js', 'file2.js', etc, exist in the current dir) +function expand(list) { + if (!Array.isArray(list)) { + throw new TypeError('must be an array'); + } + var expanded = []; + list.forEach(function (listEl) { + // Don't expand non-strings + if (typeof listEl !== 'string') { + expanded.push(listEl); + } else { + var ret; + var globOpts = globOptions(); + try { + ret = glob.sync(listEl, globOpts); + } catch (e) { + // if glob fails, interpret the string literally + ret = [listEl]; + } + // if nothing matched, interpret the string literally + ret = ret.length > 0 ? ret.sort() : [listEl]; + if (globOpts.nodir) { + ret = ret.filter(function (file) { + return !statNoFollowLinks(file).isDirectory(); + }); + } + expanded = expanded.concat(ret); + } + }); + return expanded; +} +exports.expand = expand; + +// Normalizes Buffer creation, using Buffer.alloc if possible. +// Also provides a good default buffer length for most use cases. +var buffer = typeof Buffer.alloc === 'function' ? + function (len) { + return Buffer.alloc(len || config.bufLength); + } : + function (len) { + return new Buffer(len || config.bufLength); + }; +exports.buffer = buffer; + +// Normalizes _unlinkSync() across platforms to match Unix behavior, i.e. 
+// file can be unlinked even if it's read-only, see https://github.com/joyent/node/issues/3006 +function unlinkSync(file) { + try { + fs.unlinkSync(file); + } catch (e) { + // Try to override file permission + /* istanbul ignore next */ + if (e.code === 'EPERM') { + fs.chmodSync(file, '0666'); + fs.unlinkSync(file); + } else { + throw e; + } + } +} +exports.unlinkSync = unlinkSync; + +// wrappers around common.statFollowLinks and common.statNoFollowLinks that clarify intent +// and improve readability +function statFollowLinks() { + return fs.statSync.apply(fs, arguments); +} +exports.statFollowLinks = statFollowLinks; + +function statNoFollowLinks() { + return fs.lstatSync.apply(fs, arguments); +} +exports.statNoFollowLinks = statNoFollowLinks; + +// e.g. 'shelljs_a5f185d0443ca...' +function randomFileName() { + function randomHash(count) { + if (count === 1) { + return parseInt(16 * Math.random(), 10).toString(16); + } + var hash = ''; + for (var i = 0; i < count; i++) { + hash += randomHash(1); + } + return hash; + } + + return 'shelljs_' + randomHash(20); +} +exports.randomFileName = randomFileName; + +// Common wrapper for all Unix-like commands that performs glob expansion, +// command-logging, and other nice things +function wrap(cmd, fn, options) { + options = options || {}; + return function () { + var retValue = null; + + state.currentCmd = cmd; + state.error = null; + state.errorCode = 0; + + try { + var args = [].slice.call(arguments, 0); + + // Log the command to stderr, if appropriate + if (config.verbose) { + console.error.apply(console, [cmd].concat(args)); + } + + // If this is coming from a pipe, let's set the pipedValue (otherwise, set + // it to the empty string) + state.pipedValue = (this && typeof this.stdout === 'string') ? this.stdout : ''; + + if (options.unix === false) { // this branch is for exec() + retValue = fn.apply(this, args); + } else { // and this branch is for everything else + if (isObject(args[0]) && args[0].constructor.name === 'Object') { + // a no-op, allowing the syntax `touch({'-r': file}, ...)` + } else if (args.length === 0 || typeof args[0] !== 'string' || args[0].length <= 1 || args[0][0] !== '-') { + args.unshift(''); // only add dummy option if '-option' not already present + } + + // flatten out arrays that are arguments, to make the syntax: + // `cp([file1, file2, file3], dest);` + // equivalent to: + // `cp(file1, file2, file3, dest);` + args = args.reduce(function (accum, cur) { + if (Array.isArray(cur)) { + return accum.concat(cur); + } + accum.push(cur); + return accum; + }, []); + + // Convert ShellStrings (basically just String objects) to regular strings + args = args.map(function (arg) { + if (isObject(arg) && arg.constructor.name === 'String') { + return arg.toString(); + } + return arg; + }); + + // Expand the '~' if appropriate + var homeDir = os.homedir(); + args = args.map(function (arg) { + if (typeof arg === 'string' && arg.slice(0, 2) === '~/' || arg === '~') { + return arg.replace(/^~/, homeDir); + } + return arg; + }); + + // Perform glob-expansion on all arguments after globStart, but preserve + // the arguments before it (like regexes for sed and grep) + if (!config.noglob && options.allowGlobbing === true) { + args = args.slice(0, options.globStart).concat(expand(args.slice(options.globStart))); + } + + try { + // parse options if options are provided + if (isObject(options.cmdOptions)) { + args[0] = parseOptions(args[0], options.cmdOptions); + } + + retValue = fn.apply(this, args); + } catch (e) { + /* istanbul 
ignore else */ + if (e instanceof CommandError) { + retValue = e.returnValue; + } else { + throw e; // this is probably a bug that should be thrown up the call stack + } + } + } + } catch (e) { + /* istanbul ignore next */ + if (!state.error) { + // If state.error hasn't been set it's an error thrown by Node, not us - probably a bug... + e.name = 'ShellJSInternalError'; + throw e; + } + if (config.fatal || options.handlesFatalDynamically) throw e; + } + + if (options.wrapOutput && + (typeof retValue === 'string' || Array.isArray(retValue))) { + retValue = new ShellString(retValue, state.error, state.errorCode); + } + + state.currentCmd = 'shell.js'; + return retValue; + }; +} // wrap +exports.wrap = wrap; + +// This returns all the input that is piped into the current command (or the +// empty string, if this isn't on the right-hand side of a pipe +function _readFromPipe() { + return state.pipedValue; +} +exports.readFromPipe = _readFromPipe; + +var DEFAULT_WRAP_OPTIONS = { + allowGlobbing: true, + canReceivePipe: false, + cmdOptions: null, + globStart: 1, + handlesFatalDynamically: false, + pipeOnly: false, + wrapOutput: true, + unix: true, +}; + +// This is populated during plugin registration +var pipeMethods = []; + +// Register a new ShellJS command +function _register(name, implementation, wrapOptions) { + wrapOptions = wrapOptions || {}; + + // Validate options + Object.keys(wrapOptions).forEach(function (option) { + if (!DEFAULT_WRAP_OPTIONS.hasOwnProperty(option)) { + throw new Error("Unknown option '" + option + "'"); + } + if (typeof wrapOptions[option] !== typeof DEFAULT_WRAP_OPTIONS[option]) { + throw new TypeError("Unsupported type '" + typeof wrapOptions[option] + + "' for option '" + option + "'"); + } + }); + + // If an option isn't specified, use the default + wrapOptions = Object.assign({}, DEFAULT_WRAP_OPTIONS, wrapOptions); + + if (shell.hasOwnProperty(name)) { + throw new Error('Command `' + name + '` already exists'); + } + + if (wrapOptions.pipeOnly) { + wrapOptions.canReceivePipe = true; + shellMethods[name] = wrap(name, implementation, wrapOptions); + } else { + shell[name] = wrap(name, implementation, wrapOptions); + } + + if (wrapOptions.canReceivePipe) { + pipeMethods.push(name); + } +} +exports.register = _register; diff --git a/node_modules/shelljs/src/cp.js b/node_modules/shelljs/src/cp.js new file mode 100644 index 0000000..af4a0a1 --- /dev/null +++ b/node_modules/shelljs/src/cp.js @@ -0,0 +1,314 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('cp', _cp, { + cmdOptions: { + 'f': '!no_force', + 'n': 'no_force', + 'u': 'update', + 'R': 'recursive', + 'r': 'recursive', + 'L': 'followsymlink', + 'P': 'noFollowsymlink', + 'p': 'preserve', + }, + wrapOutput: false, +}); + +// Buffered file copy, synchronous +// (Using readFileSync() + writeFileSync() could easily cause a memory overflow +// with large files) +function copyFileSync(srcFile, destFile, options) { + if (!fs.existsSync(srcFile)) { + common.error('copyFileSync: no such file or directory: ' + srcFile); + } + + var isWindows = process.platform === 'win32'; + + // Check the mtimes of the files if the '-u' flag is provided + try { + if (options.update && common.statFollowLinks(srcFile).mtime < fs.statSync(destFile).mtime) { + return; + } + } catch (e) { + // If we're here, destFile probably doesn't exist, so just do a normal copy + } + + if (common.statNoFollowLinks(srcFile).isSymbolicLink() && !options.followsymlink) { + try { + 
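+      // lstat throws if destFile does not exist; if it does exist, remove it
+      // so the symlink can be recreated below to point at srcFile's target.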
common.statNoFollowLinks(destFile); + common.unlinkSync(destFile); // re-link it + } catch (e) { + // it doesn't exist, so no work needs to be done + } + + var symlinkFull = fs.readlinkSync(srcFile); + fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); + } else { + var buf = common.buffer(); + var bufLength = buf.length; + var bytesRead = bufLength; + var pos = 0; + var fdr = null; + var fdw = null; + var srcStat = common.statFollowLinks(srcFile); + + try { + fdr = fs.openSync(srcFile, 'r'); + } catch (e) { + /* istanbul ignore next */ + common.error('copyFileSync: could not read src file (' + srcFile + ')'); + } + + try { + fdw = fs.openSync(destFile, 'w', srcStat.mode); + } catch (e) { + /* istanbul ignore next */ + common.error('copyFileSync: could not write to dest file (code=' + e.code + '):' + destFile); + } + + while (bytesRead === bufLength) { + bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); + fs.writeSync(fdw, buf, 0, bytesRead); + pos += bytesRead; + } + + if (options.preserve) { + fs.fchownSync(fdw, srcStat.uid, srcStat.gid); + // Note: utimesSync does not work (rounds to seconds), but futimesSync has + // millisecond precision. + fs.futimesSync(fdw, srcStat.atime, srcStat.mtime); + } + + fs.closeSync(fdr); + fs.closeSync(fdw); + } +} + +// Recursively copies 'sourceDir' into 'destDir' +// Adapted from https://github.com/ryanmcgrath/wrench-js +// +// Copyright (c) 2010 Ryan McGrath +// Copyright (c) 2012 Artur Adib +// +// Licensed under the MIT License +// http://www.opensource.org/licenses/mit-license.php +function cpdirSyncRecursive(sourceDir, destDir, currentDepth, opts) { + if (!opts) opts = {}; + + // Ensure there is not a run away recursive copy + if (currentDepth >= common.config.maxdepth) return; + currentDepth++; + + var isWindows = process.platform === 'win32'; + + // Create the directory where all our junk is moving to; read the mode/etc. of + // the source directory (we'll set this on the destDir at the end). + var checkDir = common.statFollowLinks(sourceDir); + try { + fs.mkdirSync(destDir); + } catch (e) { + // if the directory already exists, that's okay + if (e.code !== 'EEXIST') throw e; + } + + var files = fs.readdirSync(sourceDir); + + for (var i = 0; i < files.length; i++) { + var srcFile = sourceDir + '/' + files[i]; + var destFile = destDir + '/' + files[i]; + var srcFileStat = common.statNoFollowLinks(srcFile); + + var symlinkFull; + if (opts.followsymlink) { + if (cpcheckcycle(sourceDir, srcFile)) { + // Cycle link found. + console.error('Cycle link found.'); + symlinkFull = fs.readlinkSync(srcFile); + fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); + continue; + } + } + if (srcFileStat.isDirectory()) { + /* recursion this thing right on back. */ + cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); + } else if (srcFileStat.isSymbolicLink() && !opts.followsymlink) { + symlinkFull = fs.readlinkSync(srcFile); + try { + common.statNoFollowLinks(destFile); + common.unlinkSync(destFile); // re-link it + } catch (e) { + // it doesn't exist, so no work needs to be done + } + fs.symlinkSync(symlinkFull, destFile, isWindows ? 
'junction' : null); + } else if (srcFileStat.isSymbolicLink() && opts.followsymlink) { + srcFileStat = common.statFollowLinks(srcFile); + if (srcFileStat.isDirectory()) { + cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); + } else { + copyFileSync(srcFile, destFile, opts); + } + } else if (fs.existsSync(destFile) && opts.no_force) { + common.log('skipping existing file: ' + files[i]); + } else { + copyFileSync(srcFile, destFile, opts); + } + } // for files + + // finally change the mode for the newly created directory (otherwise, we + // couldn't add files to a read-only directory). + // var checkDir = common.statFollowLinks(sourceDir); + if (opts.preserve) { + fs.utimesSync(destDir, checkDir.atime, checkDir.mtime); + } + fs.chmodSync(destDir, checkDir.mode); +} // cpdirSyncRecursive + +// Checks if cureent file was created recently +function checkRecentCreated(sources, index) { + var lookedSource = sources[index]; + return sources.slice(0, index).some(function (src) { + return path.basename(src) === path.basename(lookedSource); + }); +} + +function cpcheckcycle(sourceDir, srcFile) { + var srcFileStat = common.statNoFollowLinks(srcFile); + if (srcFileStat.isSymbolicLink()) { + // Do cycle check. For example: + // $ mkdir -p 1/2/3/4 + // $ cd 1/2/3/4 + // $ ln -s ../../3 link + // $ cd ../../../.. + // $ cp -RL 1 copy + var cyclecheck = common.statFollowLinks(srcFile); + if (cyclecheck.isDirectory()) { + var sourcerealpath = fs.realpathSync(sourceDir); + var symlinkrealpath = fs.realpathSync(srcFile); + var re = new RegExp(symlinkrealpath); + if (re.test(sourcerealpath)) { + return true; + } + } + } + return false; +} + +//@ +//@ ### cp([options,] source [, source ...], dest) +//@ ### cp([options,] source_array, dest) +//@ +//@ Available options: +//@ +//@ + `-f`: force (default behavior) +//@ + `-n`: no-clobber +//@ + `-u`: only copy if `source` is newer than `dest` +//@ + `-r`, `-R`: recursive +//@ + `-L`: follow symlinks +//@ + `-P`: don't follow symlinks +//@ + `-p`: preserve file mode, ownership, and timestamps +//@ +//@ Examples: +//@ +//@ ```javascript +//@ cp('file1', 'dir1'); +//@ cp('-R', 'path/to/dir/', '~/newCopy/'); +//@ cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); +//@ cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above +//@ ``` +//@ +//@ Copies files. Returns a [ShellString](#shellstringstr) indicating success +//@ or failure. 
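+// Illustrative usage sketch (not part of upstream shelljs): exercises the cp()
+// flags documented above. The function is defined but never invoked, so module
+// behavior is unchanged; the paths are hypothetical.
+function _cpUsageSketch() {
+  var shell = require('shelljs');
+  shell.cp('-R', 'path/to/dir/', '~/newCopy/'); // recursive copy of a directory
+  shell.cp('-n', 'file1', 'dir1/');             // no-clobber: keep an existing dest
+  shell.cp('-u', 'file1', 'dir1/');             // copy only if file1 is newer than dest
+}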
+function _cp(options, sources, dest) { + // If we're missing -R, it actually implies -L (unless -P is explicit) + if (options.followsymlink) { + options.noFollowsymlink = false; + } + if (!options.recursive && !options.noFollowsymlink) { + options.followsymlink = true; + } + + // Get sources, dest + if (arguments.length < 3) { + common.error('missing and/or '); + } else { + sources = [].slice.call(arguments, 1, arguments.length - 1); + dest = arguments[arguments.length - 1]; + } + + var destExists = fs.existsSync(dest); + var destStat = destExists && common.statFollowLinks(dest); + + // Dest is not existing dir, but multiple sources given + if ((!destExists || !destStat.isDirectory()) && sources.length > 1) { + common.error('dest is not a directory (too many sources)'); + } + + // Dest is an existing file, but -n is given + if (destExists && destStat.isFile() && options.no_force) { + return new common.ShellString('', '', 0); + } + + sources.forEach(function (src, srcIndex) { + if (!fs.existsSync(src)) { + if (src === '') src = "''"; // if src was empty string, display empty string + common.error('no such file or directory: ' + src, { continue: true }); + return; // skip file + } + var srcStat = common.statFollowLinks(src); + if (!options.noFollowsymlink && srcStat.isDirectory()) { + if (!options.recursive) { + // Non-Recursive + common.error("omitting directory '" + src + "'", { continue: true }); + } else { + // Recursive + // 'cp /a/source dest' should create 'source' in 'dest' + var newDest = (destStat && destStat.isDirectory()) ? + path.join(dest, path.basename(src)) : + dest; + + try { + common.statFollowLinks(path.dirname(dest)); + cpdirSyncRecursive(src, newDest, 0, options); + } catch (e) { + /* istanbul ignore next */ + common.error("cannot create directory '" + dest + "': No such file or directory"); + } + } + } else { + // If here, src is a file + + // When copying to '/path/dir': + // thisDest = '/path/dir/file1' + var thisDest = dest; + if (destStat && destStat.isDirectory()) { + thisDest = path.normalize(dest + '/' + path.basename(src)); + } + + var thisDestExists = fs.existsSync(thisDest); + if (thisDestExists && checkRecentCreated(sources, srcIndex)) { + // cannot overwrite file created recently in current execution, but we want to continue copying other files + if (!options.no_force) { + common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); + } + return; + } + + if (thisDestExists && options.no_force) { + return; // skip file + } + + if (path.relative(src, thisDest) === '') { + // a file cannot be copied to itself, but we want to continue copying other files + common.error("'" + thisDest + "' and '" + src + "' are the same file", { continue: true }); + return; + } + + copyFileSync(src, thisDest, options); + } + }); // forEach(src) + + return new common.ShellString('', common.state.error, common.state.errorCode); +} +module.exports = _cp; diff --git a/node_modules/shelljs/src/dirs.js b/node_modules/shelljs/src/dirs.js new file mode 100644 index 0000000..9b7251d --- /dev/null +++ b/node_modules/shelljs/src/dirs.js @@ -0,0 +1,210 @@ +var path = require('path'); +var common = require('./common'); +var _cd = require('./cd'); + +common.register('dirs', _dirs, { + wrapOutput: false, +}); +common.register('pushd', _pushd, { + wrapOutput: false, +}); +common.register('popd', _popd, { + wrapOutput: false, +}); + +// Pushd/popd/dirs internals +var _dirStack = []; + +function _isStackIndex(index) { + return 
(/^[-+]\d+$/).test(index); +} + +function _parseStackIndex(index) { + if (_isStackIndex(index)) { + if (Math.abs(index) < _dirStack.length + 1) { // +1 for pwd + return (/^-/).test(index) ? Number(index) - 1 : Number(index); + } + common.error(index + ': directory stack index out of range'); + } else { + common.error(index + ': invalid number'); + } +} + +function _actualDirStack() { + return [process.cwd()].concat(_dirStack); +} + +//@ +//@ ### pushd([options,] [dir | '-N' | '+N']) +//@ +//@ Available options: +//@ +//@ + `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. +//@ + `-q`: Suppresses output to the console. +//@ +//@ Arguments: +//@ +//@ + `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. +//@ + `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. +//@ + `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ // process.cwd() === '/usr' +//@ pushd('/etc'); // Returns /etc /usr +//@ pushd('+1'); // Returns /usr /etc +//@ ``` +//@ +//@ Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. Returns an array of paths in the stack. +function _pushd(options, dir) { + if (_isStackIndex(options)) { + dir = options; + options = ''; + } + + options = common.parseOptions(options, { + 'n': 'no-cd', + 'q': 'quiet', + }); + + var dirs = _actualDirStack(); + + if (dir === '+0') { + return dirs; // +0 is a noop + } else if (!dir) { + if (dirs.length > 1) { + dirs = dirs.splice(1, 1).concat(dirs); + } else { + return common.error('no other directory'); + } + } else if (_isStackIndex(dir)) { + var n = _parseStackIndex(dir); + dirs = dirs.slice(n).concat(dirs.slice(0, n)); + } else if (options['no-cd']) { + dirs.splice(1, 0, dir); + } else { + dirs.unshift(dir); + } + + if (options['no-cd']) { + dirs = dirs.slice(1); + } else { + dir = path.resolve(dirs.shift()); + _cd('', dir); + } + + _dirStack = dirs; + return _dirs(options.quiet ? '-q' : ''); +} +exports.pushd = _pushd; + +//@ +//@ +//@ ### popd([options,] ['-N' | '+N']) +//@ +//@ Available options: +//@ +//@ + `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. +//@ + `-q`: Suppresses output to the console. +//@ +//@ Arguments: +//@ +//@ + `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. +//@ + `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ echo(process.cwd()); // '/usr' +//@ pushd('/etc'); // '/etc /usr' +//@ echo(process.cwd()); // '/etc' +//@ popd(); // '/usr' +//@ echo(process.cwd()); // '/usr' +//@ ``` +//@ +//@ When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. 
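+// Illustrative usage sketch (not part of upstream shelljs): shows how the
+// directory-stack commands documented above compose. Defined but never
+// invoked; directory names are hypothetical and assume the cwd starts at '/usr'.
+function _dirStackUsageSketch() {
+  var shell = require('shelljs');
+  shell.pushd('/etc');  // push the old cwd and cd to /etc; stack: ['/etc', '/usr']
+  shell.dirs('+0');     // returns '/etc' (the current directory is always index 0)
+  shell.popd();         // drop '/etc' from the stack and cd back to '/usr'
+}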
+function _popd(options, index) { + if (_isStackIndex(options)) { + index = options; + options = ''; + } + + options = common.parseOptions(options, { + 'n': 'no-cd', + 'q': 'quiet', + }); + + if (!_dirStack.length) { + return common.error('directory stack empty'); + } + + index = _parseStackIndex(index || '+0'); + + if (options['no-cd'] || index > 0 || _dirStack.length + index === 0) { + index = index > 0 ? index - 1 : index; + _dirStack.splice(index, 1); + } else { + var dir = path.resolve(_dirStack.shift()); + _cd('', dir); + } + + return _dirs(options.quiet ? '-q' : ''); +} +exports.popd = _popd; + +//@ +//@ +//@ ### dirs([options | '+N' | '-N']) +//@ +//@ Available options: +//@ +//@ + `-c`: Clears the directory stack by deleting all of the elements. +//@ + `-q`: Suppresses output to the console. +//@ +//@ Arguments: +//@ +//@ + `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. +//@ + `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. +//@ +//@ Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. +//@ +//@ See also: `pushd`, `popd` +function _dirs(options, index) { + if (_isStackIndex(options)) { + index = options; + options = ''; + } + + options = common.parseOptions(options, { + 'c': 'clear', + 'q': 'quiet', + }); + + if (options.clear) { + _dirStack = []; + return _dirStack; + } + + var stack = _actualDirStack(); + + if (index) { + index = _parseStackIndex(index); + + if (index < 0) { + index = stack.length + index; + } + + if (!options.quiet) { + common.log(stack[index]); + } + return stack[index]; + } + + if (!options.quiet) { + common.log(stack.join(' ')); + } + + return stack; +} +exports.dirs = _dirs; diff --git a/node_modules/shelljs/src/echo.js b/node_modules/shelljs/src/echo.js new file mode 100644 index 0000000..da37f43 --- /dev/null +++ b/node_modules/shelljs/src/echo.js @@ -0,0 +1,62 @@ +var format = require('util').format; + +var common = require('./common'); + +common.register('echo', _echo, { + allowGlobbing: false, +}); + +//@ +//@ ### echo([options,] string [, string ...]) +//@ +//@ Available options: +//@ +//@ + `-e`: interpret backslash escapes (default) +//@ + `-n`: remove trailing newline from output +//@ +//@ Examples: +//@ +//@ ```javascript +//@ echo('hello world'); +//@ var str = echo('hello world'); +//@ echo('-n', 'no newline at end'); +//@ ``` +//@ +//@ Prints `string` to stdout, and returns a [ShellString](#shellstringstr). +function _echo(opts) { + // allow strings starting with '-', see issue #20 + var messages = [].slice.call(arguments, opts ? 0 : 1); + var options = {}; + + // If the first argument starts with '-', parse it as options string. + // If parseOptions throws, it wasn't an options string. + try { + options = common.parseOptions(messages[0], { + 'e': 'escapes', + 'n': 'no_newline', + }, { + silent: true, + }); + + // Allow null to be echoed + if (messages[0]) { + messages.shift(); + } + } catch (_) { + // Clear out error if an error occurred + common.state.error = null; + } + + var output = format.apply(null, messages); + + // Add newline if -n is not passed. 
+ if (!options.no_newline) { + output += '\n'; + } + + process.stdout.write(output); + + return output; +} + +module.exports = _echo; diff --git a/node_modules/shelljs/src/error.js b/node_modules/shelljs/src/error.js new file mode 100644 index 0000000..b0ed59e --- /dev/null +++ b/node_modules/shelljs/src/error.js @@ -0,0 +1,15 @@ +var common = require('./common'); + +//@ +//@ ### error() +//@ +//@ Tests if error occurred in the last command. Returns a truthy value if an +//@ error returned, or a falsy value otherwise. +//@ +//@ **Note**: do not rely on the +//@ return value to be an error message. If you need the last error message, use +//@ the `.stderr` attribute from the last command's return value instead. +function error() { + return common.state.error; +} +module.exports = error; diff --git a/node_modules/shelljs/src/errorCode.js b/node_modules/shelljs/src/errorCode.js new file mode 100644 index 0000000..a1c7fd2 --- /dev/null +++ b/node_modules/shelljs/src/errorCode.js @@ -0,0 +1,10 @@ +var common = require('./common'); + +//@ +//@ ### errorCode() +//@ +//@ Returns the error code from the last command. +function errorCode() { + return common.state.errorCode; +} +module.exports = errorCode; diff --git a/node_modules/shelljs/src/exec-child.js b/node_modules/shelljs/src/exec-child.js new file mode 100644 index 0000000..e8446f6 --- /dev/null +++ b/node_modules/shelljs/src/exec-child.js @@ -0,0 +1,71 @@ +var childProcess = require('child_process'); +var fs = require('fs'); + +function main() { + var paramFilePath = process.argv[2]; + + var serializedParams = fs.readFileSync(paramFilePath, 'utf8'); + var params = JSON.parse(serializedParams); + + var cmd = params.command; + var execOptions = params.execOptions; + var pipe = params.pipe; + var stdoutFile = params.stdoutFile; + var stderrFile = params.stderrFile; + + function isMaxBufferError(err) { + var maxBufferErrorPattern = /^.*\bmaxBuffer\b.*exceeded.*$/; + if (err instanceof Error && err.message && + err.message.match(maxBufferErrorPattern)) { + // < v10 + // Error: stdout maxBuffer exceeded + return true; + } else if (err instanceof RangeError && err.message && + err.message.match(maxBufferErrorPattern)) { + // >= v10 + // RangeError [ERR_CHILD_PROCESS_STDIO_MAXBUFFER]: stdout maxBuffer length + // exceeded + return true; + } + return false; + } + + var stdoutStream = fs.createWriteStream(stdoutFile); + var stderrStream = fs.createWriteStream(stderrFile); + + function appendError(message, code) { + stderrStream.write(message); + process.exitCode = code; + } + + var c = childProcess.exec(cmd, execOptions, function (err) { + if (!err) { + process.exitCode = 0; + } else if (isMaxBufferError(err)) { + appendError('maxBuffer exceeded', 1); + } else if (err.code === undefined && err.message) { + /* istanbul ignore next */ + appendError(err.message, 1); + } else if (err.code === undefined) { + /* istanbul ignore next */ + appendError('Unknown issue', 1); + } else { + process.exitCode = err.code; + } + }); + + c.stdout.pipe(stdoutStream); + c.stderr.pipe(stderrStream); + c.stdout.pipe(process.stdout); + c.stderr.pipe(process.stderr); + + if (pipe) { + c.stdin.end(pipe); + } +} + +// This file should only be executed. This module does not export anything. 
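+// Illustrative sketch (not part of upstream shelljs): the params file read by
+// main() is JSON carrying the fields read from it above; the concrete values
+// below are hypothetical and this object is never used.
+var _exampleParams = {
+  command: 'echo hello',
+  execOptions: { cwd: '/tmp', maxBuffer: 20 * 1024 * 1024, encoding: 'utf8' },
+  pipe: '',                                  // data piped into the child's stdin
+  stdoutFile: '/tmp/shelljs_stdout_example', // child stdout is captured here
+  stderrFile: '/tmp/shelljs_stderr_example', // child stderr is captured here
+};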
+/* istanbul ignore else */ +if (require.main === module) { + main(); +} diff --git a/node_modules/shelljs/src/exec.js b/node_modules/shelljs/src/exec.js new file mode 100644 index 0000000..3907769 --- /dev/null +++ b/node_modules/shelljs/src/exec.js @@ -0,0 +1,255 @@ +var path = require('path'); +var fs = require('fs'); +var child = require('child_process'); +var common = require('./common'); +var _tempDir = require('./tempdir').tempDir; +var _pwd = require('./pwd'); + +var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; +var DEFAULT_ERROR_CODE = 1; + +common.register('exec', _exec, { + unix: false, + canReceivePipe: true, + wrapOutput: false, + handlesFatalDynamically: true, +}); + +// We use this function to run `exec` synchronously while also providing realtime +// output. +function execSync(cmd, opts, pipe) { + if (!common.config.execPath) { + try { + common.error('Unable to find a path to the node binary. Please manually set config.execPath'); + } catch (e) { + if (opts.fatal) { + throw e; + } + + return; + } + } + + var tempDir = _tempDir(); + var paramsFile = path.join(tempDir, common.randomFileName()); + var stderrFile = path.join(tempDir, common.randomFileName()); + var stdoutFile = path.join(tempDir, common.randomFileName()); + + opts = common.extend({ + silent: common.config.silent, + fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary + cwd: _pwd().toString(), + env: process.env, + maxBuffer: DEFAULT_MAXBUFFER_SIZE, + encoding: 'utf8', + }, opts); + + if (fs.existsSync(paramsFile)) common.unlinkSync(paramsFile); + if (fs.existsSync(stderrFile)) common.unlinkSync(stderrFile); + if (fs.existsSync(stdoutFile)) common.unlinkSync(stdoutFile); + + opts.cwd = path.resolve(opts.cwd); + + var paramsToSerialize = { + command: cmd, + execOptions: opts, + pipe, + stdoutFile, + stderrFile, + }; + + // Create the files and ensure these are locked down (for read and write) to + // the current user. The main concerns here are: + // + // * If we execute a command which prints sensitive output, then + // stdoutFile/stderrFile must not be readable by other users. + // * paramsFile must not be readable by other users, or else they can read it + // to figure out the path for stdoutFile/stderrFile and create these first + // (locked down to their own access), which will crash exec() when it tries + // to write to the files. + function writeFileLockedDown(filePath, data) { + fs.writeFileSync(filePath, data, { + encoding: 'utf8', + mode: parseInt('600', 8), + }); + } + writeFileLockedDown(stdoutFile, ''); + writeFileLockedDown(stderrFile, ''); + writeFileLockedDown(paramsFile, JSON.stringify(paramsToSerialize)); + + var execArgs = [ + path.join(__dirname, 'exec-child.js'), + paramsFile, + ]; + + /* istanbul ignore else */ + if (opts.silent) { + opts.stdio = 'ignore'; + } else { + opts.stdio = [0, 1, 2]; + } + + var code = 0; + + // Welcome to the future + try { + // Bad things if we pass in a `shell` option to child_process.execFileSync, + // so we need to explicitly remove it here. + delete opts.shell; + + child.execFileSync(common.config.execPath, execArgs, opts); + } catch (e) { + // Commands with non-zero exit code raise an exception. + code = e.status || DEFAULT_ERROR_CODE; + } + + // fs.readFileSync uses buffer encoding by default, so call + // it without the encoding option if the encoding is 'buffer'. + // Also, if the exec timeout is too short for node to start up, + // the files will not be created, so these calls will throw. 
+ var stdout = ''; + var stderr = ''; + if (opts.encoding === 'buffer') { + stdout = fs.readFileSync(stdoutFile); + stderr = fs.readFileSync(stderrFile); + } else { + stdout = fs.readFileSync(stdoutFile, opts.encoding); + stderr = fs.readFileSync(stderrFile, opts.encoding); + } + + // No biggie if we can't erase the files now -- they're in a temp dir anyway + // and we locked down permissions (see the note above). + try { common.unlinkSync(paramsFile); } catch (e) {} + try { common.unlinkSync(stderrFile); } catch (e) {} + try { common.unlinkSync(stdoutFile); } catch (e) {} + + if (code !== 0) { + // Note: `silent` should be unconditionally true to avoid double-printing + // the command's stderr, and to avoid printing any stderr when the user has + // set `shell.config.silent`. + common.error(stderr, code, { continue: true, silent: true, fatal: opts.fatal }); + } + var obj = common.ShellString(stdout, stderr, code); + return obj; +} // execSync() + +// Wrapper around exec() to enable echoing output to console in real time +function execAsync(cmd, opts, pipe, callback) { + opts = common.extend({ + silent: common.config.silent, + fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary + cwd: _pwd().toString(), + env: process.env, + maxBuffer: DEFAULT_MAXBUFFER_SIZE, + encoding: 'utf8', + }, opts); + + var c = child.exec(cmd, opts, function (err, stdout, stderr) { + if (callback) { + if (!err) { + callback(0, stdout, stderr); + } else if (err.code === undefined) { + // See issue #536 + /* istanbul ignore next */ + callback(1, stdout, stderr); + } else { + callback(err.code, stdout, stderr); + } + } + }); + + if (pipe) c.stdin.end(pipe); + + if (!opts.silent) { + c.stdout.pipe(process.stdout); + c.stderr.pipe(process.stderr); + } + + return c; +} + +//@ +//@ ### exec(command [, options] [, callback]) +//@ +//@ Available options: +//@ +//@ + `async`: Asynchronous execution. If a callback is provided, it will be set to +//@ `true`, regardless of the passed value (default: `false`). +//@ + `fatal`: Exit upon error (default: `false`). +//@ + `silent`: Do not echo program output to console (default: `false`). +//@ + `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and +//@ what is written to stdout and stderr when not in silent mode (default: `'utf8'`). +//@ + and any option available to Node.js's +//@ [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var version = exec('node --version', {silent:true}).stdout; +//@ +//@ var child = exec('some_long_running_process', {async:true}); +//@ child.stdout.on('data', function(data) { +//@ /* ... do something with data ... */ +//@ }); +//@ +//@ exec('some_long_running_process', function(code, stdout, stderr) { +//@ console.log('Exit code:', code); +//@ console.log('Program output:', stdout); +//@ console.log('Program stderr:', stderr); +//@ }); +//@ ``` +//@ +//@ Executes the given `command` _synchronously_, unless otherwise specified. +//@ When in synchronous mode, this returns a [ShellString](#shellstringstr). +//@ Otherwise, this returns the child process object, and the `callback` +//@ receives the arguments `(code, stdout, stderr)`. +//@ +//@ Not seeing the behavior you want? `exec()` runs everything through `sh` +//@ by default (or `cmd.exe` on Windows), which differs from `bash`. 
If you +//@ need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. +//@ +//@ **Security note:** as `shell.exec()` executes an arbitrary string in the +//@ system shell, it is **critical** to properly sanitize user input to avoid +//@ **command injection**. For more context, consult the [Security +//@ Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). +function _exec(command, options, callback) { + options = options || {}; + + var pipe = common.readFromPipe(); + + // Callback is defined instead of options. + if (typeof options === 'function') { + callback = options; + options = { async: true }; + } + + // Callback is defined with options. + if (typeof options === 'object' && typeof callback === 'function') { + options.async = true; + } + + options = common.extend({ + silent: common.config.silent, + fatal: common.config.fatal, + async: false, + }, options); + + if (!command) { + try { + common.error('must specify command'); + } catch (e) { + if (options.fatal) { + throw e; + } + + return; + } + } + + if (options.async) { + return execAsync(command, options, pipe, callback); + } else { + return execSync(command, options, pipe); + } +} +module.exports = _exec; diff --git a/node_modules/shelljs/src/find.js b/node_modules/shelljs/src/find.js new file mode 100644 index 0000000..80db993 --- /dev/null +++ b/node_modules/shelljs/src/find.js @@ -0,0 +1,66 @@ +var path = require('path'); +var common = require('./common'); +var _ls = require('./ls'); + +common.register('find', _find, { + cmdOptions: { + 'L': 'link', + }, +}); + +//@ +//@ ### find(path [, path ...]) +//@ ### find(path_array) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ find('src', 'lib'); +//@ find(['src', 'lib']); // same as above +//@ find('.').filter(function(file) { return file.match(/\.js$/); }); +//@ ``` +//@ +//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all +//@ files (however deep) in the given paths. +//@ +//@ The main difference from `ls('-R', path)` is that the resulting file names +//@ include the base directories (e.g., `lib/resources/file1` instead of just `file1`). +function _find(options, paths) { + if (!paths) { + common.error('no path specified'); + } else if (typeof paths === 'string') { + paths = [].slice.call(arguments, 1); + } + + var list = []; + + function pushFile(file) { + if (process.platform === 'win32') { + file = file.replace(/\\/g, '/'); + } + list.push(file); + } + + // why not simply do `ls('-R', paths)`? 
because the output wouldn't give the base dirs + // to get the base dir in the output, we need instead `ls('-R', 'dir/*')` for every directory + + paths.forEach(function (file) { + var stat; + try { + stat = common.statFollowLinks(file); + } catch (e) { + common.error('no such file or directory: ' + file); + } + + pushFile(file); + + if (stat.isDirectory()) { + _ls({ recursive: true, all: true, link: options.link }, file).forEach(function (subfile) { + pushFile(path.join(file, subfile)); + }); + } + }); + + return list; +} +module.exports = _find; diff --git a/node_modules/shelljs/src/grep.js b/node_modules/shelljs/src/grep.js new file mode 100644 index 0000000..cfc83e4 --- /dev/null +++ b/node_modules/shelljs/src/grep.js @@ -0,0 +1,198 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('grep', _grep, { + globStart: 2, // don't glob-expand the regex + canReceivePipe: true, + cmdOptions: { + 'v': 'inverse', + 'l': 'nameOnly', + 'i': 'ignoreCase', + 'n': 'lineNumber', + 'B': 'beforeContext', + 'A': 'afterContext', + 'C': 'context', + }, +}); + +//@ +//@ ### grep([options,] regex_filter, file [, file ...]) +//@ ### grep([options,] regex_filter, file_array) +//@ +//@ Available options: +//@ +//@ + `-v`: Invert `regex_filter` (only print non-matching lines). +//@ + `-l`: Print only filenames of matching files. +//@ + `-i`: Ignore case. +//@ + `-n`: Print line numbers. +//@ + `-B `: Show `` lines before each result. +//@ + `-A `: Show `` lines after each result. +//@ + `-C `: Show `` lines before and after each result. -B and -A override this option. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ grep('-v', 'GLOBAL_VARIABLE', '*.js'); +//@ grep('GLOBAL_VARIABLE', '*.js'); +//@ grep('-B', 3, 'GLOBAL_VARIABLE', '*.js'); +//@ grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js'); +//@ grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js'); +//@ ``` +//@ +//@ Reads input string from given files and returns a +//@ [ShellString](#shellstringstr) containing all lines of the @ file that match +//@ the given `regex_filter`. 
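+// Illustrative usage sketch (not part of upstream shelljs): exercises the
+// context flags documented above. Defined but never invoked; file names are
+// hypothetical.
+function _grepUsageSketch() {
+  var shell = require('shelljs');
+  shell.grep('-i', 'todo', 'src/*.js');        // case-insensitive match
+  shell.grep({ '-C': 2 }, 'ERROR', 'app.log'); // 2 lines of context around each hit
+  shell.grep('-vn', 'DEBUG', 'app.log');       // line-numbered non-matching lines
+}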
+function _grep(options, regex, files) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no paths given', 2); + + var idx = 2; + var contextError = ': invalid context length argument'; + // If the option has been found but not read, copy value from arguments + if (options.beforeContext === true) { + idx = 3; + options.beforeContext = Number(arguments[1]); + if (options.beforeContext < 0) { + common.error(options.beforeContext + contextError, 2); + } + } + if (options.afterContext === true) { + idx = 3; + options.afterContext = Number(arguments[1]); + if (options.afterContext < 0) { + common.error(options.afterContext + contextError, 2); + } + } + if (options.context === true) { + idx = 3; + options.context = Number(arguments[1]); + if (options.context < 0) { + common.error(options.context + contextError, 2); + } + } + // If before or after not given but context is, update values + if (typeof options.context === 'number') { + if (options.beforeContext === false) { + options.beforeContext = options.context; + } + if (options.afterContext === false) { + options.afterContext = options.context; + } + } + regex = arguments[idx - 1]; + files = [].slice.call(arguments, idx); + + if (pipe) { + files.unshift('-'); + } + + var grep = []; + if (options.ignoreCase) { + regex = new RegExp(regex, 'i'); + } + files.forEach(function (file) { + if (!fs.existsSync(file) && file !== '-') { + common.error('no such file or directory: ' + file, 2, { continue: true }); + return; + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + if (options.nameOnly) { + if (contents.match(regex)) { + grep.push(file); + } + } else { + var lines = contents.split('\n'); + var matches = []; + + lines.forEach(function (line, index) { + var matched = line.match(regex); + if ((options.inverse && !matched) || (!options.inverse && matched)) { + var lineNumber = index + 1; + var result = {}; + if (matches.length > 0) { + // If the last result intersects, combine them + var last = matches[matches.length - 1]; + var minimumLineNumber = Math.max( + 1, + lineNumber - options.beforeContext - 1, + ); + if ( + last.hasOwnProperty('' + lineNumber) || + last.hasOwnProperty('' + minimumLineNumber) + ) { + result = last; + } + } + result[lineNumber] = { + line, + match: true, + }; + if (options.beforeContext > 0) { + // Store the lines with their line numbers to check for overlap + lines + .slice(Math.max(index - options.beforeContext, 0), index) + .forEach(function (v, i, a) { + var lineNum = '' + (index - a.length + i + 1); + if (!result.hasOwnProperty(lineNum)) { + result[lineNum] = { line: v, match: false }; + } + }); + } + if (options.afterContext > 0) { + // Store the lines with their line numbers to check for overlap + lines + .slice( + index + 1, + Math.min(index + options.afterContext + 1, lines.length - 1), + ) + .forEach(function (v, i) { + var lineNum = '' + (index + 1 + i + 1); + if (!result.hasOwnProperty(lineNum)) { + result[lineNum] = { line: v, match: false }; + } + }); + } + // Only add the result if it's new + if (!matches.includes(result)) { + matches.push(result); + } + } + }); + + // Loop through the matches and add them to the output + Array.prototype.push.apply( + grep, + matches.map(function (result) { + return Object.entries(result) + .map(function (entry) { + var lineNumber = entry[0]; + var line = entry[1].line; + var match = entry[1].match; + return options.lineNumber + ? lineNumber + (match ? 
':' : '-') + line + : line; + }) + .join('\n'); + }), + ); + } + }); + + if (grep.length === 0 && common.state.errorCode !== 2) { + // We didn't hit the error above, but pattern didn't match + common.error('', { silent: true }); + } + + var separator = '\n'; + if ( + typeof options.beforeContext === 'number' || + typeof options.afterContext === 'number' + ) { + separator = '\n--\n'; + } + return grep.join(separator) + '\n'; +} +module.exports = _grep; diff --git a/node_modules/shelljs/src/head.js b/node_modules/shelljs/src/head.js new file mode 100644 index 0000000..f3f4f22 --- /dev/null +++ b/node_modules/shelljs/src/head.js @@ -0,0 +1,107 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('head', _head, { + canReceivePipe: true, + cmdOptions: { + 'n': 'numLines', + }, +}); + +// Reads |numLines| lines or the entire file, whichever is less. +function readSomeLines(file, numLines) { + var buf = common.buffer(); + var bufLength = buf.length; + var bytesRead = bufLength; + var pos = 0; + + var fdr = fs.openSync(file, 'r'); + var numLinesRead = 0; + var ret = ''; + while (bytesRead === bufLength && numLinesRead < numLines) { + bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); + var bufStr = buf.toString('utf8', 0, bytesRead); + numLinesRead += bufStr.split('\n').length - 1; + ret += bufStr; + pos += bytesRead; + } + + fs.closeSync(fdr); + return ret; +} + +//@ +//@ ### head([{'-n': \},] file [, file ...]) +//@ ### head([{'-n': \},] file_array) +//@ +//@ Available options: +//@ +//@ + `-n `: Show the first `` lines of the files +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var str = head({'-n': 1}, 'file*.txt'); +//@ var str = head('file1', 'file2'); +//@ var str = head(['file1', 'file2']); // same as above +//@ ``` +//@ +//@ Read the start of a `file`. Returns a [ShellString](#shellstringstr). 
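+// Illustrative usage sketch (not part of upstream shelljs): defined but never
+// invoked; file names are hypothetical.
+function _headUsageSketch() {
+  var shell = require('shelljs');
+  var firstLine = shell.head({ '-n': 1 }, 'CHANGELOG.md'); // just the first line
+  var firstTen = shell.head('notes.txt');                  // defaults to 10 lines
+  return [firstLine, firstTen];
+}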
+function _head(options, files) { + var head = []; + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no paths given'); + + var idx = 1; + if (options.numLines === true) { + idx = 2; + options.numLines = Number(arguments[1]); + } else if (options.numLines === false) { + options.numLines = 10; + } + files = [].slice.call(arguments, idx); + + if (pipe) { + files.unshift('-'); + } + + var shouldAppendNewline = false; + files.forEach(function (file) { + if (file !== '-') { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file, { continue: true }); + return; + } else if (common.statFollowLinks(file).isDirectory()) { + common.error("error reading '" + file + "': Is a directory", { + continue: true, + }); + return; + } + } + + var contents; + if (file === '-') { + contents = pipe; + } else if (options.numLines < 0) { + contents = fs.readFileSync(file, 'utf8'); + } else { + contents = readSomeLines(file, options.numLines); + } + + var lines = contents.split('\n'); + var hasTrailingNewline = (lines[lines.length - 1] === ''); + if (hasTrailingNewline) { + lines.pop(); + } + shouldAppendNewline = (hasTrailingNewline || options.numLines < lines.length); + + head = head.concat(lines.slice(0, options.numLines)); + }); + + if (shouldAppendNewline) { + head.push(''); // to add a trailing newline once we join + } + return head.join('\n'); +} +module.exports = _head; diff --git a/node_modules/shelljs/src/ln.js b/node_modules/shelljs/src/ln.js new file mode 100644 index 0000000..1d3d0e7 --- /dev/null +++ b/node_modules/shelljs/src/ln.js @@ -0,0 +1,75 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('ln', _ln, { + cmdOptions: { + 's': 'symlink', + 'f': 'force', + }, +}); + +//@ +//@ ### ln([options,] source, dest) +//@ +//@ Available options: +//@ +//@ + `-s`: symlink +//@ + `-f`: force +//@ +//@ Examples: +//@ +//@ ```javascript +//@ ln('file', 'newlink'); +//@ ln('-sf', 'file', 'existing'); +//@ ``` +//@ +//@ Links `source` to `dest`. Use `-f` to force the link, should `dest` already +//@ exist. Returns a [ShellString](#shellstringstr) indicating success or +//@ failure. +function _ln(options, source, dest) { + if (!source || !dest) { + common.error('Missing and/or '); + } + + source = String(source); + var sourcePath = path.normalize(source).replace(RegExp(path.sep + '$'), ''); + var isAbsolute = (path.resolve(source) === sourcePath); + dest = path.resolve(process.cwd(), String(dest)); + + if (fs.existsSync(dest)) { + if (!options.force) { + common.error('Destination file exists', { continue: true }); + } + + fs.unlinkSync(dest); + } + + if (options.symlink) { + var isWindows = process.platform === 'win32'; + var linkType = isWindows ? 'file' : null; + var resolvedSourcePath = isAbsolute ? sourcePath : path.resolve(process.cwd(), path.dirname(dest), source); + if (!fs.existsSync(resolvedSourcePath)) { + common.error('Source file does not exist', { continue: true }); + } else if (isWindows && common.statFollowLinks(resolvedSourcePath).isDirectory()) { + linkType = 'junction'; + } + + try { + fs.symlinkSync(linkType === 'junction' ? 
resolvedSourcePath : source, dest, linkType); + } catch (err) { + common.error(err.message); + } + } else { + if (!fs.existsSync(source)) { + common.error('Source file does not exist', { continue: true }); + } + try { + fs.linkSync(source, dest); + } catch (err) { + common.error(err.message); + } + } + return ''; +} +module.exports = _ln; diff --git a/node_modules/shelljs/src/ls.js b/node_modules/shelljs/src/ls.js new file mode 100644 index 0000000..7f32c6e --- /dev/null +++ b/node_modules/shelljs/src/ls.js @@ -0,0 +1,155 @@ +var path = require('path'); +var fs = require('fs'); +var glob = require('fast-glob'); +var common = require('./common'); + +// glob patterns use the UNIX path seperator +var globPatternRecursive = '/**'; + +common.register('ls', _ls, { + cmdOptions: { + 'R': 'recursive', + 'A': 'all', + 'L': 'link', + 'a': 'all_deprecated', + 'd': 'directory', + 'l': 'long', + }, +}); + +//@ +//@ ### ls([options,] [path, ...]) +//@ ### ls([options,] path_array) +//@ +//@ Available options: +//@ +//@ + `-R`: recursive +//@ + `-A`: all files (include files beginning with `.`, except for `.` and `..`) +//@ + `-L`: follow symlinks +//@ + `-d`: list directories themselves, not their contents +//@ + `-l`: provides more details for each file. Specifically, each file is +//@ represented by a structured object with separate fields for file +//@ metadata (see +//@ [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The +//@ return value also overrides `.toString()` to resemble `ls -l`'s +//@ output format for human readability, but programmatic usage should +//@ depend on the stable object format rather than the `.toString()` +//@ representation. +//@ +//@ Examples: +//@ +//@ ```javascript +//@ ls('projs/*.js'); +//@ ls('projs/**/*.js'); // Find all js files recursively in projs +//@ ls('-R', '/users/me', '/tmp'); +//@ ls('-R', ['/users/me', '/tmp']); // same as above +//@ ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} +//@ ``` +//@ +//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all +//@ the files in the given `path`, or files in the current directory if no +//@ `path` is provided. +function _ls(options, paths) { + if (options.all_deprecated) { + // We won't support the -a option as it's hard to image why it's useful + // (it includes '.' and '..' in addition to '.*' files) + // For backwards compatibility we'll dump a deprecated message and proceed as before + common.log('ls: Option -a is deprecated. Use -A instead'); + options.all = true; + } + + if (!paths) { + paths = ['.']; + } else { + paths = [].slice.call(arguments, 1); + } + + var list = []; + + function pushFile(abs, relName, stat) { + if (process.platform === 'win32') { + relName = relName.replace(/\\/g, '/'); + } + if (options.long) { + stat = stat || (options.link ? common.statFollowLinks(abs) : common.statNoFollowLinks(abs)); + list.push(addLsAttributes(relName, stat)); + } else { + // list.push(path.relative(rel || '.', file)); + list.push(relName); + } + } + + paths.forEach(function (p) { + var stat; + + try { + stat = options.link ? common.statFollowLinks(p) : common.statNoFollowLinks(p); + // follow links to directories by default + if (stat.isSymbolicLink()) { + /* istanbul ignore next */ + // workaround for https://github.com/shelljs/shelljs/issues/795 + // codecov seems to have a bug that miscalculate this block as uncovered. + // but according to nyc report this block does get covered. 
+ try { + var _stat = common.statFollowLinks(p); + if (_stat.isDirectory()) { + stat = _stat; + } + } catch (_) {} // bad symlink, treat it like a file + } + } catch (e) { + common.error('no such file or directory: ' + p, 2, { continue: true }); + return; + } + + // If the stat succeeded + if (stat.isDirectory() && !options.directory) { + if (options.recursive) { + // use glob, because it's simple + glob.sync(p + globPatternRecursive, { + // These options are just to make fast-glob be compatible with POSIX + // (bash) wildcard behavior. + onlyFiles: false, + + // These options depend on the cmdOptions provided to ls. + dot: options.all, + followSymbolicLinks: options.link, + }).forEach(function (item) { + // Glob pattern returns the directory itself and needs to be filtered out. + if (path.relative(p, item)) { + pushFile(item, path.relative(p, item)); + } + }); + } else if (options.all) { + // use fs.readdirSync, because it's fast + fs.readdirSync(p).forEach(function (item) { + pushFile(path.join(p, item), item); + }); + } else { + // use fs.readdirSync and then filter out secret files + fs.readdirSync(p).forEach(function (item) { + if (item[0] !== '.') { + pushFile(path.join(p, item), item); + } + }); + } + } else { + pushFile(p, p, stat); + } + }); + + // Add methods, to make this more compatible with ShellStrings + return list; +} + +function addLsAttributes(pathName, stats) { + // Note: this object will contain more information than .toString() returns + stats.name = pathName; + stats.toString = function () { + // Return a string resembling unix's `ls -l` format + return [this.mode, this.nlink, this.uid, this.gid, this.size, this.mtime, this.name].join(' '); + }; + return stats; +} + +module.exports = _ls; diff --git a/node_modules/shelljs/src/mkdir.js b/node_modules/shelljs/src/mkdir.js new file mode 100644 index 0000000..021cad9 --- /dev/null +++ b/node_modules/shelljs/src/mkdir.js @@ -0,0 +1,102 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('mkdir', _mkdir, { + cmdOptions: { + 'p': 'fullpath', + }, +}); + +// Recursively creates `dir` +function mkdirSyncRecursive(dir) { + var baseDir = path.dirname(dir); + + // Prevents some potential problems arising from malformed UNCs or + // insufficient permissions. + /* istanbul ignore next */ + if (baseDir === dir) { + common.error('dirname() failed: [' + dir + ']'); + } + + // Base dir does not exist, go recursive + if (!fs.existsSync(baseDir)) { + mkdirSyncRecursive(baseDir); + } + + try { + // Base dir created, can create dir + fs.mkdirSync(dir, parseInt('0777', 8)); + } catch (e) { + // swallow error if dir already exists + if (e.code !== 'EEXIST' || common.statNoFollowLinks(dir).isFile()) { throw e; } + } +} + +//@ +//@ ### mkdir([options,] dir [, dir ...]) +//@ ### mkdir([options,] dir_array) +//@ +//@ Available options: +//@ +//@ + `-p`: full path (and create intermediate directories, if necessary) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); +//@ mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above +//@ ``` +//@ +//@ Creates directories. Returns a [ShellString](#shellstringstr) indicating +//@ success or failure. 
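+// Illustrative usage sketch (not part of upstream shelljs): defined but never
+// invoked; paths are hypothetical.
+function _mkdirUsageSketch() {
+  var shell = require('shelljs');
+  shell.mkdir('-p', '/tmp/a/b/c'); // creates intermediate directories as needed
+  shell.mkdir('newdir');           // errors (and continues) if newdir already exists
+}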
+function _mkdir(options, dirs) { + if (!dirs) common.error('no paths given'); + + if (typeof dirs === 'string') { + dirs = [].slice.call(arguments, 1); + } + // if it's array leave it as it is + + dirs.forEach(function (dir) { + try { + var stat = common.statNoFollowLinks(dir); + if (!options.fullpath) { + common.error('path already exists: ' + dir, { continue: true }); + } else if (stat.isFile()) { + common.error('cannot create directory ' + dir + ': File exists', { continue: true }); + } + return; // skip dir + } catch (e) { + // do nothing + } + + // Base dir does not exist, and no -p option given + var baseDir = path.dirname(dir); + if (!fs.existsSync(baseDir) && !options.fullpath) { + common.error('no such file or directory: ' + baseDir, { continue: true }); + return; // skip dir + } + + try { + if (options.fullpath) { + mkdirSyncRecursive(path.resolve(dir)); + } else { + fs.mkdirSync(dir, parseInt('0777', 8)); + } + } catch (e) { + var reason; + if (e.code === 'EACCES') { + reason = 'Permission denied'; + } else if (e.code === 'ENOTDIR' || e.code === 'ENOENT') { + reason = 'Not a directory'; + } else { + /* istanbul ignore next */ + throw e; + } + common.error('cannot create directory ' + dir + ': ' + reason, { continue: true }); + } + }); + return ''; +} // man arraykdir +module.exports = _mkdir; diff --git a/node_modules/shelljs/src/mv.js b/node_modules/shelljs/src/mv.js new file mode 100644 index 0000000..6e89e2f --- /dev/null +++ b/node_modules/shelljs/src/mv.js @@ -0,0 +1,119 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); +var cp = require('./cp'); +var rm = require('./rm'); + +common.register('mv', _mv, { + cmdOptions: { + 'f': '!no_force', + 'n': 'no_force', + }, +}); + +// Checks if cureent file was created recently +function checkRecentCreated(sources, index) { + var lookedSource = sources[index]; + return sources.slice(0, index).some(function (src) { + return path.basename(src) === path.basename(lookedSource); + }); +} + +//@ +//@ ### mv([options ,] source [, source ...], dest') +//@ ### mv([options ,] source_array, dest') +//@ +//@ Available options: +//@ +//@ + `-f`: force (default behavior) +//@ + `-n`: no-clobber +//@ +//@ Examples: +//@ +//@ ```javascript +//@ mv('-n', 'file', 'dir/'); +//@ mv('file1', 'file2', 'dir/'); +//@ mv(['file1', 'file2'], 'dir/'); // same as above +//@ ``` +//@ +//@ Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) +//@ indicating success or failure. 
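+// Illustrative usage sketch (not part of upstream shelljs): defined but never
+// invoked; paths are hypothetical.
+function _mvUsageSketch() {
+  var shell = require('shelljs');
+  shell.mv('-n', 'draft.txt', 'archive/');  // no-clobber: skip if dest already exists
+  shell.mv(['a.txt', 'b.txt'], 'archive/'); // move multiple sources into a directory
+}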
+function _mv(options, sources, dest) { + // Get sources, dest + if (arguments.length < 3) { + common.error('missing and/or '); + } else if (arguments.length > 3) { + sources = [].slice.call(arguments, 1, arguments.length - 1); + dest = arguments[arguments.length - 1]; + } else if (typeof sources === 'string') { + sources = [sources]; + } else { + // TODO(nate): figure out if we actually need this line + common.error('invalid arguments'); + } + + var exists = fs.existsSync(dest); + var stats = exists && common.statFollowLinks(dest); + + // Dest is not existing dir, but multiple sources given + if ((!exists || !stats.isDirectory()) && sources.length > 1) { + common.error('dest is not a directory (too many sources)'); + } + + // Dest is an existing file, but no -f given + if (exists && stats.isFile() && options.no_force) { + common.error('dest file already exists: ' + dest); + } + + sources.forEach(function (src, srcIndex) { + if (!fs.existsSync(src)) { + common.error('no such file or directory: ' + src, { continue: true }); + return; // skip file + } + + // If here, src exists + + // When copying to '/path/dir': + // thisDest = '/path/dir/file1' + var thisDest = dest; + if (fs.existsSync(dest) && common.statFollowLinks(dest).isDirectory()) { + thisDest = path.normalize(dest + '/' + path.basename(src)); + } + + var thisDestExists = fs.existsSync(thisDest); + + if (thisDestExists && checkRecentCreated(sources, srcIndex)) { + // cannot overwrite file created recently in current execution, but we want to continue copying other files + if (!options.no_force) { + common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); + } + return; + } + + if (fs.existsSync(thisDest) && options.no_force) { + common.error('dest file already exists: ' + thisDest, { continue: true }); + return; // skip file + } + + if (path.resolve(src) === path.dirname(path.resolve(thisDest))) { + common.error('cannot move to self: ' + src, { continue: true }); + return; // skip file + } + + try { + fs.renameSync(src, thisDest); + } catch (e) { + /* istanbul ignore next */ + if (e.code === 'EXDEV') { + // If we're trying to `mv` to an external partition, we'll actually need + // to perform a copy and then clean up the original file. If either the + // copy or the rm fails with an exception, we should allow this + // exception to pass up to the top level. + cp({ recursive: true }, src, thisDest); + rm({ recursive: true, force: true }, src); + } + } + }); // forEach(src) + return ''; +} // mv +module.exports = _mv; diff --git a/node_modules/shelljs/src/popd.js b/node_modules/shelljs/src/popd.js new file mode 100644 index 0000000..d9eac3f --- /dev/null +++ b/node_modules/shelljs/src/popd.js @@ -0,0 +1 @@ +// see dirs.js diff --git a/node_modules/shelljs/src/pushd.js b/node_modules/shelljs/src/pushd.js new file mode 100644 index 0000000..d9eac3f --- /dev/null +++ b/node_modules/shelljs/src/pushd.js @@ -0,0 +1 @@ +// see dirs.js diff --git a/node_modules/shelljs/src/pwd.js b/node_modules/shelljs/src/pwd.js new file mode 100644 index 0000000..8527d8b --- /dev/null +++ b/node_modules/shelljs/src/pwd.js @@ -0,0 +1,16 @@ +var path = require('path'); +var common = require('./common'); + +common.register('pwd', _pwd, { + allowGlobbing: false, +}); + +//@ +//@ ### pwd() +//@ +//@ Returns the current directory as a [ShellString](#shellstringstr). 
+function _pwd() { + var pwd = path.resolve(process.cwd()); + return pwd; +} +module.exports = _pwd; diff --git a/node_modules/shelljs/src/rm.js b/node_modules/shelljs/src/rm.js new file mode 100644 index 0000000..6bb5755 --- /dev/null +++ b/node_modules/shelljs/src/rm.js @@ -0,0 +1,201 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('rm', _rm, { + cmdOptions: { + 'f': 'force', + 'r': 'recursive', + 'R': 'recursive', + }, +}); + +// Recursively removes 'dir' +// Adapted from https://github.com/ryanmcgrath/wrench-js +// +// Copyright (c) 2010 Ryan McGrath +// Copyright (c) 2012 Artur Adib +// +// Licensed under the MIT License +// http://www.opensource.org/licenses/mit-license.php +function rmdirSyncRecursive(dir, force, fromSymlink) { + var files; + + files = fs.readdirSync(dir); + + // Loop through and delete everything in the sub-tree after checking it + for (var i = 0; i < files.length; i++) { + var file = dir + '/' + files[i]; + var currFile = common.statNoFollowLinks(file); + + if (currFile.isDirectory()) { // Recursive function back to the beginning + rmdirSyncRecursive(file, force); + } else if (force || isWriteable(file)) { + // Assume it's a file - perhaps a try/catch belongs here? + try { + common.unlinkSync(file); + } catch (e) { + /* istanbul ignore next */ + common.error('could not remove file (code ' + e.code + '): ' + file, { + continue: true, + }); + } + } + } + + // if was directory was referenced through a symbolic link, + // the contents should be removed, but not the directory itself + if (fromSymlink) return; + + // Now that we know everything in the sub-tree has been deleted, we can delete the main directory. + // Huzzah for the shopkeep. + + var result; + try { + // Retry on windows, sometimes it takes a little time before all the files in the directory are gone + var start = Date.now(); + + // TODO: replace this with a finite loop + for (;;) { + try { + result = fs.rmdirSync(dir); + if (fs.existsSync(dir)) throw { code: 'EAGAIN' }; + break; + } catch (er) { + /* istanbul ignore next */ + // In addition to error codes, also check if the directory still exists and loop again if true + if (process.platform === 'win32' && (er.code === 'ENOTEMPTY' || er.code === 'EBUSY' || er.code === 'EPERM' || er.code === 'EAGAIN')) { + if (Date.now() - start > 1000) throw er; + } else if (er.code === 'ENOENT') { + // Directory did not exist, deletion was successful + break; + } else { + throw er; + } + } + } + } catch (e) { + common.error('could not remove directory (code ' + e.code + '): ' + dir, { continue: true }); + } + + return result; +} // rmdirSyncRecursive + +// Hack to determine if file has write permissions for current user +// Avoids having to check user, group, etc, but it's probably slow +function isWriteable(file) { + var writePermission = true; + try { + var __fd = fs.openSync(file, 'a'); + fs.closeSync(__fd); + } catch (e) { + writePermission = false; + } + + return writePermission; +} + +function handleFile(file, options) { + if (options.force || isWriteable(file)) { + // -f was passed, or file is writable, so it can be removed + common.unlinkSync(file); + } else { + common.error('permission denied: ' + file, { continue: true }); + } +} + +function handleDirectory(file, options) { + if (options.recursive) { + // -r was passed, so directory can be removed + rmdirSyncRecursive(file, options.force); + } else { + common.error('path is a directory', { continue: true }); + } +} + +function handleSymbolicLink(file, options) { + var stats; + 
try { + stats = common.statFollowLinks(file); + } catch (e) { + // symlink is broken, so remove the symlink itself + common.unlinkSync(file); + return; + } + + if (stats.isFile()) { + common.unlinkSync(file); + } else if (stats.isDirectory()) { + if (file[file.length - 1] === '/') { + // trailing separator, so remove the contents, not the link + if (options.recursive) { + // -r was passed, so directory can be removed + var fromSymlink = true; + rmdirSyncRecursive(file, options.force, fromSymlink); + } else { + common.error('path is a directory', { continue: true }); + } + } else { + // no trailing separator, so remove the link + common.unlinkSync(file); + } + } +} + +function handleFIFO(file) { + common.unlinkSync(file); +} + +//@ +//@ ### rm([options,] file [, file ...]) +//@ ### rm([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-f`: force +//@ + `-r, -R`: recursive +//@ +//@ Examples: +//@ +//@ ```javascript +//@ rm('-rf', '/tmp/*'); +//@ rm('some_file.txt', 'another_file.txt'); +//@ rm(['some_file.txt', 'another_file.txt']); // same as above +//@ ``` +//@ +//@ Removes files. Returns a [ShellString](#shellstringstr) indicating success +//@ or failure. +function _rm(options, files) { + if (!files) common.error('no paths given'); + + // Convert to array + files = [].slice.call(arguments, 1); + + files.forEach(function (file) { + var lstats; + try { + var filepath = (file[file.length - 1] === '/') + ? file.slice(0, -1) // remove the '/' so lstatSync can detect symlinks + : file; + lstats = common.statNoFollowLinks(filepath); // test for existence + } catch (e) { + // Path does not exist, no force flag given + if (!options.force) { + common.error('no such file or directory: ' + file, { continue: true }); + } + return; // skip file + } + + // If here, path exists + if (lstats.isFile()) { + handleFile(file, options); + } else if (lstats.isDirectory()) { + handleDirectory(file, options); + } else if (lstats.isSymbolicLink()) { + handleSymbolicLink(file, options); + } else if (lstats.isFIFO()) { + handleFIFO(file); + } + }); // forEach(file) + return ''; +} // rm +module.exports = _rm; diff --git a/node_modules/shelljs/src/sed.js b/node_modules/shelljs/src/sed.js new file mode 100644 index 0000000..6936523 --- /dev/null +++ b/node_modules/shelljs/src/sed.js @@ -0,0 +1,95 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('sed', _sed, { + globStart: 3, // don't glob-expand regexes + canReceivePipe: true, + cmdOptions: { + 'i': 'inplace', + }, +}); + +//@ +//@ ### sed([options,] search_regex, replacement, file [, file ...]) +//@ ### sed([options,] search_regex, replacement, file_array) +//@ +//@ Available options: +//@ +//@ + `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ +//@ +//@ Examples: +//@ +//@ ```javascript +//@ sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); +//@ ``` +//@ +//@ Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on +//@ each of the lines from the input string using the given `search_regex` and `replacement` string or +//@ function. Returns the new [ShellString](#shellstringstr) after replacement. +//@ +//@ Note: +//@ +//@ Like unix `sed`, ShellJS `sed` supports capture groups. 
Capture groups are specified +//@ using the `$n` syntax: +//@ +//@ ```javascript +//@ sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); +//@ ``` +//@ +//@ Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file +//@ (split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') +//@ will not match anything and nothing will be replaced. +function _sed(options, regex, replacement, files) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (typeof replacement !== 'string' && typeof replacement !== 'function') { + if (typeof replacement === 'number') { + replacement = replacement.toString(); // fallback + } else { + common.error('invalid replacement string'); + } + } + + // Convert all search strings to RegExp + if (typeof regex === 'string') { + regex = RegExp(regex); + } + + if (!files && !pipe) { + common.error('no files given'); + } + + files = [].slice.call(arguments, 3); + + if (pipe) { + files.unshift('-'); + } + + var sed = []; + files.forEach(function (file) { + if (!fs.existsSync(file) && file !== '-') { + common.error('no such file or directory: ' + file, 2, { continue: true }); + return; + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + var lines = contents.split('\n'); + var result = lines.map(function (line) { + return line.replace(regex, replacement); + }).join('\n'); + + sed.push(result); + + if (options.inplace) { + fs.writeFileSync(file, result, 'utf8'); + } + }); + + if (options.inplace) { + return ''; + } else { + return sed.join('\n'); + } +} +module.exports = _sed; diff --git a/node_modules/shelljs/src/set.js b/node_modules/shelljs/src/set.js new file mode 100644 index 0000000..6f37bc9 --- /dev/null +++ b/node_modules/shelljs/src/set.js @@ -0,0 +1,55 @@ +var common = require('./common'); + +common.register('set', _set, { + allowGlobbing: false, + wrapOutput: false, +}); + +//@ +//@ ### set(options) +//@ +//@ Available options: +//@ +//@ + `+/-e`: exit upon error (`config.fatal`) +//@ + `+/-v`: verbose: show all commands (`config.verbose`) +//@ + `+/-f`: disable filename expansion (globbing) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ set('-e'); // exit upon first error +//@ set('+e'); // this undoes a "set('-e')" +//@ ``` +//@ +//@ Sets global configuration variables. 
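As a usage sketch of the toggle semantics documented above (via the public `shelljs` entry point rather than this internal module): a leading `-` switches the corresponding config flag on, and a leading `+` switches it back off.

```js
const shell = require('shelljs');

shell.set('-e');                  // errors now throw (config.fatal === true)
console.log(shell.config.fatal);  // => true

shell.set('+e');                  // undoes the previous call
console.log(shell.config.fatal);  // => false

shell.set('-v');                  // echo each command as it runs (config.verbose)
shell.set('-f');                  // disable glob expansion (config.noglob)
```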
+function _set(options) { + if (!options) { + var args = [].slice.call(arguments, 0); + if (args.length < 2) common.error('must provide an argument'); + options = args[1]; + } + var negate = (options[0] === '+'); + if (negate) { + options = '-' + options.slice(1); // parseOptions needs a '-' prefix + } + options = common.parseOptions(options, { + 'e': 'fatal', + 'v': 'verbose', + 'f': 'noglob', + }); + + if (negate) { + Object.keys(options).forEach(function (key) { + options[key] = !options[key]; + }); + } + + Object.keys(options).forEach(function (key) { + // Only change the global config if `negate` is false and the option is true + // or if `negate` is true and the option is false (aka negate !== option) + if (negate !== options[key]) { + common.config[key] = options[key]; + } + }); +} +module.exports = _set; diff --git a/node_modules/shelljs/src/sort.js b/node_modules/shelljs/src/sort.js new file mode 100644 index 0000000..66b042c --- /dev/null +++ b/node_modules/shelljs/src/sort.js @@ -0,0 +1,98 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('sort', _sort, { + canReceivePipe: true, + cmdOptions: { + 'r': 'reverse', + 'n': 'numerical', + }, +}); + +// parse out the number prefix of a line +function parseNumber(str) { + var match = str.match(/^\s*(\d*)\s*(.*)$/); + return { num: Number(match[1]), value: match[2] }; +} + +// compare two strings case-insensitively, but examine case for strings that are +// case-insensitive equivalent +function unixCmp(a, b) { + var aLower = a.toLowerCase(); + var bLower = b.toLowerCase(); + return (aLower === bLower ? + -1 * a.localeCompare(b) : // unix sort treats case opposite how javascript does + aLower.localeCompare(bLower)); +} + +// compare two strings in the fashion that unix sort's -n option works +function numericalCmp(a, b) { + var objA = parseNumber(a); + var objB = parseNumber(b); + if (objA.hasOwnProperty('num') && objB.hasOwnProperty('num')) { + return ((objA.num !== objB.num) ? + (objA.num - objB.num) : + unixCmp(objA.value, objB.value)); + } else { + return unixCmp(objA.value, objB.value); + } +} + +//@ +//@ ### sort([options,] file [, file ...]) +//@ ### sort([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-r`: Reverse the results +//@ + `-n`: Compare according to numerical value +//@ +//@ Examples: +//@ +//@ ```javascript +//@ sort('foo.txt', 'bar.txt'); +//@ sort('-r', 'foo.txt'); +//@ ``` +//@ +//@ Return the contents of the `file`s, sorted line-by-line as a +//@ [ShellString](#shellstringstr). Sorting multiple files mixes their content +//@ (just as unix `sort` does). +function _sort(options, files) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no files given'); + + files = [].slice.call(arguments, 1); + + if (pipe) { + files.unshift('-'); + } + + var lines = files.reduce(function (accum, file) { + if (file !== '-') { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file, { continue: true }); + return accum; + } else if (common.statFollowLinks(file).isDirectory()) { + common.error('read failed: ' + file + ': Is a directory', { + continue: true, + }); + return accum; + } + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + return accum.concat(contents.trimRight().split('\n')); + }, []); + + var sorted = lines.sort(options.numerical ? 
numericalCmp : unixCmp); + + if (options.reverse) { + sorted = sorted.reverse(); + } + + return sorted.join('\n') + '\n'; +} + +module.exports = _sort; diff --git a/node_modules/shelljs/src/tail.js b/node_modules/shelljs/src/tail.js new file mode 100644 index 0000000..eee75c5 --- /dev/null +++ b/node_modules/shelljs/src/tail.js @@ -0,0 +1,90 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('tail', _tail, { + canReceivePipe: true, + cmdOptions: { + 'n': 'numLines', + }, +}); + +//@ +//@ ### tail([{'-n': \},] file [, file ...]) +//@ ### tail([{'-n': \},] file_array) +//@ +//@ Available options: +//@ +//@ + `-n `: Show the last `` lines of `file`s +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var str = tail({'-n': 1}, 'file*.txt'); +//@ var str = tail('file1', 'file2'); +//@ var str = tail(['file1', 'file2']); // same as above +//@ ``` +//@ +//@ Read the end of a `file`. Returns a [ShellString](#shellstringstr). +function _tail(options, files) { + var tail = []; + var pipe = common.readFromPipe(); + + if (!files && !pipe) common.error('no paths given'); + + var idx = 1; + var plusOption = false; + if (options.numLines === true) { + idx = 2; + if (arguments[1][0] === '+') { + plusOption = true; + } + options.numLines = Number(arguments[1]); + } else if (options.numLines === false) { + options.numLines = 10; + } + // arguments[0] is a json object + if (arguments[0].numLines[0] === '+') { + plusOption = true; + } + options.numLines = -1 * Math.abs(options.numLines); + files = [].slice.call(arguments, idx); + + if (pipe) { + files.unshift('-'); + } + + var shouldAppendNewline = false; + files.forEach(function (file) { + if (file !== '-') { + if (!fs.existsSync(file)) { + common.error('no such file or directory: ' + file, { continue: true }); + return; + } else if (common.statFollowLinks(file).isDirectory()) { + common.error("error reading '" + file + "': Is a directory", { + continue: true, + }); + return; + } + } + + var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); + + var lines = contents.split('\n'); + if (lines[lines.length - 1] === '') { + lines.pop(); + shouldAppendNewline = true; + } else { + shouldAppendNewline = false; + } + + tail = tail.concat(plusOption ? lines.slice(-options.numLines - 1) : lines.slice(options.numLines)); + }); + + if (shouldAppendNewline) { + tail.push(''); // to add a trailing newline once we join + } + + return tail.join('\n'); +} + +module.exports = _tail; diff --git a/node_modules/shelljs/src/tempdir.js b/node_modules/shelljs/src/tempdir.js new file mode 100644 index 0000000..b6f7796 --- /dev/null +++ b/node_modules/shelljs/src/tempdir.js @@ -0,0 +1,75 @@ +var os = require('os'); +var fs = require('fs'); +var common = require('./common'); + +common.register('tempdir', _tempDir, { + allowGlobbing: false, + wrapOutput: false, +}); + +// Returns false if 'dir' is not a writeable directory, 'dir' otherwise +function writeableDir(dir) { + if (!dir || !fs.existsSync(dir)) return false; + + if (!common.statFollowLinks(dir).isDirectory()) return false; + + var testFile = dir + '/' + common.randomFileName(); + try { + fs.writeFileSync(testFile, ' '); + common.unlinkSync(testFile); + return dir; + } catch (e) { + /* istanbul ignore next */ + return false; + } +} + +// Variable to cache the tempdir value for successive lookups. 
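`_tempDir()` below memoizes its answer in `cachedTempDir`, and `writeableDir()` above validates each candidate empirically, by creating and deleting a scratch file rather than inspecting permission bits. A standalone sketch of that probe (the `probeWritable` name is ours):

```js
const fs = require('fs');
const os = require('os');
const path = require('path');

// Returns dir if a scratch file can be created and removed inside it, false otherwise.
function probeWritable(dir) {
  const scratch = path.join(dir, 'probe-' + Date.now() + '-' + Math.random().toString(36).slice(2));
  try {
    fs.writeFileSync(scratch, ' ');
    fs.unlinkSync(scratch);
    return dir;
  } catch (e) {
    return false;
  }
}

console.log(probeWritable(os.tmpdir())); // e.g. "/tmp" on most *nix platforms
```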
+var cachedTempDir; + +//@ +//@ ### tempdir() +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var tmp = tempdir(); // "/tmp" for most *nix platforms +//@ ``` +//@ +//@ Searches and returns string containing a writeable, platform-dependent temporary directory. +//@ Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). +function _tempDir() { + if (cachedTempDir) return cachedTempDir; + + cachedTempDir = writeableDir(os.tmpdir()) || + writeableDir(process.env.TMPDIR) || + writeableDir(process.env.TEMP) || + writeableDir(process.env.TMP) || + writeableDir(process.env.Wimp$ScrapDir) || // RiscOS + writeableDir('C:\\TEMP') || // Windows + writeableDir('C:\\TMP') || // Windows + writeableDir('\\TEMP') || // Windows + writeableDir('\\TMP') || // Windows + writeableDir('/tmp') || + writeableDir('/var/tmp') || + writeableDir('/usr/tmp') || + writeableDir('.'); // last resort + + return cachedTempDir; +} + +// Indicates if the tempdir value is currently cached. This is exposed for tests +// only. The return value should only be tested for truthiness. +function isCached() { + return cachedTempDir; +} + +// Clears the cached tempDir value, if one is cached. This is exposed for tests +// only. +function clearCache() { + cachedTempDir = undefined; +} + +module.exports.tempDir = _tempDir; +module.exports.isCached = isCached; +module.exports.clearCache = clearCache; diff --git a/node_modules/shelljs/src/test.js b/node_modules/shelljs/src/test.js new file mode 100644 index 0000000..7e76908 --- /dev/null +++ b/node_modules/shelljs/src/test.js @@ -0,0 +1,86 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('test', _test, { + cmdOptions: { + 'b': 'block', + 'c': 'character', + 'd': 'directory', + 'e': 'exists', + 'f': 'file', + 'L': 'link', + 'p': 'pipe', + 'S': 'socket', + }, + wrapOutput: false, + allowGlobbing: false, +}); + + +//@ +//@ ### test(expression) +//@ +//@ Available expression primaries: +//@ +//@ + `'-b', 'path'`: true if path is a block device +//@ + `'-c', 'path'`: true if path is a character device +//@ + `'-d', 'path'`: true if path is a directory +//@ + `'-e', 'path'`: true if path exists +//@ + `'-f', 'path'`: true if path is a regular file +//@ + `'-L', 'path'`: true if path is a symbolic link +//@ + `'-p', 'path'`: true if path is a pipe (FIFO) +//@ + `'-S', 'path'`: true if path is a socket +//@ +//@ Examples: +//@ +//@ ```javascript +//@ if (test('-d', path)) { /* do something with dir */ }; +//@ if (!test('-f', path)) continue; // skip if it's not a regular file +//@ ``` +//@ +//@ Evaluates `expression` using the available primaries and returns +//@ corresponding boolean value. 
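One consequence of the primaries above worth calling out: `-L` uses `lstat` and so still reports a dangling symlink, while `-e` follows the link and reports that nothing is there. A small illustration, assuming we are free to create a throwaway link named `dangling` in the current directory (and that the platform permits symlink creation):

```js
const fs = require('fs');
const { test } = require('shelljs');

fs.symlinkSync('no-such-target.txt', 'dangling'); // a link whose target does not exist

console.log(test('-L', 'dangling')); // => true  (lstat sees the link itself)
console.log(test('-e', 'dangling')); // => false (following the link finds nothing)
console.log(test('-f', 'dangling')); // => false

fs.unlinkSync('dangling'); // clean up
```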
+function _test(options, path) { + if (!path) common.error('no path given'); + + var canInterpret = false; + Object.keys(options).forEach(function (key) { + if (options[key] === true) { + canInterpret = true; + } + }); + + if (!canInterpret) common.error('could not interpret expression'); + + if (options.link) { + try { + return common.statNoFollowLinks(path).isSymbolicLink(); + } catch (e) { + return false; + } + } + + if (!fs.existsSync(path)) return false; + + if (options.exists) return true; + + var stats = common.statFollowLinks(path); + + if (options.block) return stats.isBlockDevice(); + + if (options.character) return stats.isCharacterDevice(); + + if (options.directory) return stats.isDirectory(); + + if (options.file) return stats.isFile(); + + /* istanbul ignore next */ + if (options.pipe) return stats.isFIFO(); + + /* istanbul ignore next */ + if (options.socket) return stats.isSocket(); + + /* istanbul ignore next */ + return false; // fallback +} // test +module.exports = _test; diff --git a/node_modules/shelljs/src/to.js b/node_modules/shelljs/src/to.js new file mode 100644 index 0000000..e4b064f --- /dev/null +++ b/node_modules/shelljs/src/to.js @@ -0,0 +1,38 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('to', _to, { + pipeOnly: true, + wrapOutput: false, +}); + +//@ +//@ ### ShellString.prototype.to(file) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ cat('input.txt').to('output.txt'); +//@ ``` +//@ +//@ Analogous to the redirection operator `>` in Unix, but works with +//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix +//@ redirections, `to()` will overwrite any existing file!_ Returns the same +//@ [ShellString](#shellstringstr) this operated on, to support chaining. +function _to(options, file) { + if (!file) common.error('wrong arguments'); + + if (!fs.existsSync(path.dirname(file))) { + common.error('no such file or directory: ' + path.dirname(file)); + } + + try { + fs.writeFileSync(file, this.stdout || this.toString(), 'utf8'); + return this; + } catch (e) { + /* istanbul ignore next */ + common.error('could not write to file (code ' + e.code + '): ' + file, { continue: true }); + } +} +module.exports = _to; diff --git a/node_modules/shelljs/src/toEnd.js b/node_modules/shelljs/src/toEnd.js new file mode 100644 index 0000000..dc30e62 --- /dev/null +++ b/node_modules/shelljs/src/toEnd.js @@ -0,0 +1,37 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('toEnd', _toEnd, { + pipeOnly: true, + wrapOutput: false, +}); + +//@ +//@ ### ShellString.prototype.toEnd(file) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ cat('input.txt').toEnd('output.txt'); +//@ ``` +//@ +//@ Analogous to the redirect-and-append operator `>>` in Unix, but works with +//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the +//@ same [ShellString](#shellstringstr) this operated on, to support chaining. 
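Because `to()` and `toEnd()` both return the `ShellString` they were called on, redirections can be chained; a brief sketch with illustrative file names:

```js
const { cat, echo } = require('shelljs');

echo('build started').to('build.log');   // like `>`  : create/overwrite build.log
echo('warning: foo is deprecated')
  .toEnd('build.log')                     // like `>>` : append to build.log
  .toEnd('all-builds.log');               // chaining works because toEnd() returns `this`

console.log(cat('build.log').toString());
// => build started
//    warning: foo is deprecated
```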
+function _toEnd(options, file) { + if (!file) common.error('wrong arguments'); + + if (!fs.existsSync(path.dirname(file))) { + common.error('no such file or directory: ' + path.dirname(file)); + } + + try { + fs.appendFileSync(file, this.stdout || this.toString(), 'utf8'); + return this; + } catch (e) { + /* istanbul ignore next */ + common.error('could not append to file (code ' + e.code + '): ' + file, { continue: true }); + } +} +module.exports = _toEnd; diff --git a/node_modules/shelljs/src/touch.js b/node_modules/shelljs/src/touch.js new file mode 100644 index 0000000..a268586 --- /dev/null +++ b/node_modules/shelljs/src/touch.js @@ -0,0 +1,117 @@ +var fs = require('fs'); +var common = require('./common'); + +common.register('touch', _touch, { + cmdOptions: { + 'a': 'atime_only', + 'c': 'no_create', + 'd': 'date', + 'm': 'mtime_only', + 'r': 'reference', + }, +}); + +//@ +//@ ### touch([options,] file [, file ...]) +//@ ### touch([options,] file_array) +//@ +//@ Available options: +//@ +//@ + `-a`: Change only the access time +//@ + `-c`: Do not create any files +//@ + `-m`: Change only the modification time +//@ + `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) +//@ instead of current time +//@ + `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current +//@ time +//@ +//@ Examples: +//@ +//@ ```javascript +//@ touch('source.js'); +//@ touch('-c', 'path/to/file.js'); +//@ touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); +//@ touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); +//@ touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); +//@ ``` +//@ +//@ Update the access and modification times of each file to the current time. +//@ A file argument that does not exist is created empty, unless `-c` is supplied. +//@ This is a partial implementation of +//@ [`touch(1)`](http://linux.die.net/man/1/touch). Returns a +//@ [ShellString](#shellstringstr) indicating success or failure. +function _touch(opts, files) { + if (!files) { + common.error('no files given'); + } else if (typeof files === 'string') { + files = [].slice.call(arguments, 1); + } else { + common.error('file arg should be a string file path or an Array of string file paths'); + } + + files.forEach(function (f) { + touchFile(opts, f); + }); + return ''; +} + +function touchFile(opts, file) { + var stat = tryStatFile(file); + + if (stat && stat.isDirectory()) { + // don't error just exit + return; + } + + // if the file doesn't already exist and the user has specified --no-create then + // this script is finished + if (!stat && opts.no_create) { + return; + } + + // open the file and then close it. 
this will create it if it doesn't exist but will + // not truncate the file + fs.closeSync(fs.openSync(file, 'a')); + + // + // Set timestamps + // + + // setup some defaults + var now = new Date(); + var mtime = opts.date || now; + var atime = opts.date || now; + + // use reference file + if (opts.reference) { + var refStat = tryStatFile(opts.reference); + if (!refStat) { + common.error('failed to get attributess of ' + opts.reference); + } + mtime = refStat.mtime; + atime = refStat.atime; + } else if (opts.date) { + mtime = opts.date; + atime = opts.date; + } + + if (opts.atime_only && opts.mtime_only) { + // keep the new values of mtime and atime like GNU + } else if (opts.atime_only) { + mtime = stat.mtime; + } else if (opts.mtime_only) { + atime = stat.atime; + } + + fs.utimesSync(file, atime, mtime); +} + +module.exports = _touch; + +function tryStatFile(filePath) { + try { + return common.statFollowLinks(filePath); + } catch (e) { + return null; + } +} diff --git a/node_modules/shelljs/src/uniq.js b/node_modules/shelljs/src/uniq.js new file mode 100644 index 0000000..5802706 --- /dev/null +++ b/node_modules/shelljs/src/uniq.js @@ -0,0 +1,93 @@ +var fs = require('fs'); +var common = require('./common'); + +// add c spaces to the left of str +function lpad(c, str) { + var res = '' + str; + if (res.length < c) { + res = Array((c - res.length) + 1).join(' ') + res; + } + return res; +} + +common.register('uniq', _uniq, { + canReceivePipe: true, + cmdOptions: { + 'i': 'ignoreCase', + 'c': 'count', + 'd': 'duplicates', + }, +}); + +//@ +//@ ### uniq([options,] [input, [output]]) +//@ +//@ Available options: +//@ +//@ + `-i`: Ignore case while comparing +//@ + `-c`: Prefix lines by the number of occurrences +//@ + `-d`: Only print duplicate lines, one for each group of identical lines +//@ +//@ Examples: +//@ +//@ ```javascript +//@ uniq('foo.txt'); +//@ uniq('-i', 'foo.txt'); +//@ uniq('-cd', 'foo.txt', 'bar.txt'); +//@ ``` +//@ +//@ Filter adjacent matching lines from `input`. Returns a +//@ [ShellString](#shellstringstr). +function _uniq(options, input, output) { + // Check if this is coming from a pipe + var pipe = common.readFromPipe(); + + if (!pipe) { + if (!input) common.error('no input given'); + + if (!fs.existsSync(input)) { + common.error(input + ': No such file or directory'); + } else if (common.statFollowLinks(input).isDirectory()) { + common.error("error reading '" + input + "'"); + } + } + if (output && fs.existsSync(output) && common.statFollowLinks(output).isDirectory()) { + common.error(output + ': Is a directory'); + } + + var lines = (input ? fs.readFileSync(input, 'utf8') : pipe) + .trimRight() + .split('\n'); + + var compare = function (a, b) { + return options.ignoreCase ? + a.toLocaleLowerCase().localeCompare(b.toLocaleLowerCase()) : + a.localeCompare(b); + }; + var uniqed = lines.reduceRight(function (res, e) { + // Perform uniq -c on the input + if (res.length === 0) { + return [{ count: 1, ln: e }]; + } else if (compare(res[0].ln, e) === 0) { + return [{ count: res[0].count + 1, ln: e }].concat(res.slice(1)); + } else { + return [{ count: 1, ln: e }].concat(res); + } + }, []).filter(function (obj) { + // Do we want only duplicated objects? + return options.duplicates ? obj.count > 1 : true; + }).map(function (obj) { + // Are we tracking the counts of each line? + return (options.count ? 
(lpad(7, obj.count) + ' ') : '') + obj.ln; + }).join('\n') + '\n'; + + if (output) { + (new common.ShellString(uniqed)).to(output); + // if uniq writes to output, nothing is passed to the next command in the pipeline (if any) + return ''; + } else { + return uniqed; + } +} + +module.exports = _uniq; diff --git a/node_modules/shelljs/src/which.js b/node_modules/shelljs/src/which.js new file mode 100644 index 0000000..8ac7b77 --- /dev/null +++ b/node_modules/shelljs/src/which.js @@ -0,0 +1,119 @@ +var fs = require('fs'); +var path = require('path'); +var common = require('./common'); + +common.register('which', _which, { + allowGlobbing: false, + cmdOptions: { + 'a': 'all', + }, +}); + +// XP's system default value for `PATHEXT` system variable, just in case it's not +// set on Windows. +var XP_DEFAULT_PATHEXT = '.com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh'; + +// For earlier versions of NodeJS that doesn't have a list of constants (< v6) +var FILE_EXECUTABLE_MODE = 1; + +function isWindowsPlatform() { + return process.platform === 'win32'; +} + +// Cross-platform method for splitting environment `PATH` variables +function splitPath(p) { + return p ? p.split(path.delimiter) : []; +} + +// Tests are running all cases for this func but it stays uncovered by codecov due to unknown reason +/* istanbul ignore next */ +function isExecutable(pathName) { + try { + // TODO(node-support): replace with fs.constants.X_OK once remove support for node < v6 + fs.accessSync(pathName, FILE_EXECUTABLE_MODE); + } catch (err) { + return false; + } + return true; +} + +function checkPath(pathName) { + return fs.existsSync(pathName) && !common.statFollowLinks(pathName).isDirectory() + && (isWindowsPlatform() || isExecutable(pathName)); +} + +//@ +//@ ### which(command) +//@ +//@ Examples: +//@ +//@ ```javascript +//@ var nodeExec = which('node'); +//@ ``` +//@ +//@ Searches for `command` in the system's `PATH`. On Windows, this uses the +//@ `PATHEXT` variable to append the extension if it's not already executable. +//@ Returns a [ShellString](#shellstringstr) containing the absolute path to +//@ `command`. +function _which(options, cmd) { + if (!cmd) common.error('must specify command'); + + var isWindows = isWindowsPlatform(); + var pathArray = splitPath(process.env.PATH); + + var queryMatches = []; + + // No relative/absolute paths provided? + if (!cmd.includes('/')) { + // Assume that there are no extensions to append to queries (this is the + // case for unix) + var pathExtArray = ['']; + if (isWindows) { + // In case the PATHEXT variable is somehow not set (e.g. + // child_process.spawn with an empty environment), use the XP default. 
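+      // PATHEXT is uppercased and split on the platform's path delimiter, and each
+      // candidate path is uppercased below as well, so the extension check is
+      // case-insensitive; on unix pathExtArray keeps its [''] default.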
+ var pathExtEnv = process.env.PATHEXT || XP_DEFAULT_PATHEXT; + pathExtArray = splitPath(pathExtEnv.toUpperCase()); + } + + // Search for command in PATH + for (var k = 0; k < pathArray.length; k++) { + // already found it + if (queryMatches.length > 0 && !options.all) break; + + var attempt = path.resolve(pathArray[k], cmd); + + if (isWindows) { + attempt = attempt.toUpperCase(); + } + + var match = attempt.match(/\.[^<>:"/|?*.]+$/); + if (match && pathExtArray.includes(match[0])) { // this is Windows-only + // The user typed a query with the file extension, like + // `which('node.exe')` + if (checkPath(attempt)) { + queryMatches.push(attempt); + break; + } + } else { // All-platforms + // Cycle through the PATHEXT array, and check each extension + // Note: the array is always [''] on Unix + for (var i = 0; i < pathExtArray.length; i++) { + var ext = pathExtArray[i]; + var newAttempt = attempt + ext; + if (checkPath(newAttempt)) { + queryMatches.push(newAttempt); + break; + } + } + } + } + } else if (checkPath(cmd)) { // a valid absolute or relative path + queryMatches.push(path.resolve(cmd)); + } + + if (queryMatches.length > 0) { + return options.all ? queryMatches : queryMatches[0]; + } + return options.all ? [] : null; +} +module.exports = _which; diff --git a/node_modules/signal-exit/LICENSE.txt b/node_modules/signal-exit/LICENSE.txt new file mode 100644 index 0000000..eead04a --- /dev/null +++ b/node_modules/signal-exit/LICENSE.txt @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/signal-exit/README.md b/node_modules/signal-exit/README.md new file mode 100644 index 0000000..f9c7c00 --- /dev/null +++ b/node_modules/signal-exit/README.md @@ -0,0 +1,39 @@ +# signal-exit + +[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) +[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) +[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +When you want to fire an event no matter how a process exits: + +* reaching the end of execution. +* explicitly having `process.exit(code)` called. +* having `process.kill(pid, sig)` called. +* receiving a fatal signal from outside the process + +Use `signal-exit`. + +```js +var onExit = require('signal-exit') + +onExit(function (code, signal) { + console.log('process exited!') +}) +``` + +## API + +`var remove = onExit(function (code, signal) {}, options)` + +The return value of the function is a function that will remove the +handler. 
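For example, the removal function lets a caller unhook its handler once it no longer cares about exit reporting (the handler body here is purely illustrative):

```js
var onExit = require('signal-exit')

var remove = onExit(function (code, signal) {
  console.log('process exited with', code, signal)
})

// ...later, once exit reporting is no longer needed:
remove()
```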
+ +Note that the function *only* fires for signals if the signal would +cause the process to exit. That is, there are no other listeners, and +it is a fatal signal. + +## Options + +* `alwaysLast`: Run this handler after any other signal or exit + handlers. This causes `process.emit` to be monkeypatched. diff --git a/node_modules/signal-exit/index.js b/node_modules/signal-exit/index.js new file mode 100644 index 0000000..93703f3 --- /dev/null +++ b/node_modules/signal-exit/index.js @@ -0,0 +1,202 @@ +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +// grab a reference to node's real process object right away +var process = global.process + +const processOk = function (process) { + return process && + typeof process === 'object' && + typeof process.removeListener === 'function' && + typeof process.emit === 'function' && + typeof process.reallyExit === 'function' && + typeof process.listeners === 'function' && + typeof process.kill === 'function' && + typeof process.pid === 'number' && + typeof process.on === 'function' +} + +// some kind of non-node environment, just no-op +/* istanbul ignore if */ +if (!processOk(process)) { + module.exports = function () { + return function () {} + } +} else { + var assert = require('assert') + var signals = require('./signals.js') + var isWin = /^win/i.test(process.platform) + + var EE = require('events') + /* istanbul ignore if */ + if (typeof EE !== 'function') { + EE = EE.EventEmitter + } + + var emitter + if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ + } else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} + } + + // Because this emitter is a global, we have to check to see if a + // previous version of this library failed to enable infinite listeners. + // I know what you're about to say. But literally everything about + // signal-exit is a compromise with evil. Get used to it. + if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true + } + + module.exports = function (cb, opts) { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return function () {} + } + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() + } + } + emitter.on(ev, cb) + + return remove + } + + var unload = function unload () { + if (!loaded || !processOk(global.process)) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 + } + module.exports.unload = unload + + var emit = function emit (event, code, signal) { + /* istanbul ignore if */ + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) + } + + // { : , ... 
} + var sigListeners = {} + signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return + } + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' + } + /* istanbul ignore next */ + process.kill(process.pid, sig) + } + } + }) + + module.exports.signals = function () { + return signals + } + + var loaded = false + + var load = function load () { + if (loaded || !processOk(global.process)) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. + emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit + } + module.exports.load = load + + var originalProcessReallyExit = process.reallyExit + var processReallyExit = function processReallyExit (code) { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return + } + process.exitCode = code || /* istanbul ignore next */ 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) + } + + var originalProcessEmit = process.emit + var processEmit = function processEmit (ev, arg) { + if (ev === 'exit' && processOk(global.process)) { + /* istanbul ignore else */ + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + /* istanbul ignore next */ + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } + } +} diff --git a/node_modules/signal-exit/package.json b/node_modules/signal-exit/package.json new file mode 100644 index 0000000..e1a0031 --- /dev/null +++ b/node_modules/signal-exit/package.json @@ -0,0 +1,38 @@ +{ + "name": "signal-exit", + "version": "3.0.7", + "description": "when you want to fire an event no matter how a process exits.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "files": [ + "index.js", + "signals.js" + ], + "repository": { + "type": "git", + "url": "https://github.com/tapjs/signal-exit.git" + }, + "keywords": [ + "signal", + "exit" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/tapjs/signal-exit/issues" + }, + "homepage": "https://github.com/tapjs/signal-exit", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^3.1.1", + "nyc": "^15.1.0", + "standard-version": "^9.3.1", + "tap": "^15.1.1" + } +} diff --git 
a/node_modules/signal-exit/signals.js b/node_modules/signal-exit/signals.js new file mode 100644 index 0000000..3bd67a8 --- /dev/null +++ b/node_modules/signal-exit/signals.js @@ -0,0 +1,53 @@ +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} diff --git a/node_modules/strip-final-newline/index.js b/node_modules/strip-final-newline/index.js new file mode 100644 index 0000000..78fc0c5 --- /dev/null +++ b/node_modules/strip-final-newline/index.js @@ -0,0 +1,16 @@ +'use strict'; + +module.exports = input => { + const LF = typeof input === 'string' ? '\n' : '\n'.charCodeAt(); + const CR = typeof input === 'string' ? '\r' : '\r'.charCodeAt(); + + if (input[input.length - 1] === LF) { + input = input.slice(0, input.length - 1); + } + + if (input[input.length - 1] === CR) { + input = input.slice(0, input.length - 1); + } + + return input; +}; diff --git a/node_modules/strip-final-newline/license b/node_modules/strip-final-newline/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/strip-final-newline/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
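Since the implementation above checks for a trailing `\n` first and then a trailing `\r`, a Windows-style `\r\n` ending is stripped in two steps, while interior newlines are left untouched; a quick sketch:

```js
const stripFinalNewline = require('strip-final-newline');

console.log(JSON.stringify(stripFinalNewline('foo\nbar\r\n')));
//=> "foo\nbar"

console.log(stripFinalNewline(Buffer.from('foo\r\n')).toString());
//=> "foo"
```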
diff --git a/node_modules/strip-final-newline/package.json b/node_modules/strip-final-newline/package.json new file mode 100644 index 0000000..d9f2a6c --- /dev/null +++ b/node_modules/strip-final-newline/package.json @@ -0,0 +1,40 @@ +{ + "name": "strip-final-newline", + "version": "2.0.0", + "description": "Strip the final newline character from a string/buffer", + "license": "MIT", + "repository": "sindresorhus/strip-final-newline", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "strip", + "trim", + "remove", + "delete", + "final", + "last", + "end", + "file", + "newline", + "linebreak", + "character", + "string", + "buffer" + ], + "devDependencies": { + "ava": "^0.25.0", + "xo": "^0.23.0" + } +} diff --git a/node_modules/strip-final-newline/readme.md b/node_modules/strip-final-newline/readme.md new file mode 100644 index 0000000..32dfd50 --- /dev/null +++ b/node_modules/strip-final-newline/readme.md @@ -0,0 +1,30 @@ +# strip-final-newline [![Build Status](https://travis-ci.com/sindresorhus/strip-final-newline.svg?branch=master)](https://travis-ci.com/sindresorhus/strip-final-newline) + +> Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from a string/buffer + +Can be useful when parsing the output of, for example, `ChildProcess#execFile`, as [binaries usually output a newline at the end](https://stackoverflow.com/questions/729692/why-should-text-files-end-with-a-newline). Normally, you would use `stdout.trim()`, but that would also remove newlines at the start and whitespace. + + +## Install + +``` +$ npm install strip-final-newline +``` + + +## Usage + +```js +const stripFinalNewline = require('strip-final-newline'); + +stripFinalNewline('foo\nbar\n\n'); +//=> 'foo\nbar\n' + +stripFinalNewline(Buffer.from('foo\nbar\n\n')).toString(); +//=> 'foo\nbar\n' +``` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/to-regex-range/LICENSE b/node_modules/to-regex-range/LICENSE new file mode 100644 index 0000000..7cccaf9 --- /dev/null +++ b/node_modules/to-regex-range/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015-present, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/to-regex-range/README.md b/node_modules/to-regex-range/README.md new file mode 100644 index 0000000..38887da --- /dev/null +++ b/node_modules/to-regex-range/README.md @@ -0,0 +1,305 @@ +# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) + +> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. + +Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. + +## Install + +Install with [npm](https://www.npmjs.com/): + +```sh +$ npm install --save to-regex-range +``` + +
+What does this do? + +
+ +This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. + +**Example** + +```js +const toRegexRange = require('to-regex-range'); +const regex = new RegExp(toRegexRange('15', '95')); +``` + +A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). + +
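For instance, the returned source can be embedded in a larger pattern before compiling it (the surrounding `port ` text is purely illustrative):

```js
const toRegexRange = require('to-regex-range');

const range = toRegexRange('15', '95');
const portLine = new RegExp(`^port (?:${range})$`);

console.log(portLine.test('port 42')); // => true
console.log(portLine.test('port 99')); // => false
```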
+ +
+ +
+Why use this library? + +
+ +### Convenience + +Creating regular expressions for matching numbers gets deceptively complicated pretty fast. + +For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: + +* regex for matching `1` => `/1/` (easy enough) +* regex for matching `1` through `5` => `/[1-5]/` (not bad...) +* regex for matching `1` or `5` => `/(1|5)/` (still easy...) +* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) +* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) +* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) +* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) + +The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. + +**Learn more** + +If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. + +### Heavily tested + +As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. + +Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. + +### Optimized + +Generated regular expressions are optimized: + +* duplicate sequences and character classes are reduced using quantifiers +* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative +* uses fragment caching to avoid processing the same exact string more than once + +
+ +
+ +## Usage + +Add this library to your javascript application with the following line of code + +```js +const toRegexRange = require('to-regex-range'); +``` + +The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). + +```js +const source = toRegexRange('15', '95'); +//=> 1[5-9]|[2-8][0-9]|9[0-5] + +const regex = new RegExp(`^${source}$`); +console.log(regex.test('14')); //=> false +console.log(regex.test('50')); //=> true +console.log(regex.test('94')); //=> true +console.log(regex.test('96')); //=> false +``` + +## Options + +### options.capture + +**Type**: `boolean` + +**Deafault**: `undefined` + +Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. + +```js +console.log(toRegexRange('-10', '10')); +//=> -[1-9]|-?10|[0-9] + +console.log(toRegexRange('-10', '10', { capture: true })); +//=> (-[1-9]|-?10|[0-9]) +``` + +### options.shorthand + +**Type**: `boolean` + +**Deafault**: `undefined` + +Use the regex shorthand for `[0-9]`: + +```js +console.log(toRegexRange('0', '999999')); +//=> [0-9]|[1-9][0-9]{1,5} + +console.log(toRegexRange('0', '999999', { shorthand: true })); +//=> \d|[1-9]\d{1,5} +``` + +### options.relaxZeros + +**Type**: `boolean` + +**Default**: `true` + +This option relaxes matching for leading zeros when when ranges are zero-padded. + +```js +const source = toRegexRange('-0010', '0010'); +const regex = new RegExp(`^${source}$`); +console.log(regex.test('-10')); //=> true +console.log(regex.test('-010')); //=> true +console.log(regex.test('-0010')); //=> true +console.log(regex.test('10')); //=> true +console.log(regex.test('010')); //=> true +console.log(regex.test('0010')); //=> true +``` + +When `relaxZeros` is false, matching is strict: + +```js +const source = toRegexRange('-0010', '0010', { relaxZeros: false }); +const regex = new RegExp(`^${source}$`); +console.log(regex.test('-10')); //=> false +console.log(regex.test('-010')); //=> false +console.log(regex.test('-0010')); //=> true +console.log(regex.test('10')); //=> false +console.log(regex.test('010')); //=> false +console.log(regex.test('0010')); //=> true +``` + +## Examples + +| **Range** | **Result** | **Compile time** | +| --- | --- | --- | +| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ | +| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ | +| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ | +| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ | +| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ | +| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ | +| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ | +| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ | +| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ | +| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ | +| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ | +| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ | +| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ | +| `toRegexRange(5, 5)` | `5` | _8μs_ | +| `toRegexRange(5, 6)` | `5\|6` | _11μs_ | +| `toRegexRange(1, 2)` | `1\|2` | _6μs_ | +| 
`toRegexRange(1, 5)` | `[1-5]` | _15μs_ | +| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ | +| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ | +| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ | +| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ | +| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ | +| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ | +| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ | + +## Heads up! + +**Order of arguments** + +When the `min` is larger than the `max`, values will be flipped to create a valid range: + +```js +toRegexRange('51', '29'); +``` + +Is effectively flipped to: + +```js +toRegexRange('29', '51'); +//=> 29|[3-4][0-9]|5[0-1] +``` + +**Steps / increments** + +This library does not support steps (increments). A pr to add support would be welcome. + +## History + +### v2.0.0 - 2017-04-21 + +**New features** + +Adds support for zero-padding! + +### v1.0.0 + +**Optimizations** + +Repeating ranges are now grouped using quantifiers. rocessing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. + +## Attribution + +Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). + +## About + +
+Contributing + +Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). + +
+ +
+Running Tests + +Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: + +```sh +$ npm install && npm test +``` + +
+ +
+Building docs + +_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ + +To generate the readme, run the following command: + +```sh +$ npm install -g verbose/verb#dev verb-generate-readme && verb +``` + +
+ +### Related projects + +You might also be interested in these projects: + +* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") +* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") +* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") +* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") +* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") + +### Contributors + +| **Commits** | **Contributor** | +| --- | --- | +| 63 | [jonschlinkert](https://github.com/jonschlinkert) | +| 3 | [doowb](https://github.com/doowb) | +| 2 | [realityking](https://github.com/realityking) | + +### Author + +**Jon Schlinkert** + +* [GitHub Profile](https://github.com/jonschlinkert) +* [Twitter Profile](https://twitter.com/jonschlinkert) +* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) + +Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! + + + + + +### License + +Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). +Released under the [MIT License](LICENSE). + +*** + +_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/to-regex-range/index.js b/node_modules/to-regex-range/index.js new file mode 100644 index 0000000..77fbace --- /dev/null +++ b/node_modules/to-regex-range/index.js @@ -0,0 +1,288 @@ +/*! + * to-regex-range + * + * Copyright (c) 2015-present, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +'use strict'; + +const isNumber = require('is-number'); + +const toRegexRange = (min, max, options) => { + if (isNumber(min) === false) { + throw new TypeError('toRegexRange: expected the first argument to be a number'); + } + + if (max === void 0 || min === max) { + return String(min); + } + + if (isNumber(max) === false) { + throw new TypeError('toRegexRange: expected the second argument to be a number.'); + } + + let opts = { relaxZeros: true, ...options }; + if (typeof opts.strictZeros === 'boolean') { + opts.relaxZeros = opts.strictZeros === false; + } + + let relax = String(opts.relaxZeros); + let shorthand = String(opts.shorthand); + let capture = String(opts.capture); + let wrap = String(opts.wrap); + let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; + + if (toRegexRange.cache.hasOwnProperty(cacheKey)) { + return toRegexRange.cache[cacheKey].result; + } + + let a = Math.min(min, max); + let b = Math.max(min, max); + + if (Math.abs(a - b) === 1) { + let result = min + '|' + max; + if (opts.capture) { + return `(${result})`; + } + if (opts.wrap === false) { + return result; + } + return `(?:${result})`; + } + + let isPadded = hasPadding(min) || hasPadding(max); + let state = { min, max, a, b }; + let positives = []; + let negatives = []; + + if (isPadded) { + state.isPadded = isPadded; + state.maxLen = String(state.max).length; + } + + if (a < 0) { + let newMin = b < 0 ? Math.abs(b) : 1; + negatives = splitToPatterns(newMin, Math.abs(a), state, opts); + a = state.a = 0; + } + + if (b >= 0) { + positives = splitToPatterns(a, b, state, opts); + } + + state.negatives = negatives; + state.positives = positives; + state.result = collatePatterns(negatives, positives, opts); + + if (opts.capture === true) { + state.result = `(${state.result})`; + } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { + state.result = `(?:${state.result})`; + } + + toRegexRange.cache[cacheKey] = state; + return state.result; +}; + +function collatePatterns(neg, pos, options) { + let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; + let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; + let intersected = filterPatterns(neg, pos, '-?', true, options) || []; + let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); + return subpatterns.join('|'); +} + +function splitToRanges(min, max) { + let nines = 1; + let zeros = 1; + + let stop = countNines(min, nines); + let stops = new Set([max]); + + while (min <= stop && stop <= max) { + stops.add(stop); + nines += 1; + stop = countNines(min, nines); + } + + stop = countZeros(max + 1, zeros) - 1; + + while (min < stop && stop <= max) { + stops.add(stop); + zeros += 1; + stop = countZeros(max + 1, zeros) - 1; + } + + stops = [...stops]; + stops.sort(compare); + return stops; +} + +/** + * Convert a range to a regex pattern + * @param {Number} `start` + * @param {Number} `stop` + * @return {String} + */ + +function rangeToPattern(start, stop, options) { + if (start === stop) { + return { pattern: start, count: [], digits: 0 }; + } + + let zipped = zip(start, stop); + let digits = zipped.length; + let pattern = ''; + let count = 0; + + for (let i = 0; i < digits; i++) { + let [startDigit, stopDigit] = zipped[i]; + + if (startDigit === stopDigit) { + pattern += startDigit; + + } else if (startDigit !== '0' || stopDigit !== '9') { + pattern += toCharacterClass(startDigit, stopDigit, options); + + } else { + count++; + } + } + + if (count) { + pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; + } + + return { pattern, count: [count], digits }; +} + +function splitToPatterns(min, max, tok, options) { + let ranges = splitToRanges(min, max); + let tokens = []; + let start = min; + let prev; + + for (let i = 0; i < ranges.length; i++) { + let max = ranges[i]; + let obj = rangeToPattern(String(start), String(max), options); + let zeros = ''; + + if (!tok.isPadded && prev && prev.pattern === obj.pattern) { + if (prev.count.length > 1) { + prev.count.pop(); + } + + prev.count.push(obj.count[0]); + prev.string = prev.pattern + toQuantifier(prev.count); + start = max + 1; + continue; + } + + if (tok.isPadded) { + zeros = padZeros(max, tok, options); + } + + obj.string = zeros + obj.pattern + toQuantifier(obj.count); + tokens.push(obj); + start = max + 1; + prev = obj; + } + + return tokens; +} + +function filterPatterns(arr, comparison, prefix, intersection, options) { + let result = []; + + for (let ele of arr) { + let { string } = ele; + + // only push if _both_ are negative... + if (!intersection && !contains(comparison, 'string', string)) { + result.push(prefix + string); + } + + // or _both_ are positive + if (intersection && contains(comparison, 'string', string)) { + result.push(prefix + string); + } + } + return result; +} + +/** + * Zip strings + */ + +function zip(a, b) { + let arr = []; + for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); + return arr; +} + +function compare(a, b) { + return a > b ? 1 : b > a ? -1 : 0; +} + +function contains(arr, key, val) { + return arr.some(ele => ele[key] === val); +} + +function countNines(min, len) { + return Number(String(min).slice(0, -len) + '9'.repeat(len)); +} + +function countZeros(integer, zeros) { + return integer - (integer % Math.pow(10, zeros)); +} + +function toQuantifier(digits) { + let [start = 0, stop = ''] = digits; + if (stop || start > 1) { + return `{${start + (stop ? ',' + stop : '')}}`; + } + return ''; +} + +function toCharacterClass(a, b, options) { + return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; +} + +function hasPadding(str) { + return /^-?(0+)\d/.test(str); +} + +function padZeros(value, tok, options) { + if (!tok.isPadded) { + return value; + } + + let diff = Math.abs(tok.maxLen - String(value).length); + let relax = options.relaxZeros !== false; + + switch (diff) { + case 0: + return ''; + case 1: + return relax ? '0?' : '0'; + case 2: + return relax ? '0{0,2}' : '00'; + default: { + return relax ? `0{0,${diff}}` : `0{${diff}}`; + } + } +} + +/** + * Cache + */ + +toRegexRange.cache = {}; +toRegexRange.clearCache = () => (toRegexRange.cache = {}); + +/** + * Expose `toRegexRange` + */ + +module.exports = toRegexRange; diff --git a/node_modules/to-regex-range/package.json b/node_modules/to-regex-range/package.json new file mode 100644 index 0000000..4ef194f --- /dev/null +++ b/node_modules/to-regex-range/package.json @@ -0,0 +1,88 @@ +{ + "name": "to-regex-range", + "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", + "version": "5.0.1", + "homepage": "https://github.com/micromatch/to-regex-range", + "author": "Jon Schlinkert (https://github.com/jonschlinkert)", + "contributors": [ + "Jon Schlinkert (http://twitter.com/jonschlinkert)", + "Rouven Weßling (www.rouvenwessling.de)" + ], + "repository": "micromatch/to-regex-range", + "bugs": { + "url": "https://github.com/micromatch/to-regex-range/issues" + }, + "license": "MIT", + "files": [ + "index.js" + ], + "main": "index.js", + "engines": { + "node": ">=8.0" + }, + "scripts": { + "test": "mocha" + }, + "dependencies": { + "is-number": "^7.0.0" + }, + "devDependencies": { + "fill-range": "^6.0.0", + "gulp-format-md": "^2.0.0", + "mocha": "^6.0.2", + "text-table": "^0.2.0", + "time-diff": "^0.3.1" + }, + "keywords": [ + "bash", + "date", + "expand", + "expansion", + "expression", + "glob", + "match", + "match date", + "match number", + "match numbers", + "match year", + "matches", + "matching", + "number", + "numbers", + "numerical", + "range", + "ranges", + "regex", + "regexp", + "regular", + "regular expression", + "sequence" + ], + "verb": { + "layout": "default", + "toc": false, + "tasks": [ + "readme" + ], + "plugins": [ + "gulp-format-md" + ], + "lint": { + "reflinks": true + }, + "helpers": { + "examples": { + "displayName": "examples" + } + }, + "related": { + "list": [ + "expand-range", + "fill-range", + "micromatch", + "repeat-element", + "repeat-string" + ] + } + } +} diff --git a/node_modules/which/CHANGELOG.md b/node_modules/which/CHANGELOG.md new file mode 100644 index 0000000..7fb1f20 --- /dev/null +++ b/node_modules/which/CHANGELOG.md @@ -0,0 +1,166 @@ +# Changes + + +## 2.0.2 + +* Rename bin to `node-which` + +## 2.0.1 + +* generate changelog and publish on version bump +* enforce 100% test coverage +* Promise interface + +## 2.0.0 + +* Parallel tests, modern JavaScript, and drop support for node < 8 + +## 1.3.1 + +* update deps +* update travis + +## v1.3.0 + +* Add nothrow option to which.sync +* update tap + +## v1.2.14 + +* appveyor: drop node 5 and 0.x +* travis-ci: add node 6, drop 0.x + +## v1.2.13 + +* test: Pass missing option to pass on windows +* update tap +* update isexe to 2.0.0 +* neveragain.tech pledge request + +## v1.2.12 + +* Removed unused require + +## v1.2.11 + +* Prevent changelog script from being included in package + +## v1.2.10 + +* Use env.PATH only, not env.Path + +## v1.2.9 + +* fix for paths starting with ../ +* Remove unused `is-absolute` module + +## v1.2.8 + +* bullet items in changelog that contain (but don't start with) # + +## v1.2.7 + +* strip 'update changelog' changelog entries out of changelog + +## v1.2.6 + +* make the changelog bulleted + +## v1.2.5 + +* make a changelog, and keep it up to date +* don't include tests in package +* Properly handle relative-path executables +* appveyor +* Attach error code to Not Found error +* Make tests pass on Windows + +## v1.2.4 + +* Fix typo + +## v1.2.3 + +* update isexe, fix regression in pathExt handling + +## v1.2.2 + +* update deps, use isexe module, test windows + +## v1.2.1 + +* Sometimes windows PATH entries are quoted +* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode. 
+* doc cli + +## v1.2.0 + +* Add support for opt.all and -as cli flags +* test the bin +* update travis +* Allow checking for multiple programs in bin/which +* tap 2 + +## v1.1.2 + +* travis +* Refactored and fixed undefined error on Windows +* Support strict mode + +## v1.1.1 + +* test +g exes against secondary groups, if available +* Use windows exe semantics on cygwin & msys +* cwd should be first in path on win32, not last +* Handle lower-case 'env.Path' on Windows +* Update docs +* use single-quotes + +## v1.1.0 + +* Add tests, depend on is-absolute + +## v1.0.9 + +* which.js: root is allowed to execute files owned by anyone + +## v1.0.8 + +* don't use graceful-fs + +## v1.0.7 + +* add license to package.json + +## v1.0.6 + +* isc license + +## 1.0.5 + +* Awful typo + +## 1.0.4 + +* Test for path absoluteness properly +* win: Allow '' as a pathext if cmd has a . in it + +## 1.0.3 + +* Remove references to execPath +* Make `which.sync()` work on Windows by honoring the PATHEXT variable. +* Make `isExe()` always return true on Windows. +* MIT + +## 1.0.2 + +* Only files can be exes + +## 1.0.1 + +* Respect the PATHEXT env for win32 support +* should 0755 the bin +* binary +* guts +* package +* 1st diff --git a/node_modules/which/LICENSE b/node_modules/which/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/which/README.md b/node_modules/which/README.md new file mode 100644 index 0000000..cd83350 --- /dev/null +++ b/node_modules/which/README.md @@ -0,0 +1,54 @@ +# which + +Like the unix `which` utility. + +Finds the first instance of a specified executable in the PATH +environment variable. Does not cache the results, so `hash -r` is not +needed when the PATH changes. + +## USAGE + +```javascript +var which = require('which') + +// async usage +which('node', function (er, resolvedPath) { + // er is returned if no "node" is found on the PATH + // if it is found, then the absolute path to the exec is returned +}) + +// or promise +which('node').then(resolvedPath => { ... }).catch(er => { ... not found ... }) + +// sync usage +// throws if not found +var resolved = which.sync('node') + +// if nothrow option is used, returns null if not found +resolved = which.sync('node', {nothrow: true}) + +// Pass options to override the PATH and PATHEXT environment vars. +which('node', { path: someOtherPath }, function (er, resolved) { + if (er) + throw er + console.log('found at %j', resolved) +}) +``` + +## CLI USAGE + +Same as the BSD `which(1)` binary. + +``` +usage: which [-as] program ... +``` + +## OPTIONS + +You may pass an options object as the second argument. + +- `path`: Use instead of the `PATH` environment variable. 
+- `pathExt`: Use instead of the `PATHEXT` environment variable. +- `all`: Return all matches, instead of just the first one. Note that + this means the function returns an array of strings instead of a + single string. diff --git a/node_modules/which/bin/node-which b/node_modules/which/bin/node-which new file mode 100755 index 0000000..7cee372 --- /dev/null +++ b/node_modules/which/bin/node-which @@ -0,0 +1,52 @@ +#!/usr/bin/env node +var which = require("../") +if (process.argv.length < 3) + usage() + +function usage () { + console.error('usage: which [-as] program ...') + process.exit(1) +} + +var all = false +var silent = false +var dashdash = false +var args = process.argv.slice(2).filter(function (arg) { + if (dashdash || !/^-/.test(arg)) + return true + + if (arg === '--') { + dashdash = true + return false + } + + var flags = arg.substr(1).split('') + for (var f = 0; f < flags.length; f++) { + var flag = flags[f] + switch (flag) { + case 's': + silent = true + break + case 'a': + all = true + break + default: + console.error('which: illegal option -- ' + flag) + usage() + } + } + return false +}) + +process.exit(args.reduce(function (pv, current) { + try { + var f = which.sync(current, { all: all }) + if (all) + f = f.join('\n') + if (!silent) + console.log(f) + return pv; + } catch (e) { + return 1; + } +}, 0)) diff --git a/node_modules/which/package.json b/node_modules/which/package.json new file mode 100644 index 0000000..97ad7fb --- /dev/null +++ b/node_modules/which/package.json @@ -0,0 +1,43 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "which", + "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.", + "version": "2.0.2", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-which.git" + }, + "main": "which.js", + "bin": { + "node-which": "./bin/node-which" + }, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "devDependencies": { + "mkdirp": "^0.5.0", + "rimraf": "^2.6.2", + "tap": "^14.6.9" + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublish": "npm run changelog", + "prechangelog": "bash gen-changelog.sh", + "changelog": "git add CHANGELOG.md", + "postchangelog": "git commit -m 'update changelog - '${npm_package_version}", + "postpublish": "git push origin --follow-tags" + }, + "files": [ + "which.js", + "bin/node-which" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/which/which.js b/node_modules/which/which.js new file mode 100644 index 0000000..82afffd --- /dev/null +++ b/node_modules/which/which.js @@ -0,0 +1,125 @@ +const isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +const path = require('path') +const COLON = isWindows ? ';' : ':' +const isexe = require('isexe') + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, opt) => { + const colon = opt.colon || COLON + + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? [''] + : ( + [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(opt.path || process.env.PATH || + /* istanbul ignore next: very unusual */ '').split(colon), + ] + ) + const pathExtExe = isWindows + ? 
opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' + : '' + const pathExt = isWindows ? pathExtExe.split(colon) : [''] + + if (isWindows) { + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } + + return { + pathEnv, + pathExt, + pathExtExe, + } +} + +const which = (cmd, opt, cb) => { + if (typeof opt === 'function') { + cb = opt + opt = {} + } + if (!opt) + opt = {} + + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + const step = i => new Promise((resolve, reject) => { + if (i === pathEnv.length) + return opt.all && found.length ? resolve(found) + : reject(getNotFoundError(cmd)) + + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw + + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd + + resolve(subStep(p, i, 0)) + }) + + const subStep = (p, i, ii) => new Promise((resolve, reject) => { + if (ii === pathExt.length) + return resolve(step(i + 1)) + const ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return resolve(p + ext) + } + return resolve(subStep(p, i, ii + 1)) + }) + }) + + return cb ? step(0).then(res => cb(null, res), cb) : step(0) +} + +const whichSync = (cmd, opt) => { + opt = opt || {} + + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (let i = 0; i < pathEnv.length; i ++) { + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw + + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd + + for (let j = 0; j < pathExt.length; j ++) { + const cur = p + pathExt[j] + try { + const is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } + } + + if (opt.all && found.length) + return found + + if (opt.nothrow) + return null + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..3037fe3 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,489 @@ +{ + "name": "design-patterns-cpp14", + "version": "1.0.23", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "design-patterns-cpp14", + "version": "1.0.23", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "metacommon": "git+https://github.com/makiolo/metacommon.git" + }, + "devDependencies": { + "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": 
"https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/metacommon": { + "version": "1.0.1", + "resolved": "git+ssh://git@github.com/makiolo/metacommon.git#0eeff0ebfdbe322e8c01ee9c5cfae58dbf8b1f0c", + "hasInstallScript": true, + "license": "MIT" + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/npm-mas-mas": { + "version": "0.0.1", + "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", + "dev": true, + "license": "MIT", + "dependencies": { + "shelljs": ">=0.8.5" + }, + "bin": { + "cmaki": "cmaki_scripts/cmaki.js" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/shebang-command": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shelljs": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", + "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "execa": "^5.1.1", + "fast-glob": "^3.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + } + } +} From 6785347cd31643961f2fff667a0385a572ef6e76 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 17 Aug 2025 06:37:10 +0000 Subject: [PATCH 3/3] Fix potential bugs: replace std::exception with std::runtime_error, fix destructors, improve cache management Co-authored-by: makiolo <6398423+makiolo@users.noreply.github.com> --- .gitignore | 16 +- include/factory.h | 48 +- include/memoize.h | 151 +- node_modules/.bin/cmaki | 1 - node_modules/.bin/node-which | 1 - node_modules/.package-lock.json | 477 --- node_modules/@nodelib/fs.scandir/LICENSE | 21 - node_modules/@nodelib/fs.scandir/README.md | 171 -- .../@nodelib/fs.scandir/out/adapters/fs.d.ts | 20 - .../@nodelib/fs.scandir/out/adapters/fs.js | 19 - .../@nodelib/fs.scandir/out/constants.d.ts | 4 - .../@nodelib/fs.scandir/out/constants.js | 17 - .../@nodelib/fs.scandir/out/index.d.ts | 12 - node_modules/@nodelib/fs.scandir/out/index.js | 26 - .../fs.scandir/out/providers/async.d.ts | 7 - .../fs.scandir/out/providers/async.js | 104 - 
.../fs.scandir/out/providers/common.d.ts | 1 - .../fs.scandir/out/providers/common.js | 13 - .../fs.scandir/out/providers/sync.d.ts | 5 - .../@nodelib/fs.scandir/out/providers/sync.js | 54 - .../@nodelib/fs.scandir/out/settings.d.ts | 20 - .../@nodelib/fs.scandir/out/settings.js | 24 - .../@nodelib/fs.scandir/out/types/index.d.ts | 20 - .../@nodelib/fs.scandir/out/types/index.js | 2 - .../@nodelib/fs.scandir/out/utils/fs.d.ts | 2 - .../@nodelib/fs.scandir/out/utils/fs.js | 19 - .../@nodelib/fs.scandir/out/utils/index.d.ts | 2 - .../@nodelib/fs.scandir/out/utils/index.js | 5 - node_modules/@nodelib/fs.scandir/package.json | 44 - node_modules/@nodelib/fs.stat/LICENSE | 21 - node_modules/@nodelib/fs.stat/README.md | 126 - .../@nodelib/fs.stat/out/adapters/fs.d.ts | 13 - .../@nodelib/fs.stat/out/adapters/fs.js | 17 - node_modules/@nodelib/fs.stat/out/index.d.ts | 12 - node_modules/@nodelib/fs.stat/out/index.js | 26 - .../@nodelib/fs.stat/out/providers/async.d.ts | 4 - .../@nodelib/fs.stat/out/providers/async.js | 36 - .../@nodelib/fs.stat/out/providers/sync.d.ts | 3 - .../@nodelib/fs.stat/out/providers/sync.js | 23 - .../@nodelib/fs.stat/out/settings.d.ts | 16 - node_modules/@nodelib/fs.stat/out/settings.js | 16 - .../@nodelib/fs.stat/out/types/index.d.ts | 4 - .../@nodelib/fs.stat/out/types/index.js | 2 - node_modules/@nodelib/fs.stat/package.json | 37 - node_modules/@nodelib/fs.walk/LICENSE | 21 - node_modules/@nodelib/fs.walk/README.md | 215 -- node_modules/@nodelib/fs.walk/out/index.d.ts | 14 - node_modules/@nodelib/fs.walk/out/index.js | 34 - .../@nodelib/fs.walk/out/providers/async.d.ts | 12 - .../@nodelib/fs.walk/out/providers/async.js | 30 - .../@nodelib/fs.walk/out/providers/index.d.ts | 4 - .../@nodelib/fs.walk/out/providers/index.js | 9 - .../fs.walk/out/providers/stream.d.ts | 12 - .../@nodelib/fs.walk/out/providers/stream.js | 34 - .../@nodelib/fs.walk/out/providers/sync.d.ts | 10 - .../@nodelib/fs.walk/out/providers/sync.js | 14 - .../@nodelib/fs.walk/out/readers/async.d.ts | 30 - .../@nodelib/fs.walk/out/readers/async.js | 97 - .../@nodelib/fs.walk/out/readers/common.d.ts | 7 - .../@nodelib/fs.walk/out/readers/common.js | 31 - .../@nodelib/fs.walk/out/readers/reader.d.ts | 6 - .../@nodelib/fs.walk/out/readers/reader.js | 11 - .../@nodelib/fs.walk/out/readers/sync.d.ts | 15 - .../@nodelib/fs.walk/out/readers/sync.js | 59 - .../@nodelib/fs.walk/out/settings.d.ts | 30 - node_modules/@nodelib/fs.walk/out/settings.js | 26 - .../@nodelib/fs.walk/out/types/index.d.ts | 8 - .../@nodelib/fs.walk/out/types/index.js | 2 - node_modules/@nodelib/fs.walk/package.json | 44 - node_modules/braces/LICENSE | 21 - node_modules/braces/README.md | 586 ---- node_modules/braces/index.js | 170 -- node_modules/braces/lib/compile.js | 60 - node_modules/braces/lib/constants.js | 57 - node_modules/braces/lib/expand.js | 113 - node_modules/braces/lib/parse.js | 331 --- node_modules/braces/lib/stringify.js | 32 - node_modules/braces/lib/utils.js | 122 - node_modules/braces/package.json | 77 - node_modules/cross-spawn/LICENSE | 21 - node_modules/cross-spawn/README.md | 89 - node_modules/cross-spawn/index.js | 39 - node_modules/cross-spawn/lib/enoent.js | 59 - node_modules/cross-spawn/lib/parse.js | 91 - node_modules/cross-spawn/lib/util/escape.js | 47 - .../cross-spawn/lib/util/readShebang.js | 23 - .../cross-spawn/lib/util/resolveCommand.js | 52 - node_modules/cross-spawn/package.json | 73 - node_modules/execa/index.d.ts | 564 ---- node_modules/execa/index.js | 268 -- node_modules/execa/lib/command.js | 52 - 
node_modules/execa/lib/error.js | 88 - node_modules/execa/lib/kill.js | 115 - node_modules/execa/lib/promise.js | 46 - node_modules/execa/lib/stdio.js | 52 - node_modules/execa/lib/stream.js | 97 - node_modules/execa/license | 9 - node_modules/execa/package.json | 74 - node_modules/execa/readme.md | 663 ----- node_modules/fast-glob/LICENSE | 21 - node_modules/fast-glob/README.md | 830 ------ node_modules/fast-glob/out/index.d.ts | 40 - node_modules/fast-glob/out/index.js | 102 - .../fast-glob/out/managers/tasks.d.ts | 22 - node_modules/fast-glob/out/managers/tasks.js | 110 - .../fast-glob/out/providers/async.d.ts | 9 - node_modules/fast-glob/out/providers/async.js | 23 - .../fast-glob/out/providers/filters/deep.d.ts | 16 - .../fast-glob/out/providers/filters/deep.js | 62 - .../out/providers/filters/entry.d.ts | 17 - .../fast-glob/out/providers/filters/entry.js | 85 - .../out/providers/filters/error.d.ts | 8 - .../fast-glob/out/providers/filters/error.js | 15 - .../out/providers/matchers/matcher.d.ts | 33 - .../out/providers/matchers/matcher.js | 45 - .../out/providers/matchers/partial.d.ts | 4 - .../out/providers/matchers/partial.js | 38 - .../fast-glob/out/providers/provider.d.ts | 19 - .../fast-glob/out/providers/provider.js | 48 - .../fast-glob/out/providers/stream.d.ts | 11 - .../fast-glob/out/providers/stream.js | 31 - .../fast-glob/out/providers/sync.d.ts | 9 - node_modules/fast-glob/out/providers/sync.js | 23 - .../out/providers/transformers/entry.d.ts | 8 - .../out/providers/transformers/entry.js | 26 - node_modules/fast-glob/out/readers/async.d.ts | 10 - node_modules/fast-glob/out/readers/async.js | 35 - .../fast-glob/out/readers/reader.d.ts | 15 - node_modules/fast-glob/out/readers/reader.js | 33 - .../fast-glob/out/readers/stream.d.ts | 14 - node_modules/fast-glob/out/readers/stream.js | 55 - node_modules/fast-glob/out/readers/sync.d.ts | 12 - node_modules/fast-glob/out/readers/sync.js | 43 - node_modules/fast-glob/out/settings.d.ts | 164 -- node_modules/fast-glob/out/settings.js | 59 - node_modules/fast-glob/out/types/index.d.ts | 31 - node_modules/fast-glob/out/types/index.js | 2 - node_modules/fast-glob/out/utils/array.d.ts | 2 - node_modules/fast-glob/out/utils/array.js | 22 - node_modules/fast-glob/out/utils/errno.d.ts | 2 - node_modules/fast-glob/out/utils/errno.js | 7 - node_modules/fast-glob/out/utils/fs.d.ts | 4 - node_modules/fast-glob/out/utils/fs.js | 19 - node_modules/fast-glob/out/utils/index.d.ts | 8 - node_modules/fast-glob/out/utils/index.js | 17 - node_modules/fast-glob/out/utils/path.d.ts | 13 - node_modules/fast-glob/out/utils/path.js | 68 - node_modules/fast-glob/out/utils/pattern.d.ts | 49 - node_modules/fast-glob/out/utils/pattern.js | 206 -- node_modules/fast-glob/out/utils/stream.d.ts | 4 - node_modules/fast-glob/out/utils/stream.js | 17 - node_modules/fast-glob/out/utils/string.d.ts | 2 - node_modules/fast-glob/out/utils/string.js | 11 - node_modules/fast-glob/package.json | 81 - node_modules/fastq/.github/dependabot.yml | 11 - node_modules/fastq/.github/workflows/ci.yml | 75 - node_modules/fastq/LICENSE | 13 - node_modules/fastq/README.md | 312 -- node_modules/fastq/SECURITY.md | 15 - node_modules/fastq/bench.js | 66 - node_modules/fastq/example.js | 14 - node_modules/fastq/example.mjs | 11 - node_modules/fastq/index.d.ts | 57 - node_modules/fastq/package.json | 53 - node_modules/fastq/queue.js | 311 -- node_modules/fastq/test/example.ts | 83 - node_modules/fastq/test/promise.js | 291 -- node_modules/fastq/test/test.js | 653 ----- 
node_modules/fastq/test/tsconfig.json | 11 - node_modules/fill-range/LICENSE | 21 - node_modules/fill-range/README.md | 237 -- node_modules/fill-range/index.js | 248 -- node_modules/fill-range/package.json | 74 - node_modules/get-stream/buffer-stream.js | 52 - node_modules/get-stream/index.d.ts | 105 - node_modules/get-stream/index.js | 61 - node_modules/get-stream/license | 9 - node_modules/get-stream/package.json | 47 - node_modules/get-stream/readme.md | 124 - node_modules/glob-parent/CHANGELOG.md | 110 - node_modules/glob-parent/LICENSE | 15 - node_modules/glob-parent/README.md | 137 - node_modules/glob-parent/index.js | 42 - node_modules/glob-parent/package.json | 48 - node_modules/human-signals/CHANGELOG.md | 11 - node_modules/human-signals/LICENSE | 201 -- node_modules/human-signals/README.md | 165 -- node_modules/human-signals/build/src/core.js | 273 -- .../human-signals/build/src/core.js.map | 1 - .../human-signals/build/src/main.d.ts | 52 - node_modules/human-signals/build/src/main.js | 71 - .../human-signals/build/src/main.js.map | 1 - .../human-signals/build/src/realtime.js | 19 - .../human-signals/build/src/realtime.js.map | 1 - .../human-signals/build/src/signals.js | 35 - .../human-signals/build/src/signals.js.map | 1 - node_modules/human-signals/package.json | 64 - node_modules/is-extglob/LICENSE | 21 - node_modules/is-extglob/README.md | 107 - node_modules/is-extglob/index.js | 20 - node_modules/is-extglob/package.json | 69 - node_modules/is-glob/LICENSE | 21 - node_modules/is-glob/README.md | 206 -- node_modules/is-glob/index.js | 150 - node_modules/is-glob/package.json | 81 - node_modules/is-number/LICENSE | 21 - node_modules/is-number/README.md | 187 -- node_modules/is-number/index.js | 18 - node_modules/is-number/package.json | 82 - node_modules/is-stream/index.d.ts | 79 - node_modules/is-stream/index.js | 28 - node_modules/is-stream/license | 9 - node_modules/is-stream/package.json | 42 - node_modules/is-stream/readme.md | 60 - node_modules/isexe/.npmignore | 2 - node_modules/isexe/LICENSE | 15 - node_modules/isexe/README.md | 51 - node_modules/isexe/index.js | 57 - node_modules/isexe/mode.js | 41 - node_modules/isexe/package.json | 31 - node_modules/isexe/test/basic.js | 221 -- node_modules/isexe/windows.js | 42 - node_modules/merge-stream/LICENSE | 21 - node_modules/merge-stream/README.md | 78 - node_modules/merge-stream/index.js | 41 - node_modules/merge-stream/package.json | 19 - node_modules/merge2/LICENSE | 21 - node_modules/merge2/README.md | 144 - node_modules/merge2/index.js | 144 - node_modules/merge2/package.json | 43 - node_modules/metacommon/.travis.yml | 5 - node_modules/metacommon/README.md | 2 - node_modules/metacommon/cmaki.yml | 14 - node_modules/metacommon/common.h | 426 --- node_modules/metacommon/compile.sh | 3 - node_modules/metacommon/package.json | 32 - node_modules/metacommon/setup.sh | 3 - node_modules/micromatch/LICENSE | 21 - node_modules/micromatch/README.md | 1024 ------- node_modules/micromatch/index.js | 474 --- node_modules/micromatch/package.json | 119 - node_modules/mimic-fn/index.d.ts | 54 - node_modules/mimic-fn/index.js | 13 - node_modules/mimic-fn/license | 9 - node_modules/mimic-fn/package.json | 42 - node_modules/mimic-fn/readme.md | 69 - node_modules/npm-mas-mas/.travis.yml | 15 - node_modules/npm-mas-mas/LICENSE | 21 - node_modules/npm-mas-mas/Makefile | 21 - node_modules/npm-mas-mas/README | 57 - node_modules/npm-mas-mas/cmaki/.travis.yml | 5 - node_modules/npm-mas-mas/cmaki/GitUtils.cmake | 157 - 
node_modules/npm-mas-mas/cmaki/LICENSE | 22 - node_modules/npm-mas-mas/cmaki/README.md | 4 - node_modules/npm-mas-mas/cmaki/Utils.cmake | 32 - .../cmaki/ci/detect_operative_system.sh | 14 - node_modules/npm-mas-mas/cmaki/cmaki.cmake | 529 ---- .../npm-mas-mas/cmaki/facts/facts.cmake | 735 ----- .../npm-mas-mas/cmaki/init/.clang-format | 66 - .../npm-mas-mas/cmaki/junit/CTest2JUnit.xsl | 120 - .../npm-mas-mas/cmaki/junit/README.md | 3 - .../npm-mas-mas/cmaki_docker/.travis.yml | 4 - node_modules/npm-mas-mas/cmaki_docker/LICENSE | 21 - .../npm-mas-mas/cmaki_docker/README.md | 11 - .../npm-mas-mas/cmaki_docker/build.sh | 40 - .../cmaki_generator/CMakeLists.txt | 95 - .../npm-mas-mas/cmaki_generator/LICENSE | 22 - .../npm-mas-mas/cmaki_generator/README.md | 22 - .../npm-mas-mas/cmaki_generator/build | 10 - .../npm-mas-mas/cmaki_generator/build.cmd | 11 - .../npm-mas-mas/cmaki_generator/build.py | 757 ----- .../cmaki_generator/check_remote_version.py | 233 -- .../npm-mas-mas/cmaki_generator/common.yml | 498 ---- .../cmaki_generator/compilation.py | 238 -- .../cmaki_generator/download_package.py | 11 - .../cmaki_generator/get_package.py | 26 - .../cmaki_generator/get_return_code.py | 35 - .../cmaki_generator/gwen/CMakeLists.txt | 47 - .../cmaki_generator/hash_version.py | 172 -- .../cmaki_generator/junit/CTest2JUnit.xsl | 120 - .../cmaki_generator/junit/README.md | 3 - .../librocket/Build/CMakeLists.txt | 687 ----- .../cmaki_generator/librocket/CMakeLists.txt | 2 - .../cmaki_generator/noise/CMakeLists.txt | 26 - .../ois/demos/FFConsoleDemo.cpp | 1147 -------- .../cmaki_generator/ois/demos/Makefile.am | 11 - .../cmaki_generator/ois/demos/OISConsole.cpp | 459 --- .../ois/src/linux/LinuxForceFeedback.cpp | 563 ---- .../ois/src/linux/LinuxJoyStickEvents.cpp | 308 -- .../cmaki_generator/oxygine/CMakeLists.txt | 546 ---- .../cmaki_generator/packages/assimp.yml | 13 - .../cmaki_generator/packages/box2d.yml | 23 - .../cmaki_generator/packages/bullet2.yml | 54 - .../cmaki_generator/packages/cryptopp.yml | 70 - .../packages/dune-freetype.yml | 28 - .../cmaki_generator/packages/dune-glew.yml | 29 - .../cmaki_generator/packages/dune-zlib.yml | 38 - .../cmaki_generator/packages/fmod.yml | 20 - .../cmaki_generator/packages/freeimage.yml | 36 - .../packages/freeimage_cmake.yml | 40 - .../cmaki_generator/packages/google-gmock.yml | 61 - .../cmaki_generator/packages/gwen.yml | 11 - .../cmaki_generator/packages/haxx-libcurl.yml | 71 - .../cmaki_generator/packages/json.yml | 26 - .../cmaki_generator/packages/librocket.yml | 24 - .../cmaki_generator/packages/msgpack.yml | 10 - .../cmaki_generator/packages/noise.yml | 11 - .../cmaki_generator/packages/ois.yml | 19 - .../cmaki_generator/packages/openssl.yml | 24 - .../cmaki_generator/packages/oxygine.yml | 25 - .../cmaki_generator/packages/paho-mqtt3.yml | 22 - .../cmaki_generator/packages/paho-mqttpp3.yml | 21 - .../cmaki_generator/packages/pugixml.yml | 11 - .../cmaki_generator/packages/python.yml | 21 - .../cmaki_generator/packages/raknet.yml | 11 - .../packages/restclient-cpp.yml | 17 - .../cmaki_generator/packages/sdl2.yml | 38 - .../cmaki_generator/packages/spdlog.yml | 14 - .../cmaki_generator/packages/tbb.yml | 49 - .../cmaki_generator/packages/yamlcpp.yml | 16 - .../npm-mas-mas/cmaki_generator/packing.py | 139 - .../paho-mqttpp3/CMakeLists.txt | 75 - .../paho-mqttpp3/src/CMakeLists.txt | 161 - .../npm-mas-mas/cmaki_generator/pipeline.py | 287 -- .../npm-mas-mas/cmaki_generator/prepare.py | 72 - .../npm-mas-mas/cmaki_generator/purge.py | 36 - 
.../raknet/Lib/LibStatic/CMakeLists.txt | 34 - .../raknet/Source/CCRakNetSlidingWindow.cpp | 372 --- .../raknet/Source/ReplicaManager3.cpp | 2593 ----------------- .../npm-mas-mas/cmaki_generator/run.sh | 10 - .../npm-mas-mas/cmaki_generator/run_test.sh | 27 - .../npm-mas-mas/cmaki_generator/run_tests.py | 175 -- .../cmaki_generator/save_package.py | 31 - .../sdl2-emscripten/CMakeLists.txt | 1366 --------- .../cmaki_generator/sdl2/CMakeLists.txt | 1849 ------------ .../npm-mas-mas/cmaki_generator/sync.sh | 12 - .../cmaki_generator/third_party.py | 1508 ---------- .../cmaki_generator/unittest/CMakeLists.txt | 30 - .../npm-mas-mas/cmaki_generator/upload.py | 35 - .../cmaki_generator/upload_package.py | 48 - .../npm-mas-mas/cmaki_generator/utils.py | 531 ---- .../npm-mas-mas/cmaki_identifier/.travis.yml | 12 - .../cmaki_identifier/CMakeLists.txt | 6 - .../npm-mas-mas/cmaki_identifier/README.md | 19 - .../cmaki_identifier/boostorg_predef | 1 - .../cmaki_identifier/cmaki_emulator.sh | 36 - .../cmaki_identifier/cmaki_identifier.cmake | 12 - .../cmaki_identifier/cmaki_identifier.sh | 14 - .../cmaki_identifier/gcc/Debug/CMakeCache.txt | 113 - .../gcc/Debug/CMakeFiles/cmake.check_cache | 1 - .../cmaki_identifier/node_modules/.bin/cmaki | 1 - .../node_modules/.bin/node-which | 1 - .../node_modules/.package-lock.json | 471 --- .../node_modules/@nodelib/fs.scandir/LICENSE | 21 - .../@nodelib/fs.scandir/README.md | 171 -- .../@nodelib/fs.scandir/out/adapters/fs.d.ts | 20 - .../@nodelib/fs.scandir/out/adapters/fs.js | 19 - .../@nodelib/fs.scandir/out/constants.d.ts | 4 - .../@nodelib/fs.scandir/out/constants.js | 17 - .../@nodelib/fs.scandir/out/index.d.ts | 12 - .../@nodelib/fs.scandir/out/index.js | 26 - .../fs.scandir/out/providers/async.d.ts | 7 - .../fs.scandir/out/providers/async.js | 104 - .../fs.scandir/out/providers/common.d.ts | 1 - .../fs.scandir/out/providers/common.js | 13 - .../fs.scandir/out/providers/sync.d.ts | 5 - .../@nodelib/fs.scandir/out/providers/sync.js | 54 - .../@nodelib/fs.scandir/out/settings.d.ts | 20 - .../@nodelib/fs.scandir/out/settings.js | 24 - .../@nodelib/fs.scandir/out/types/index.d.ts | 20 - .../@nodelib/fs.scandir/out/types/index.js | 2 - .../@nodelib/fs.scandir/out/utils/fs.d.ts | 2 - .../@nodelib/fs.scandir/out/utils/fs.js | 19 - .../@nodelib/fs.scandir/out/utils/index.d.ts | 2 - .../@nodelib/fs.scandir/out/utils/index.js | 5 - .../@nodelib/fs.scandir/package.json | 44 - .../node_modules/@nodelib/fs.stat/LICENSE | 21 - .../node_modules/@nodelib/fs.stat/README.md | 126 - .../@nodelib/fs.stat/out/adapters/fs.d.ts | 13 - .../@nodelib/fs.stat/out/adapters/fs.js | 17 - .../@nodelib/fs.stat/out/index.d.ts | 12 - .../@nodelib/fs.stat/out/index.js | 26 - .../@nodelib/fs.stat/out/providers/async.d.ts | 4 - .../@nodelib/fs.stat/out/providers/async.js | 36 - .../@nodelib/fs.stat/out/providers/sync.d.ts | 3 - .../@nodelib/fs.stat/out/providers/sync.js | 23 - .../@nodelib/fs.stat/out/settings.d.ts | 16 - .../@nodelib/fs.stat/out/settings.js | 16 - .../@nodelib/fs.stat/out/types/index.d.ts | 4 - .../@nodelib/fs.stat/out/types/index.js | 2 - .../@nodelib/fs.stat/package.json | 37 - .../node_modules/@nodelib/fs.walk/LICENSE | 21 - .../node_modules/@nodelib/fs.walk/README.md | 215 -- .../@nodelib/fs.walk/out/index.d.ts | 14 - .../@nodelib/fs.walk/out/index.js | 34 - .../@nodelib/fs.walk/out/providers/async.d.ts | 12 - .../@nodelib/fs.walk/out/providers/async.js | 30 - .../@nodelib/fs.walk/out/providers/index.d.ts | 4 - .../@nodelib/fs.walk/out/providers/index.js | 9 - 
.../fs.walk/out/providers/stream.d.ts | 12 - .../@nodelib/fs.walk/out/providers/stream.js | 34 - .../@nodelib/fs.walk/out/providers/sync.d.ts | 10 - .../@nodelib/fs.walk/out/providers/sync.js | 14 - .../@nodelib/fs.walk/out/readers/async.d.ts | 30 - .../@nodelib/fs.walk/out/readers/async.js | 97 - .../@nodelib/fs.walk/out/readers/common.d.ts | 7 - .../@nodelib/fs.walk/out/readers/common.js | 31 - .../@nodelib/fs.walk/out/readers/reader.d.ts | 6 - .../@nodelib/fs.walk/out/readers/reader.js | 11 - .../@nodelib/fs.walk/out/readers/sync.d.ts | 15 - .../@nodelib/fs.walk/out/readers/sync.js | 59 - .../@nodelib/fs.walk/out/settings.d.ts | 30 - .../@nodelib/fs.walk/out/settings.js | 26 - .../@nodelib/fs.walk/out/types/index.d.ts | 8 - .../@nodelib/fs.walk/out/types/index.js | 2 - .../@nodelib/fs.walk/package.json | 44 - .../node_modules/braces/LICENSE | 21 - .../node_modules/braces/README.md | 586 ---- .../node_modules/braces/index.js | 170 -- .../node_modules/braces/lib/compile.js | 60 - .../node_modules/braces/lib/constants.js | 57 - .../node_modules/braces/lib/expand.js | 113 - .../node_modules/braces/lib/parse.js | 331 --- .../node_modules/braces/lib/stringify.js | 32 - .../node_modules/braces/lib/utils.js | 122 - .../node_modules/braces/package.json | 77 - .../node_modules/cross-spawn/LICENSE | 21 - .../node_modules/cross-spawn/README.md | 89 - .../node_modules/cross-spawn/index.js | 39 - .../node_modules/cross-spawn/lib/enoent.js | 59 - .../node_modules/cross-spawn/lib/parse.js | 91 - .../cross-spawn/lib/util/escape.js | 47 - .../cross-spawn/lib/util/readShebang.js | 23 - .../cross-spawn/lib/util/resolveCommand.js | 52 - .../node_modules/cross-spawn/package.json | 73 - .../node_modules/execa/index.d.ts | 564 ---- .../node_modules/execa/index.js | 268 -- .../node_modules/execa/lib/command.js | 52 - .../node_modules/execa/lib/error.js | 88 - .../node_modules/execa/lib/kill.js | 115 - .../node_modules/execa/lib/promise.js | 46 - .../node_modules/execa/lib/stdio.js | 52 - .../node_modules/execa/lib/stream.js | 97 - .../node_modules/execa/license | 9 - .../node_modules/execa/package.json | 74 - .../node_modules/execa/readme.md | 663 ----- .../node_modules/fast-glob/LICENSE | 21 - .../node_modules/fast-glob/README.md | 830 ------ .../node_modules/fast-glob/out/index.d.ts | 40 - .../node_modules/fast-glob/out/index.js | 102 - .../fast-glob/out/managers/tasks.d.ts | 22 - .../fast-glob/out/managers/tasks.js | 110 - .../fast-glob/out/providers/async.d.ts | 9 - .../fast-glob/out/providers/async.js | 23 - .../fast-glob/out/providers/filters/deep.d.ts | 16 - .../fast-glob/out/providers/filters/deep.js | 62 - .../out/providers/filters/entry.d.ts | 17 - .../fast-glob/out/providers/filters/entry.js | 85 - .../out/providers/filters/error.d.ts | 8 - .../fast-glob/out/providers/filters/error.js | 15 - .../out/providers/matchers/matcher.d.ts | 33 - .../out/providers/matchers/matcher.js | 45 - .../out/providers/matchers/partial.d.ts | 4 - .../out/providers/matchers/partial.js | 38 - .../fast-glob/out/providers/provider.d.ts | 19 - .../fast-glob/out/providers/provider.js | 48 - .../fast-glob/out/providers/stream.d.ts | 11 - .../fast-glob/out/providers/stream.js | 31 - .../fast-glob/out/providers/sync.d.ts | 9 - .../fast-glob/out/providers/sync.js | 23 - .../out/providers/transformers/entry.d.ts | 8 - .../out/providers/transformers/entry.js | 26 - .../fast-glob/out/readers/async.d.ts | 10 - .../fast-glob/out/readers/async.js | 35 - .../fast-glob/out/readers/reader.d.ts | 15 - .../fast-glob/out/readers/reader.js | 
33 - .../fast-glob/out/readers/stream.d.ts | 14 - .../fast-glob/out/readers/stream.js | 55 - .../fast-glob/out/readers/sync.d.ts | 12 - .../fast-glob/out/readers/sync.js | 43 - .../node_modules/fast-glob/out/settings.d.ts | 164 -- .../node_modules/fast-glob/out/settings.js | 59 - .../fast-glob/out/types/index.d.ts | 31 - .../node_modules/fast-glob/out/types/index.js | 2 - .../fast-glob/out/utils/array.d.ts | 2 - .../node_modules/fast-glob/out/utils/array.js | 22 - .../fast-glob/out/utils/errno.d.ts | 2 - .../node_modules/fast-glob/out/utils/errno.js | 7 - .../node_modules/fast-glob/out/utils/fs.d.ts | 4 - .../node_modules/fast-glob/out/utils/fs.js | 19 - .../fast-glob/out/utils/index.d.ts | 8 - .../node_modules/fast-glob/out/utils/index.js | 17 - .../fast-glob/out/utils/path.d.ts | 13 - .../node_modules/fast-glob/out/utils/path.js | 68 - .../fast-glob/out/utils/pattern.d.ts | 49 - .../fast-glob/out/utils/pattern.js | 206 -- .../fast-glob/out/utils/stream.d.ts | 4 - .../fast-glob/out/utils/stream.js | 17 - .../fast-glob/out/utils/string.d.ts | 2 - .../fast-glob/out/utils/string.js | 11 - .../node_modules/fast-glob/package.json | 81 - .../node_modules/fastq/.github/dependabot.yml | 11 - .../fastq/.github/workflows/ci.yml | 75 - .../node_modules/fastq/LICENSE | 13 - .../node_modules/fastq/README.md | 312 -- .../node_modules/fastq/SECURITY.md | 15 - .../node_modules/fastq/bench.js | 66 - .../node_modules/fastq/example.js | 14 - .../node_modules/fastq/example.mjs | 11 - .../node_modules/fastq/index.d.ts | 57 - .../node_modules/fastq/package.json | 53 - .../node_modules/fastq/queue.js | 311 -- .../node_modules/fastq/test/example.ts | 83 - .../node_modules/fastq/test/promise.js | 291 -- .../node_modules/fastq/test/test.js | 653 ----- .../node_modules/fastq/test/tsconfig.json | 11 - .../node_modules/fill-range/LICENSE | 21 - .../node_modules/fill-range/README.md | 237 -- .../node_modules/fill-range/index.js | 248 -- .../node_modules/fill-range/package.json | 74 - .../node_modules/get-stream/buffer-stream.js | 52 - .../node_modules/get-stream/index.d.ts | 105 - .../node_modules/get-stream/index.js | 61 - .../node_modules/get-stream/license | 9 - .../node_modules/get-stream/package.json | 47 - .../node_modules/get-stream/readme.md | 124 - .../node_modules/glob-parent/CHANGELOG.md | 110 - .../node_modules/glob-parent/LICENSE | 15 - .../node_modules/glob-parent/README.md | 137 - .../node_modules/glob-parent/index.js | 42 - .../node_modules/glob-parent/package.json | 48 - .../node_modules/human-signals/CHANGELOG.md | 11 - .../node_modules/human-signals/LICENSE | 201 -- .../node_modules/human-signals/README.md | 165 -- .../human-signals/build/src/core.js | 273 -- .../human-signals/build/src/core.js.map | 1 - .../human-signals/build/src/main.d.ts | 52 - .../human-signals/build/src/main.js | 71 - .../human-signals/build/src/main.js.map | 1 - .../human-signals/build/src/realtime.js | 19 - .../human-signals/build/src/realtime.js.map | 1 - .../human-signals/build/src/signals.js | 35 - .../human-signals/build/src/signals.js.map | 1 - .../node_modules/human-signals/package.json | 64 - .../node_modules/is-extglob/LICENSE | 21 - .../node_modules/is-extglob/README.md | 107 - .../node_modules/is-extglob/index.js | 20 - .../node_modules/is-extglob/package.json | 69 - .../node_modules/is-glob/LICENSE | 21 - .../node_modules/is-glob/README.md | 206 -- .../node_modules/is-glob/index.js | 150 - .../node_modules/is-glob/package.json | 81 - .../node_modules/is-number/LICENSE | 21 - .../node_modules/is-number/README.md | 
187 -- .../node_modules/is-number/index.js | 18 - .../node_modules/is-number/package.json | 82 - .../node_modules/is-stream/index.d.ts | 79 - .../node_modules/is-stream/index.js | 28 - .../node_modules/is-stream/license | 9 - .../node_modules/is-stream/package.json | 42 - .../node_modules/is-stream/readme.md | 60 - .../node_modules/isexe/.npmignore | 2 - .../node_modules/isexe/LICENSE | 15 - .../node_modules/isexe/README.md | 51 - .../node_modules/isexe/index.js | 57 - .../node_modules/isexe/mode.js | 41 - .../node_modules/isexe/package.json | 31 - .../node_modules/isexe/test/basic.js | 221 -- .../node_modules/isexe/windows.js | 42 - .../node_modules/merge-stream/LICENSE | 21 - .../node_modules/merge-stream/README.md | 78 - .../node_modules/merge-stream/index.js | 41 - .../node_modules/merge-stream/package.json | 19 - .../node_modules/merge2/LICENSE | 21 - .../node_modules/merge2/README.md | 144 - .../node_modules/merge2/index.js | 144 - .../node_modules/merge2/package.json | 43 - .../node_modules/micromatch/LICENSE | 21 - .../node_modules/micromatch/README.md | 1024 ------- .../node_modules/micromatch/index.js | 474 --- .../node_modules/micromatch/package.json | 119 - .../node_modules/mimic-fn/index.d.ts | 54 - .../node_modules/mimic-fn/index.js | 13 - .../node_modules/mimic-fn/license | 9 - .../node_modules/mimic-fn/package.json | 42 - .../node_modules/mimic-fn/readme.md | 69 - .../node_modules/npm-mas-mas/.travis.yml | 15 - .../node_modules/npm-mas-mas/LICENSE | 21 - .../node_modules/npm-mas-mas/Makefile | 21 - .../node_modules/npm-mas-mas/README | 57 - .../npm-mas-mas/cmaki/.travis.yml | 5 - .../npm-mas-mas/cmaki/GitUtils.cmake | 157 - .../node_modules/npm-mas-mas/cmaki/LICENSE | 22 - .../node_modules/npm-mas-mas/cmaki/README.md | 4 - .../npm-mas-mas/cmaki/Utils.cmake | 32 - .../cmaki/ci/detect_operative_system.sh | 14 - .../npm-mas-mas/cmaki/cmaki.cmake | 529 ---- .../npm-mas-mas/cmaki/facts/facts.cmake | 735 ----- .../npm-mas-mas/cmaki/init/.clang-format | 66 - .../npm-mas-mas/cmaki/junit/CTest2JUnit.xsl | 120 - .../npm-mas-mas/cmaki/junit/README.md | 3 - .../npm-mas-mas/cmaki_docker/.travis.yml | 4 - .../npm-mas-mas/cmaki_docker/LICENSE | 21 - .../npm-mas-mas/cmaki_docker/README.md | 11 - .../npm-mas-mas/cmaki_docker/build.sh | 40 - .../cmaki_generator/CMakeLists.txt | 95 - .../npm-mas-mas/cmaki_generator/LICENSE | 22 - .../npm-mas-mas/cmaki_generator/README.md | 22 - .../npm-mas-mas/cmaki_generator/build | 10 - .../npm-mas-mas/cmaki_generator/build.cmd | 11 - .../npm-mas-mas/cmaki_generator/build.py | 757 ----- .../cmaki_generator/check_remote_version.py | 233 -- .../npm-mas-mas/cmaki_generator/common.yml | 498 ---- .../cmaki_generator/compilation.py | 238 -- .../cmaki_generator/download_package.py | 11 - .../cmaki_generator/get_package.py | 26 - .../cmaki_generator/get_return_code.py | 35 - .../cmaki_generator/gwen/CMakeLists.txt | 47 - .../cmaki_generator/hash_version.py | 172 -- .../cmaki_generator/junit/CTest2JUnit.xsl | 120 - .../cmaki_generator/junit/README.md | 3 - .../librocket/Build/CMakeLists.txt | 687 ----- .../cmaki_generator/librocket/CMakeLists.txt | 2 - .../cmaki_generator/noise/CMakeLists.txt | 26 - .../ois/demos/FFConsoleDemo.cpp | 1147 -------- .../cmaki_generator/ois/demos/Makefile.am | 11 - .../cmaki_generator/ois/demos/OISConsole.cpp | 459 --- .../ois/src/linux/LinuxForceFeedback.cpp | 563 ---- .../ois/src/linux/LinuxJoyStickEvents.cpp | 308 -- .../cmaki_generator/oxygine/CMakeLists.txt | 546 ---- .../cmaki_generator/packages/assimp.yml | 13 - 
.../cmaki_generator/packages/box2d.yml | 23 - .../cmaki_generator/packages/bullet2.yml | 54 - .../cmaki_generator/packages/cryptopp.yml | 70 - .../packages/dune-freetype.yml | 28 - .../cmaki_generator/packages/dune-glew.yml | 29 - .../cmaki_generator/packages/dune-zlib.yml | 38 - .../cmaki_generator/packages/fmod.yml | 20 - .../cmaki_generator/packages/freeimage.yml | 36 - .../packages/freeimage_cmake.yml | 40 - .../cmaki_generator/packages/google-gmock.yml | 61 - .../cmaki_generator/packages/gwen.yml | 11 - .../cmaki_generator/packages/haxx-libcurl.yml | 71 - .../cmaki_generator/packages/json.yml | 26 - .../cmaki_generator/packages/librocket.yml | 24 - .../cmaki_generator/packages/msgpack.yml | 10 - .../cmaki_generator/packages/noise.yml | 11 - .../cmaki_generator/packages/ois.yml | 19 - .../cmaki_generator/packages/openssl.yml | 24 - .../cmaki_generator/packages/oxygine.yml | 25 - .../cmaki_generator/packages/paho-mqtt3.yml | 22 - .../cmaki_generator/packages/paho-mqttpp3.yml | 21 - .../cmaki_generator/packages/pugixml.yml | 11 - .../cmaki_generator/packages/python.yml | 21 - .../cmaki_generator/packages/raknet.yml | 11 - .../packages/restclient-cpp.yml | 17 - .../cmaki_generator/packages/sdl2.yml | 38 - .../cmaki_generator/packages/spdlog.yml | 14 - .../cmaki_generator/packages/tbb.yml | 49 - .../cmaki_generator/packages/yamlcpp.yml | 16 - .../npm-mas-mas/cmaki_generator/packing.py | 139 - .../paho-mqttpp3/CMakeLists.txt | 75 - .../paho-mqttpp3/src/CMakeLists.txt | 161 - .../npm-mas-mas/cmaki_generator/pipeline.py | 287 -- .../npm-mas-mas/cmaki_generator/prepare.py | 72 - .../npm-mas-mas/cmaki_generator/purge.py | 36 - .../raknet/Lib/LibStatic/CMakeLists.txt | 34 - .../raknet/Source/CCRakNetSlidingWindow.cpp | 372 --- .../raknet/Source/ReplicaManager3.cpp | 2593 ----------------- .../npm-mas-mas/cmaki_generator/run.sh | 10 - .../npm-mas-mas/cmaki_generator/run_test.sh | 27 - .../npm-mas-mas/cmaki_generator/run_tests.py | 175 -- .../cmaki_generator/save_package.py | 31 - .../sdl2-emscripten/CMakeLists.txt | 1366 --------- .../cmaki_generator/sdl2/CMakeLists.txt | 1849 ------------ .../npm-mas-mas/cmaki_generator/sync.sh | 12 - .../cmaki_generator/third_party.py | 1508 ---------- .../cmaki_generator/unittest/CMakeLists.txt | 30 - .../npm-mas-mas/cmaki_generator/upload.py | 35 - .../cmaki_generator/upload_package.py | 48 - .../npm-mas-mas/cmaki_generator/utils.py | 531 ---- .../npm-mas-mas/cmaki_identifier/.travis.yml | 12 - .../cmaki_identifier/CMakeLists.txt | 6 - .../npm-mas-mas/cmaki_identifier/README.md | 19 - .../cmaki_identifier/cmaki_emulator.sh | 36 - .../cmaki_identifier/cmaki_identifier.cmake | 12 - .../cmaki_identifier/cmaki_identifier.sh | 14 - .../npm-mas-mas/cmaki_identifier/npm-do | 3 - .../npm-mas-mas/cmaki_identifier/package.json | 30 - .../npm-mas-mas/cmaki_identifier/setup.cmd | 7 - .../npm-mas-mas/cmaki_identifier/setup.sh | 8 - .../cmaki_identifier/tests/CMakeLists.txt | 33 - .../tests/cmaki_identifier.cpp | 345 --- .../npm-mas-mas/cmaki_scripts/.travis.yml | 5 - .../npm-mas-mas/cmaki_scripts/LICENSE | 21 - .../npm-mas-mas/cmaki_scripts/README.md | 9 - .../npm-mas-mas/cmaki_scripts/bootstrap.cmd | 15 - .../npm-mas-mas/cmaki_scripts/ci.cmd | 40 - .../npm-mas-mas/cmaki_scripts/ci.sh | 46 - .../npm-mas-mas/cmaki_scripts/clean.cmd | 3 - .../npm-mas-mas/cmaki_scripts/clean.sh | 16 - .../npm-mas-mas/cmaki_scripts/cmaki.cmd | 22 - .../npm-mas-mas/cmaki_scripts/cmaki.js | 193 -- .../cmaki_scripts/cmaki_depends.cmd | 7 - .../cmaki_scripts/cmaki_depends.sh | 50 - 
.../npm-mas-mas/cmaki_scripts/compile.cmd | 14 - .../npm-mas-mas/cmaki_scripts/compile.sh | 16 - .../cmaki_scripts/create_package.cmd | 28 - .../cmaki_scripts/create_package.sh | 15 - .../npm-mas-mas/cmaki_scripts/docker.sh | 22 - .../cmaki_scripts/head_detached.cmd | 6 - .../cmaki_scripts/head_detached.sh | 7 - .../npm-mas-mas/cmaki_scripts/init.sh | 18 - .../cmaki_scripts/make_artifact.cmd | 30 - .../cmaki_scripts/make_artifact.sh | 18 - .../npm-mas-mas/cmaki_scripts/publish.cmd | 3 - .../npm-mas-mas/cmaki_scripts/publish.sh | 3 - .../npm-mas-mas/cmaki_scripts/replace.sh | 44 - .../npm-mas-mas/cmaki_scripts/run.cmd | 5 - .../npm-mas-mas/cmaki_scripts/search.sh | 4 - .../npm-mas-mas/cmaki_scripts/setup.cmd | 64 - .../npm-mas-mas/cmaki_scripts/setup.sh | 54 - .../npm-mas-mas/cmaki_scripts/test.cmd | 15 - .../npm-mas-mas/cmaki_scripts/test.sh | 52 - .../npm-mas-mas/cmaki_scripts/upload.cmd | 29 - .../npm-mas-mas/cmaki_scripts/upload.sh | 12 - .../cmaki_scripts/upload_package.cmd | 5 - .../cmaki_scripts/upload_package.sh | 7 - .../npm-mas-mas/docker-compose.yml | 32 - .../docker/Dockerfile.android-arm64 | 9 - .../npm-mas-mas/docker/Dockerfile.linux-x64 | 16 - .../npm-mas-mas/docker/Dockerfile.windows-x64 | 9 - .../npm-mas-mas/docker/entrypoint.sh | 21 - .../node_modules/npm-mas-mas/package.json | 29 - .../npm-mas-mas/servfactor/Dockerfile | 15 - .../npm-mas-mas/servfactor/Makefile | 3 - .../npm-mas-mas/servfactor/NOTES.md | 4 - .../npm-mas-mas/servfactor/README.md | 10 - .../npm-mas-mas/servfactor/docker-compose.yml | 11 - .../npm-mas-mas/servfactor/download.php | 58 - .../npm-mas-mas/servfactor/index.php | 227 -- .../npm-mas-mas/servfactor/packages/README.md | 2 - .../npm-mas-mas/servfactor/stats.php | 68 - .../npm-mas-mas/servfactor/upload.php | 76 - .../npm-mas-mas/servfactor/util.php | 2584 ---------------- .../node_modules/npm-run-path/index.d.ts | 89 - .../node_modules/npm-run-path/index.js | 47 - .../node_modules/npm-run-path/license | 9 - .../node_modules/npm-run-path/package.json | 44 - .../node_modules/npm-run-path/readme.md | 115 - .../node_modules/onetime/index.d.ts | 64 - .../node_modules/onetime/index.js | 44 - .../node_modules/onetime/license | 9 - .../node_modules/onetime/package.json | 43 - .../node_modules/onetime/readme.md | 94 - .../node_modules/path-key/index.d.ts | 40 - .../node_modules/path-key/index.js | 16 - .../node_modules/path-key/license | 9 - .../node_modules/path-key/package.json | 39 - .../node_modules/path-key/readme.md | 61 - .../node_modules/picomatch/CHANGELOG.md | 136 - .../node_modules/picomatch/LICENSE | 21 - .../node_modules/picomatch/README.md | 708 ----- .../node_modules/picomatch/index.js | 3 - .../node_modules/picomatch/lib/constants.js | 179 -- .../node_modules/picomatch/lib/parse.js | 1091 ------- .../node_modules/picomatch/lib/picomatch.js | 342 --- .../node_modules/picomatch/lib/scan.js | 391 --- .../node_modules/picomatch/lib/utils.js | 64 - .../node_modules/picomatch/package.json | 81 - .../node_modules/queue-microtask/LICENSE | 20 - .../node_modules/queue-microtask/README.md | 90 - .../node_modules/queue-microtask/index.d.ts | 2 - .../node_modules/queue-microtask/index.js | 9 - .../node_modules/queue-microtask/package.json | 55 - .../reusify/.github/dependabot.yml | 7 - .../reusify/.github/workflows/ci.yml | 96 - .../node_modules/reusify/LICENSE | 22 - .../node_modules/reusify/README.md | 139 - .../node_modules/reusify/SECURITY.md | 15 - .../benchmarks/createNoCodeFunction.js | 30 - .../node_modules/reusify/benchmarks/fib.js | 13 - 
.../reusify/benchmarks/reuseNoCodeFunction.js | 38 - .../node_modules/reusify/eslint.config.js | 14 - .../node_modules/reusify/package.json | 50 - .../node_modules/reusify/reusify.d.ts | 14 - .../node_modules/reusify/reusify.js | 33 - .../node_modules/reusify/test.js | 66 - .../node_modules/reusify/tsconfig.json | 11 - .../node_modules/run-parallel/LICENSE | 20 - .../node_modules/run-parallel/README.md | 85 - .../node_modules/run-parallel/index.js | 51 - .../node_modules/run-parallel/package.json | 58 - .../node_modules/shebang-command/index.js | 19 - .../node_modules/shebang-command/license | 9 - .../node_modules/shebang-command/package.json | 34 - .../node_modules/shebang-command/readme.md | 34 - .../node_modules/shebang-regex/index.d.ts | 22 - .../node_modules/shebang-regex/index.js | 2 - .../node_modules/shebang-regex/license | 9 - .../node_modules/shebang-regex/package.json | 35 - .../node_modules/shebang-regex/readme.md | 33 - .../node_modules/shelljs/LICENSE | 29 - .../node_modules/shelljs/README.md | 949 ------ .../node_modules/shelljs/global.js | 15 - .../node_modules/shelljs/make.js | 57 - .../node_modules/shelljs/package.json | 90 - .../node_modules/shelljs/plugin.js | 16 - .../node_modules/shelljs/shell.js | 216 -- .../node_modules/shelljs/src/cat.js | 76 - .../node_modules/shelljs/src/cd.js | 40 - .../node_modules/shelljs/src/chmod.js | 222 -- .../node_modules/shelljs/src/cmd.js | 138 - .../node_modules/shelljs/src/common.js | 545 ---- .../node_modules/shelljs/src/cp.js | 314 -- .../node_modules/shelljs/src/dirs.js | 210 -- .../node_modules/shelljs/src/echo.js | 62 - .../node_modules/shelljs/src/error.js | 15 - .../node_modules/shelljs/src/errorCode.js | 10 - .../node_modules/shelljs/src/exec-child.js | 71 - .../node_modules/shelljs/src/exec.js | 255 -- .../node_modules/shelljs/src/find.js | 66 - .../node_modules/shelljs/src/grep.js | 198 -- .../node_modules/shelljs/src/head.js | 107 - .../node_modules/shelljs/src/ln.js | 75 - .../node_modules/shelljs/src/ls.js | 155 - .../node_modules/shelljs/src/mkdir.js | 102 - .../node_modules/shelljs/src/mv.js | 119 - .../node_modules/shelljs/src/popd.js | 1 - .../node_modules/shelljs/src/pushd.js | 1 - .../node_modules/shelljs/src/pwd.js | 16 - .../node_modules/shelljs/src/rm.js | 201 -- .../node_modules/shelljs/src/sed.js | 95 - .../node_modules/shelljs/src/set.js | 55 - .../node_modules/shelljs/src/sort.js | 98 - .../node_modules/shelljs/src/tail.js | 90 - .../node_modules/shelljs/src/tempdir.js | 75 - .../node_modules/shelljs/src/test.js | 86 - .../node_modules/shelljs/src/to.js | 38 - .../node_modules/shelljs/src/toEnd.js | 37 - .../node_modules/shelljs/src/touch.js | 117 - .../node_modules/shelljs/src/uniq.js | 93 - .../node_modules/shelljs/src/which.js | 119 - .../node_modules/signal-exit/LICENSE.txt | 16 - .../node_modules/signal-exit/README.md | 39 - .../node_modules/signal-exit/index.js | 202 -- .../node_modules/signal-exit/package.json | 38 - .../node_modules/signal-exit/signals.js | 53 - .../node_modules/strip-final-newline/index.js | 16 - .../node_modules/strip-final-newline/license | 9 - .../strip-final-newline/package.json | 40 - .../strip-final-newline/readme.md | 30 - .../node_modules/to-regex-range/LICENSE | 21 - .../node_modules/to-regex-range/README.md | 305 -- .../node_modules/to-regex-range/index.js | 288 -- .../node_modules/to-regex-range/package.json | 88 - .../node_modules/which/CHANGELOG.md | 166 -- .../node_modules/which/LICENSE | 15 - .../node_modules/which/README.md | 54 - .../node_modules/which/bin/node-which 
| 52 - .../node_modules/which/package.json | 43 - .../node_modules/which/which.js | 125 - .../npm-mas-mas/cmaki_identifier/npm-do | 3 - .../cmaki_identifier/package-lock.json | 480 --- .../npm-mas-mas/cmaki_identifier/package.json | 30 - .../npm-mas-mas/cmaki_identifier/setup.cmd | 7 - .../npm-mas-mas/cmaki_identifier/setup.sh | 8 - .../cmaki_identifier/tests/CMakeLists.txt | 33 - .../tests/cmaki_identifier.cpp | 345 --- .../npm-mas-mas/cmaki_scripts/.travis.yml | 5 - .../npm-mas-mas/cmaki_scripts/LICENSE | 21 - .../npm-mas-mas/cmaki_scripts/README.md | 9 - .../npm-mas-mas/cmaki_scripts/bootstrap.cmd | 15 - node_modules/npm-mas-mas/cmaki_scripts/ci.cmd | 40 - node_modules/npm-mas-mas/cmaki_scripts/ci.sh | 46 - .../npm-mas-mas/cmaki_scripts/clean.cmd | 3 - .../npm-mas-mas/cmaki_scripts/clean.sh | 16 - .../npm-mas-mas/cmaki_scripts/cmaki.cmd | 22 - .../npm-mas-mas/cmaki_scripts/cmaki.js | 193 -- .../cmaki_scripts/cmaki_depends.cmd | 7 - .../cmaki_scripts/cmaki_depends.sh | 50 - .../npm-mas-mas/cmaki_scripts/compile.cmd | 14 - .../npm-mas-mas/cmaki_scripts/compile.sh | 16 - .../cmaki_scripts/create_package.cmd | 28 - .../cmaki_scripts/create_package.sh | 15 - .../npm-mas-mas/cmaki_scripts/docker.sh | 22 - .../cmaki_scripts/head_detached.cmd | 6 - .../cmaki_scripts/head_detached.sh | 7 - .../npm-mas-mas/cmaki_scripts/init.sh | 18 - .../cmaki_scripts/make_artifact.cmd | 30 - .../cmaki_scripts/make_artifact.sh | 18 - .../npm-mas-mas/cmaki_scripts/publish.cmd | 3 - .../npm-mas-mas/cmaki_scripts/publish.sh | 3 - .../npm-mas-mas/cmaki_scripts/replace.sh | 44 - .../npm-mas-mas/cmaki_scripts/run.cmd | 5 - .../npm-mas-mas/cmaki_scripts/search.sh | 4 - .../npm-mas-mas/cmaki_scripts/setup.cmd | 64 - .../npm-mas-mas/cmaki_scripts/setup.sh | 54 - .../npm-mas-mas/cmaki_scripts/test.cmd | 15 - .../npm-mas-mas/cmaki_scripts/test.sh | 52 - .../npm-mas-mas/cmaki_scripts/upload.cmd | 29 - .../npm-mas-mas/cmaki_scripts/upload.sh | 12 - .../cmaki_scripts/upload_package.cmd | 5 - .../cmaki_scripts/upload_package.sh | 7 - node_modules/npm-mas-mas/docker-compose.yml | 32 - .../docker/Dockerfile.android-arm64 | 9 - .../npm-mas-mas/docker/Dockerfile.linux-x64 | 16 - .../npm-mas-mas/docker/Dockerfile.windows-x64 | 9 - node_modules/npm-mas-mas/docker/entrypoint.sh | 21 - node_modules/npm-mas-mas/package.json | 29 - .../npm-mas-mas/servfactor/Dockerfile | 15 - node_modules/npm-mas-mas/servfactor/Makefile | 3 - node_modules/npm-mas-mas/servfactor/NOTES.md | 4 - node_modules/npm-mas-mas/servfactor/README.md | 10 - .../npm-mas-mas/servfactor/docker-compose.yml | 11 - .../npm-mas-mas/servfactor/download.php | 58 - node_modules/npm-mas-mas/servfactor/index.php | 227 -- .../npm-mas-mas/servfactor/packages/README.md | 2 - node_modules/npm-mas-mas/servfactor/stats.php | 68 - .../npm-mas-mas/servfactor/upload.php | 76 - node_modules/npm-mas-mas/servfactor/util.php | 2584 ---------------- node_modules/npm-run-path/index.d.ts | 89 - node_modules/npm-run-path/index.js | 47 - node_modules/npm-run-path/license | 9 - node_modules/npm-run-path/package.json | 44 - node_modules/npm-run-path/readme.md | 115 - node_modules/onetime/index.d.ts | 64 - node_modules/onetime/index.js | 44 - node_modules/onetime/license | 9 - node_modules/onetime/package.json | 43 - node_modules/onetime/readme.md | 94 - node_modules/path-key/index.d.ts | 40 - node_modules/path-key/index.js | 16 - node_modules/path-key/license | 9 - node_modules/path-key/package.json | 39 - node_modules/path-key/readme.md | 61 - node_modules/picomatch/CHANGELOG.md | 136 - 
node_modules/picomatch/LICENSE | 21 - node_modules/picomatch/README.md | 708 ----- node_modules/picomatch/index.js | 3 - node_modules/picomatch/lib/constants.js | 179 -- node_modules/picomatch/lib/parse.js | 1091 ------- node_modules/picomatch/lib/picomatch.js | 342 --- node_modules/picomatch/lib/scan.js | 391 --- node_modules/picomatch/lib/utils.js | 64 - node_modules/picomatch/package.json | 81 - node_modules/queue-microtask/LICENSE | 20 - node_modules/queue-microtask/README.md | 90 - node_modules/queue-microtask/index.d.ts | 2 - node_modules/queue-microtask/index.js | 9 - node_modules/queue-microtask/package.json | 55 - node_modules/reusify/.github/dependabot.yml | 7 - node_modules/reusify/.github/workflows/ci.yml | 96 - node_modules/reusify/LICENSE | 22 - node_modules/reusify/README.md | 139 - node_modules/reusify/SECURITY.md | 15 - .../benchmarks/createNoCodeFunction.js | 30 - node_modules/reusify/benchmarks/fib.js | 13 - .../reusify/benchmarks/reuseNoCodeFunction.js | 38 - node_modules/reusify/eslint.config.js | 14 - node_modules/reusify/package.json | 50 - node_modules/reusify/reusify.d.ts | 14 - node_modules/reusify/reusify.js | 33 - node_modules/reusify/test.js | 66 - node_modules/reusify/tsconfig.json | 11 - node_modules/run-parallel/LICENSE | 20 - node_modules/run-parallel/README.md | 85 - node_modules/run-parallel/index.js | 51 - node_modules/run-parallel/package.json | 58 - node_modules/shebang-command/index.js | 19 - node_modules/shebang-command/license | 9 - node_modules/shebang-command/package.json | 34 - node_modules/shebang-command/readme.md | 34 - node_modules/shebang-regex/index.d.ts | 22 - node_modules/shebang-regex/index.js | 2 - node_modules/shebang-regex/license | 9 - node_modules/shebang-regex/package.json | 35 - node_modules/shebang-regex/readme.md | 33 - node_modules/shelljs/LICENSE | 29 - node_modules/shelljs/README.md | 949 ------ node_modules/shelljs/global.js | 15 - node_modules/shelljs/make.js | 57 - node_modules/shelljs/package.json | 90 - node_modules/shelljs/plugin.js | 16 - node_modules/shelljs/shell.js | 216 -- node_modules/shelljs/src/cat.js | 76 - node_modules/shelljs/src/cd.js | 40 - node_modules/shelljs/src/chmod.js | 222 -- node_modules/shelljs/src/cmd.js | 138 - node_modules/shelljs/src/common.js | 545 ---- node_modules/shelljs/src/cp.js | 314 -- node_modules/shelljs/src/dirs.js | 210 -- node_modules/shelljs/src/echo.js | 62 - node_modules/shelljs/src/error.js | 15 - node_modules/shelljs/src/errorCode.js | 10 - node_modules/shelljs/src/exec-child.js | 71 - node_modules/shelljs/src/exec.js | 255 -- node_modules/shelljs/src/find.js | 66 - node_modules/shelljs/src/grep.js | 198 -- node_modules/shelljs/src/head.js | 107 - node_modules/shelljs/src/ln.js | 75 - node_modules/shelljs/src/ls.js | 155 - node_modules/shelljs/src/mkdir.js | 102 - node_modules/shelljs/src/mv.js | 119 - node_modules/shelljs/src/popd.js | 1 - node_modules/shelljs/src/pushd.js | 1 - node_modules/shelljs/src/pwd.js | 16 - node_modules/shelljs/src/rm.js | 201 -- node_modules/shelljs/src/sed.js | 95 - node_modules/shelljs/src/set.js | 55 - node_modules/shelljs/src/sort.js | 98 - node_modules/shelljs/src/tail.js | 90 - node_modules/shelljs/src/tempdir.js | 75 - node_modules/shelljs/src/test.js | 86 - node_modules/shelljs/src/to.js | 38 - node_modules/shelljs/src/toEnd.js | 37 - node_modules/shelljs/src/touch.js | 117 - node_modules/shelljs/src/uniq.js | 93 - node_modules/shelljs/src/which.js | 119 - node_modules/signal-exit/LICENSE.txt | 16 - node_modules/signal-exit/README.md | 39 - 
node_modules/signal-exit/index.js | 202 -- node_modules/signal-exit/package.json | 38 - node_modules/signal-exit/signals.js | 53 - node_modules/strip-final-newline/index.js | 16 - node_modules/strip-final-newline/license | 9 - node_modules/strip-final-newline/package.json | 40 - node_modules/strip-final-newline/readme.md | 30 - node_modules/to-regex-range/LICENSE | 21 - node_modules/to-regex-range/README.md | 305 -- node_modules/to-regex-range/index.js | 288 -- node_modules/to-regex-range/package.json | 88 - node_modules/which/CHANGELOG.md | 166 -- node_modules/which/LICENSE | 15 - node_modules/which/README.md | 54 - node_modules/which/bin/node-which | 52 - node_modules/which/package.json | 43 - node_modules/which/which.js | 125 - 1038 files changed, 127 insertions(+), 104534 deletions(-) delete mode 120000 node_modules/.bin/cmaki delete mode 120000 node_modules/.bin/node-which delete mode 100644 node_modules/.package-lock.json delete mode 100644 node_modules/@nodelib/fs.scandir/LICENSE delete mode 100644 node_modules/@nodelib/fs.scandir/README.md delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/adapters/fs.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/constants.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/constants.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/index.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/async.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/common.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/providers/sync.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/settings.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/types/index.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/types/index.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/fs.js delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.d.ts delete mode 100644 node_modules/@nodelib/fs.scandir/out/utils/index.js delete mode 100644 node_modules/@nodelib/fs.scandir/package.json delete mode 100644 node_modules/@nodelib/fs.stat/LICENSE delete mode 100644 node_modules/@nodelib/fs.stat/README.md delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/adapters/fs.js delete mode 100644 node_modules/@nodelib/fs.stat/out/index.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/index.js delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/async.js delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/providers/sync.js delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/settings.js delete mode 100644 node_modules/@nodelib/fs.stat/out/types/index.d.ts delete mode 100644 node_modules/@nodelib/fs.stat/out/types/index.js delete mode 100644 
node_modules/@nodelib/fs.stat/package.json delete mode 100644 node_modules/@nodelib/fs.walk/LICENSE delete mode 100644 node_modules/@nodelib/fs.walk/README.md delete mode 100644 node_modules/@nodelib/fs.walk/out/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/index.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/async.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/index.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/stream.js delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/providers/sync.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/async.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/common.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/reader.js delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/readers/sync.js delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/settings.js delete mode 100644 node_modules/@nodelib/fs.walk/out/types/index.d.ts delete mode 100644 node_modules/@nodelib/fs.walk/out/types/index.js delete mode 100644 node_modules/@nodelib/fs.walk/package.json delete mode 100644 node_modules/braces/LICENSE delete mode 100644 node_modules/braces/README.md delete mode 100644 node_modules/braces/index.js delete mode 100644 node_modules/braces/lib/compile.js delete mode 100644 node_modules/braces/lib/constants.js delete mode 100644 node_modules/braces/lib/expand.js delete mode 100644 node_modules/braces/lib/parse.js delete mode 100644 node_modules/braces/lib/stringify.js delete mode 100644 node_modules/braces/lib/utils.js delete mode 100644 node_modules/braces/package.json delete mode 100644 node_modules/cross-spawn/LICENSE delete mode 100644 node_modules/cross-spawn/README.md delete mode 100644 node_modules/cross-spawn/index.js delete mode 100644 node_modules/cross-spawn/lib/enoent.js delete mode 100644 node_modules/cross-spawn/lib/parse.js delete mode 100644 node_modules/cross-spawn/lib/util/escape.js delete mode 100644 node_modules/cross-spawn/lib/util/readShebang.js delete mode 100644 node_modules/cross-spawn/lib/util/resolveCommand.js delete mode 100644 node_modules/cross-spawn/package.json delete mode 100644 node_modules/execa/index.d.ts delete mode 100644 node_modules/execa/index.js delete mode 100644 node_modules/execa/lib/command.js delete mode 100644 node_modules/execa/lib/error.js delete mode 100644 node_modules/execa/lib/kill.js delete mode 100644 node_modules/execa/lib/promise.js delete mode 100644 node_modules/execa/lib/stdio.js delete mode 100644 node_modules/execa/lib/stream.js delete mode 100644 node_modules/execa/license delete mode 100644 node_modules/execa/package.json delete mode 100644 node_modules/execa/readme.md delete mode 100644 node_modules/fast-glob/LICENSE delete mode 100644 node_modules/fast-glob/README.md delete mode 100644 node_modules/fast-glob/out/index.d.ts delete mode 100644 
node_modules/fast-glob/out/index.js delete mode 100644 node_modules/fast-glob/out/managers/tasks.d.ts delete mode 100644 node_modules/fast-glob/out/managers/tasks.js delete mode 100644 node_modules/fast-glob/out/providers/async.d.ts delete mode 100644 node_modules/fast-glob/out/providers/async.js delete mode 100644 node_modules/fast-glob/out/providers/filters/deep.d.ts delete mode 100644 node_modules/fast-glob/out/providers/filters/deep.js delete mode 100644 node_modules/fast-glob/out/providers/filters/entry.d.ts delete mode 100644 node_modules/fast-glob/out/providers/filters/entry.js delete mode 100644 node_modules/fast-glob/out/providers/filters/error.d.ts delete mode 100644 node_modules/fast-glob/out/providers/filters/error.js delete mode 100644 node_modules/fast-glob/out/providers/matchers/matcher.d.ts delete mode 100644 node_modules/fast-glob/out/providers/matchers/matcher.js delete mode 100644 node_modules/fast-glob/out/providers/matchers/partial.d.ts delete mode 100644 node_modules/fast-glob/out/providers/matchers/partial.js delete mode 100644 node_modules/fast-glob/out/providers/provider.d.ts delete mode 100644 node_modules/fast-glob/out/providers/provider.js delete mode 100644 node_modules/fast-glob/out/providers/stream.d.ts delete mode 100644 node_modules/fast-glob/out/providers/stream.js delete mode 100644 node_modules/fast-glob/out/providers/sync.d.ts delete mode 100644 node_modules/fast-glob/out/providers/sync.js delete mode 100644 node_modules/fast-glob/out/providers/transformers/entry.d.ts delete mode 100644 node_modules/fast-glob/out/providers/transformers/entry.js delete mode 100644 node_modules/fast-glob/out/readers/async.d.ts delete mode 100644 node_modules/fast-glob/out/readers/async.js delete mode 100644 node_modules/fast-glob/out/readers/reader.d.ts delete mode 100644 node_modules/fast-glob/out/readers/reader.js delete mode 100644 node_modules/fast-glob/out/readers/stream.d.ts delete mode 100644 node_modules/fast-glob/out/readers/stream.js delete mode 100644 node_modules/fast-glob/out/readers/sync.d.ts delete mode 100644 node_modules/fast-glob/out/readers/sync.js delete mode 100644 node_modules/fast-glob/out/settings.d.ts delete mode 100644 node_modules/fast-glob/out/settings.js delete mode 100644 node_modules/fast-glob/out/types/index.d.ts delete mode 100644 node_modules/fast-glob/out/types/index.js delete mode 100644 node_modules/fast-glob/out/utils/array.d.ts delete mode 100644 node_modules/fast-glob/out/utils/array.js delete mode 100644 node_modules/fast-glob/out/utils/errno.d.ts delete mode 100644 node_modules/fast-glob/out/utils/errno.js delete mode 100644 node_modules/fast-glob/out/utils/fs.d.ts delete mode 100644 node_modules/fast-glob/out/utils/fs.js delete mode 100644 node_modules/fast-glob/out/utils/index.d.ts delete mode 100644 node_modules/fast-glob/out/utils/index.js delete mode 100644 node_modules/fast-glob/out/utils/path.d.ts delete mode 100644 node_modules/fast-glob/out/utils/path.js delete mode 100644 node_modules/fast-glob/out/utils/pattern.d.ts delete mode 100644 node_modules/fast-glob/out/utils/pattern.js delete mode 100644 node_modules/fast-glob/out/utils/stream.d.ts delete mode 100644 node_modules/fast-glob/out/utils/stream.js delete mode 100644 node_modules/fast-glob/out/utils/string.d.ts delete mode 100644 node_modules/fast-glob/out/utils/string.js delete mode 100644 node_modules/fast-glob/package.json delete mode 100644 node_modules/fastq/.github/dependabot.yml delete mode 100644 node_modules/fastq/.github/workflows/ci.yml delete mode 100644 
node_modules/fastq/LICENSE delete mode 100644 node_modules/fastq/README.md delete mode 100644 node_modules/fastq/SECURITY.md delete mode 100644 node_modules/fastq/bench.js delete mode 100644 node_modules/fastq/example.js delete mode 100644 node_modules/fastq/example.mjs delete mode 100644 node_modules/fastq/index.d.ts delete mode 100644 node_modules/fastq/package.json delete mode 100644 node_modules/fastq/queue.js delete mode 100644 node_modules/fastq/test/example.ts delete mode 100644 node_modules/fastq/test/promise.js delete mode 100644 node_modules/fastq/test/test.js delete mode 100644 node_modules/fastq/test/tsconfig.json delete mode 100644 node_modules/fill-range/LICENSE delete mode 100644 node_modules/fill-range/README.md delete mode 100644 node_modules/fill-range/index.js delete mode 100644 node_modules/fill-range/package.json delete mode 100644 node_modules/get-stream/buffer-stream.js delete mode 100644 node_modules/get-stream/index.d.ts delete mode 100644 node_modules/get-stream/index.js delete mode 100644 node_modules/get-stream/license delete mode 100644 node_modules/get-stream/package.json delete mode 100644 node_modules/get-stream/readme.md delete mode 100644 node_modules/glob-parent/CHANGELOG.md delete mode 100644 node_modules/glob-parent/LICENSE delete mode 100644 node_modules/glob-parent/README.md delete mode 100644 node_modules/glob-parent/index.js delete mode 100644 node_modules/glob-parent/package.json delete mode 100644 node_modules/human-signals/CHANGELOG.md delete mode 100644 node_modules/human-signals/LICENSE delete mode 100644 node_modules/human-signals/README.md delete mode 100644 node_modules/human-signals/build/src/core.js delete mode 100644 node_modules/human-signals/build/src/core.js.map delete mode 100644 node_modules/human-signals/build/src/main.d.ts delete mode 100644 node_modules/human-signals/build/src/main.js delete mode 100644 node_modules/human-signals/build/src/main.js.map delete mode 100644 node_modules/human-signals/build/src/realtime.js delete mode 100644 node_modules/human-signals/build/src/realtime.js.map delete mode 100644 node_modules/human-signals/build/src/signals.js delete mode 100644 node_modules/human-signals/build/src/signals.js.map delete mode 100644 node_modules/human-signals/package.json delete mode 100644 node_modules/is-extglob/LICENSE delete mode 100644 node_modules/is-extglob/README.md delete mode 100644 node_modules/is-extglob/index.js delete mode 100644 node_modules/is-extglob/package.json delete mode 100644 node_modules/is-glob/LICENSE delete mode 100644 node_modules/is-glob/README.md delete mode 100644 node_modules/is-glob/index.js delete mode 100644 node_modules/is-glob/package.json delete mode 100644 node_modules/is-number/LICENSE delete mode 100644 node_modules/is-number/README.md delete mode 100644 node_modules/is-number/index.js delete mode 100644 node_modules/is-number/package.json delete mode 100644 node_modules/is-stream/index.d.ts delete mode 100644 node_modules/is-stream/index.js delete mode 100644 node_modules/is-stream/license delete mode 100644 node_modules/is-stream/package.json delete mode 100644 node_modules/is-stream/readme.md delete mode 100644 node_modules/isexe/.npmignore delete mode 100644 node_modules/isexe/LICENSE delete mode 100644 node_modules/isexe/README.md delete mode 100644 node_modules/isexe/index.js delete mode 100644 node_modules/isexe/mode.js delete mode 100644 node_modules/isexe/package.json delete mode 100644 node_modules/isexe/test/basic.js delete mode 100644 node_modules/isexe/windows.js 
delete mode 100644 node_modules/merge-stream/LICENSE delete mode 100644 node_modules/merge-stream/README.md delete mode 100644 node_modules/merge-stream/index.js delete mode 100644 node_modules/merge-stream/package.json delete mode 100644 node_modules/merge2/LICENSE delete mode 100644 node_modules/merge2/README.md delete mode 100644 node_modules/merge2/index.js delete mode 100644 node_modules/merge2/package.json delete mode 100644 node_modules/metacommon/.travis.yml delete mode 100644 node_modules/metacommon/README.md delete mode 100644 node_modules/metacommon/cmaki.yml delete mode 100644 node_modules/metacommon/common.h delete mode 100644 node_modules/metacommon/compile.sh delete mode 100644 node_modules/metacommon/package.json delete mode 100644 node_modules/metacommon/setup.sh delete mode 100755 node_modules/micromatch/LICENSE delete mode 100644 node_modules/micromatch/README.md delete mode 100644 node_modules/micromatch/index.js delete mode 100644 node_modules/micromatch/package.json delete mode 100644 node_modules/mimic-fn/index.d.ts delete mode 100644 node_modules/mimic-fn/index.js delete mode 100644 node_modules/mimic-fn/license delete mode 100644 node_modules/mimic-fn/package.json delete mode 100644 node_modules/mimic-fn/readme.md delete mode 100644 node_modules/npm-mas-mas/.travis.yml delete mode 100644 node_modules/npm-mas-mas/LICENSE delete mode 100644 node_modules/npm-mas-mas/Makefile delete mode 100644 node_modules/npm-mas-mas/README delete mode 100644 node_modules/npm-mas-mas/cmaki/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki/GitUtils.cmake delete mode 100644 node_modules/npm-mas-mas/cmaki/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki/Utils.cmake delete mode 100755 node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh delete mode 100644 node_modules/npm-mas-mas/cmaki/cmaki.cmake delete mode 100644 node_modules/npm-mas-mas/cmaki/facts/facts.cmake delete mode 100644 node_modules/npm-mas-mas/cmaki/init/.clang-format delete mode 100644 node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl delete mode 100644 node_modules/npm-mas-mas/cmaki/junit/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_docker/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_docker/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_docker/README.md delete mode 100755 node_modules/npm-mas-mas/cmaki_docker/build.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/README.md delete mode 100755 node_modules/npm-mas-mas/cmaki_generator/build delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/build.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/build.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/common.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/compilation.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/download_package.py delete mode 100755 node_modules/npm-mas-mas/cmaki_generator/get_package.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/get_return_code.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/hash_version.py delete mode 100644 
node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/junit/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/json.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/python.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/packing.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt delete mode 
100644 node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/pipeline.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/prepare.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/purge.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/raknet/Source/ReplicaManager3.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/run.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/run_test.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/run_tests.py delete mode 100755 node_modules/npm-mas-mas/cmaki_generator/save_package.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/sync.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/third_party.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/upload.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/upload_package.py delete mode 100644 node_modules/npm-mas-mas/cmaki_generator/utils.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/README.md delete mode 160000 node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache delete mode 120000 node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki delete mode 120000 node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js 
delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md 
delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/ReplicaManager3.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md delete mode 100755 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/npm-do delete mode 100644 
node_modules/npm-mas-mas/cmaki_identifier/package-lock.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/package.json delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/setup.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/setup.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt delete mode 100644 node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/.travis.yml delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/LICENSE delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/README.md delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/ci.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/ci.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/clean.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/clean.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/cmaki.js delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/compile.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/compile.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/create_package.sh delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/docker.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/init.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/publish.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/publish.sh delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/replace.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/run.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/search.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/setup.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/setup.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/test.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/test.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/upload.cmd delete mode 100755 node_modules/npm-mas-mas/cmaki_scripts/upload.sh delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd delete mode 100644 node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh delete mode 100644 node_modules/npm-mas-mas/docker-compose.yml delete mode 100644 node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 delete mode 100644 node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 delete mode 100644 node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 delete mode 100755 node_modules/npm-mas-mas/docker/entrypoint.sh delete mode 100644 node_modules/npm-mas-mas/package.json delete mode 100644 node_modules/npm-mas-mas/servfactor/Dockerfile delete mode 100644 node_modules/npm-mas-mas/servfactor/Makefile delete mode 100644 node_modules/npm-mas-mas/servfactor/NOTES.md delete mode 100644 
node_modules/npm-mas-mas/servfactor/README.md delete mode 100644 node_modules/npm-mas-mas/servfactor/docker-compose.yml delete mode 100755 node_modules/npm-mas-mas/servfactor/download.php delete mode 100755 node_modules/npm-mas-mas/servfactor/index.php delete mode 100644 node_modules/npm-mas-mas/servfactor/packages/README.md delete mode 100644 node_modules/npm-mas-mas/servfactor/stats.php delete mode 100644 node_modules/npm-mas-mas/servfactor/upload.php delete mode 100755 node_modules/npm-mas-mas/servfactor/util.php delete mode 100644 node_modules/npm-run-path/index.d.ts delete mode 100644 node_modules/npm-run-path/index.js delete mode 100644 node_modules/npm-run-path/license delete mode 100644 node_modules/npm-run-path/package.json delete mode 100644 node_modules/npm-run-path/readme.md delete mode 100644 node_modules/onetime/index.d.ts delete mode 100644 node_modules/onetime/index.js delete mode 100644 node_modules/onetime/license delete mode 100644 node_modules/onetime/package.json delete mode 100644 node_modules/onetime/readme.md delete mode 100644 node_modules/path-key/index.d.ts delete mode 100644 node_modules/path-key/index.js delete mode 100644 node_modules/path-key/license delete mode 100644 node_modules/path-key/package.json delete mode 100644 node_modules/path-key/readme.md delete mode 100644 node_modules/picomatch/CHANGELOG.md delete mode 100644 node_modules/picomatch/LICENSE delete mode 100644 node_modules/picomatch/README.md delete mode 100644 node_modules/picomatch/index.js delete mode 100644 node_modules/picomatch/lib/constants.js delete mode 100644 node_modules/picomatch/lib/parse.js delete mode 100644 node_modules/picomatch/lib/picomatch.js delete mode 100644 node_modules/picomatch/lib/scan.js delete mode 100644 node_modules/picomatch/lib/utils.js delete mode 100644 node_modules/picomatch/package.json delete mode 100755 node_modules/queue-microtask/LICENSE delete mode 100644 node_modules/queue-microtask/README.md delete mode 100644 node_modules/queue-microtask/index.d.ts delete mode 100644 node_modules/queue-microtask/index.js delete mode 100644 node_modules/queue-microtask/package.json delete mode 100644 node_modules/reusify/.github/dependabot.yml delete mode 100644 node_modules/reusify/.github/workflows/ci.yml delete mode 100644 node_modules/reusify/LICENSE delete mode 100644 node_modules/reusify/README.md delete mode 100644 node_modules/reusify/SECURITY.md delete mode 100644 node_modules/reusify/benchmarks/createNoCodeFunction.js delete mode 100644 node_modules/reusify/benchmarks/fib.js delete mode 100644 node_modules/reusify/benchmarks/reuseNoCodeFunction.js delete mode 100644 node_modules/reusify/eslint.config.js delete mode 100644 node_modules/reusify/package.json delete mode 100644 node_modules/reusify/reusify.d.ts delete mode 100644 node_modules/reusify/reusify.js delete mode 100644 node_modules/reusify/test.js delete mode 100644 node_modules/reusify/tsconfig.json delete mode 100644 node_modules/run-parallel/LICENSE delete mode 100644 node_modules/run-parallel/README.md delete mode 100644 node_modules/run-parallel/index.js delete mode 100644 node_modules/run-parallel/package.json delete mode 100644 node_modules/shebang-command/index.js delete mode 100644 node_modules/shebang-command/license delete mode 100644 node_modules/shebang-command/package.json delete mode 100644 node_modules/shebang-command/readme.md delete mode 100644 node_modules/shebang-regex/index.d.ts delete mode 100644 node_modules/shebang-regex/index.js delete mode 100644 
node_modules/shebang-regex/license delete mode 100644 node_modules/shebang-regex/package.json delete mode 100644 node_modules/shebang-regex/readme.md delete mode 100644 node_modules/shelljs/LICENSE delete mode 100644 node_modules/shelljs/README.md delete mode 100644 node_modules/shelljs/global.js delete mode 100644 node_modules/shelljs/make.js delete mode 100644 node_modules/shelljs/package.json delete mode 100644 node_modules/shelljs/plugin.js delete mode 100644 node_modules/shelljs/shell.js delete mode 100644 node_modules/shelljs/src/cat.js delete mode 100644 node_modules/shelljs/src/cd.js delete mode 100644 node_modules/shelljs/src/chmod.js delete mode 100644 node_modules/shelljs/src/cmd.js delete mode 100644 node_modules/shelljs/src/common.js delete mode 100644 node_modules/shelljs/src/cp.js delete mode 100644 node_modules/shelljs/src/dirs.js delete mode 100644 node_modules/shelljs/src/echo.js delete mode 100644 node_modules/shelljs/src/error.js delete mode 100644 node_modules/shelljs/src/errorCode.js delete mode 100644 node_modules/shelljs/src/exec-child.js delete mode 100644 node_modules/shelljs/src/exec.js delete mode 100644 node_modules/shelljs/src/find.js delete mode 100644 node_modules/shelljs/src/grep.js delete mode 100644 node_modules/shelljs/src/head.js delete mode 100644 node_modules/shelljs/src/ln.js delete mode 100644 node_modules/shelljs/src/ls.js delete mode 100644 node_modules/shelljs/src/mkdir.js delete mode 100644 node_modules/shelljs/src/mv.js delete mode 100644 node_modules/shelljs/src/popd.js delete mode 100644 node_modules/shelljs/src/pushd.js delete mode 100644 node_modules/shelljs/src/pwd.js delete mode 100644 node_modules/shelljs/src/rm.js delete mode 100644 node_modules/shelljs/src/sed.js delete mode 100644 node_modules/shelljs/src/set.js delete mode 100644 node_modules/shelljs/src/sort.js delete mode 100644 node_modules/shelljs/src/tail.js delete mode 100644 node_modules/shelljs/src/tempdir.js delete mode 100644 node_modules/shelljs/src/test.js delete mode 100644 node_modules/shelljs/src/to.js delete mode 100644 node_modules/shelljs/src/toEnd.js delete mode 100644 node_modules/shelljs/src/touch.js delete mode 100644 node_modules/shelljs/src/uniq.js delete mode 100644 node_modules/shelljs/src/which.js delete mode 100644 node_modules/signal-exit/LICENSE.txt delete mode 100644 node_modules/signal-exit/README.md delete mode 100644 node_modules/signal-exit/index.js delete mode 100644 node_modules/signal-exit/package.json delete mode 100644 node_modules/signal-exit/signals.js delete mode 100644 node_modules/strip-final-newline/index.js delete mode 100644 node_modules/strip-final-newline/license delete mode 100644 node_modules/strip-final-newline/package.json delete mode 100644 node_modules/strip-final-newline/readme.md delete mode 100644 node_modules/to-regex-range/LICENSE delete mode 100644 node_modules/to-regex-range/README.md delete mode 100644 node_modules/to-regex-range/index.js delete mode 100644 node_modules/to-regex-range/package.json delete mode 100644 node_modules/which/CHANGELOG.md delete mode 100644 node_modules/which/LICENSE delete mode 100644 node_modules/which/README.md delete mode 100755 node_modules/which/bin/node-which delete mode 100644 node_modules/which/package.json delete mode 100644 node_modules/which/which.js diff --git a/.gitignore b/.gitignore index a5b0124..60f424a 100644 --- a/.gitignore +++ b/.gitignore @@ -22,7 +22,15 @@ *.a *.lib -# Executables -*.exe -*.out -*.app +# Executables +*.exe +*.out +*.app + +# Node.js dependencies 
+node_modules/ +package-lock.json + +# Build directories +build/ +gcc/ diff --git a/include/factory.h b/include/factory.h index e32443e..8eb40a3 100644 --- a/include/factory.h +++ b/include/factory.h @@ -5,15 +5,22 @@ #ifndef _FACTORY_H_ #define _FACTORY_H_ -#include +#include +#include #include namespace dp14 { -template -class factory_registrator; - -template +template +class factory_registrator; + +/** + * Factory pattern implementation with type registration + * + * WARNING: This class is NOT thread-safe. External synchronization + * is required for concurrent access. + */ +template class factory; namespace detail { @@ -62,9 +69,9 @@ class factory key_impl keyimpl = get_key(); auto it = _map_registrators.find(keyimpl); if (it != _map_registrators.end()) - { - std::cout << "Already registered key " << keyimpl << std::endl; - throw std::exception(); + { + std::cout << "Already registered key " << keyimpl << std::endl; + throw std::runtime_error("Key already registered in factory"); } else { @@ -81,10 +88,10 @@ class factory { _map_registrators.erase(get_key()); } - else - { - std::cout << "Already unregistered key " << keyimpl << std::endl; - throw std::exception(); + else + { + std::cout << "Already unregistered key " << keyimpl << std::endl; + throw std::runtime_error("Key already unregistered in factory"); } } @@ -114,9 +121,9 @@ class factory { auto it = _map_registrators.find(keyimpl); if (it == _map_registrators.end()) - { - std::cout << "Can't found key in map: " << keyimpl << std::endl; - throw std::exception(); + { + std::cout << "Can't found key in map: " << keyimpl << std::endl; + throw std::runtime_error("Key not found in factory registry"); } return (it->second)(std::forward(data)...); } @@ -149,9 +156,14 @@ class factory_registrator return std::make_unique(std::forward(data)...); } - ~factory_registrator() - { - _f.template unregister_type(); + ~factory_registrator() + { + try { + _f.template unregister_type(); + } catch (...) { + // Destructors should not throw exceptions + // Log error or handle silently + } } protected: diff --git a/include/memoize.h b/include/memoize.h index faff575..7e18e45 100644 --- a/include/memoize.h +++ b/include/memoize.h @@ -1,14 +1,21 @@ -#ifndef _MEMOIZE_H_ -#define _MEMOIZE_H_ - +#ifndef _MEMOIZE_H_ +#define _MEMOIZE_H_ + +#include #include namespace dp14 { -template -class memoize_registrator; - -template +template +class memoize_registrator; + +/** + * Memoize pattern implementation with caching and type registration + * + * WARNING: This class is NOT thread-safe. External synchronization + * is required for concurrent access. + */ +template class memoize; namespace detail { @@ -68,9 +75,9 @@ class memoize key_impl keyimpl = get_key(); auto it = _map_registrators.find(keyimpl); if (it != _map_registrators.end()) - { - std::cout << "Already registered key " << keyimpl << std::endl; - throw std::exception(); + { + std::cout << "Already registered key " << keyimpl << std::endl; + throw std::runtime_error("Key already registered in memoize"); } else { @@ -87,10 +94,10 @@ class memoize { _map_registrators.erase(get_key()); } - else - { - std::cout << "Already unregistered key " << keyimpl << std::endl; - throw std::exception(); + else + { + std::cout << "Already unregistered key " << keyimpl << std::endl; + throw std::runtime_error("Key already unregistered in memoize"); } } @@ -107,37 +114,43 @@ class memoize return exists(get_key(), std::forward(data)...); } - template - std::shared_ptr get(TYPE_KEY keyimpl_str, Args&&... 
data) const - { - auto keyimpl = detail::memoize::get_hash(keyimpl_str); - key_cache key = get_base_hash(keyimpl, std::forward(data)...); - auto obj = _get(keyimpl, key, std::forward(data)...); - _map_cache_shared.emplace(key, obj); - return obj; + template + std::shared_ptr get(TYPE_KEY keyimpl_str, Args&&... data) const + { + auto keyimpl = detail::memoize::get_hash(keyimpl_str); + key_cache key = get_base_hash(keyimpl, std::forward(data)...); + auto obj = _get(keyimpl, key, std::forward(data)...); + // Only add to shared cache if not already present to avoid unnecessary entries + _map_cache_shared.emplace(key, obj); // emplace won't insert if key already exists + return obj; } - template - auto execute(TYPE_KEY keyimpl_str, Args&&... data) const - { - auto keyimpl = detail::memoize::get_hash(keyimpl_str); - key_cache key = get_base_hash(keyimpl, std::forward(data)...); - auto code = _get(keyimpl, key, std::forward(data)...); - _map_cache_shared.emplace(key, code); - return code->get(); - } - - template - void clear(TYPE_KEY keyimpl_str, Args&&... data) const - { - auto keyimpl = detail::memoize::get_hash(keyimpl_str); - key_cache key = get_base_hash(keyimpl, std::forward(data)...); - _map_cache_shared.erase(key); + template + auto execute(TYPE_KEY keyimpl_str, Args&&... data) const + { + auto keyimpl = detail::memoize::get_hash(keyimpl_str); + key_cache key = get_base_hash(keyimpl, std::forward(data)...); + auto code = _get(keyimpl, key, std::forward(data)...); + // Only add to shared cache if not already present to avoid unnecessary entries + _map_cache_shared.emplace(key, code); // emplace won't insert if key already exists + return code->get(); } - void clear() const - { - _map_cache_shared.clear(); + template + void clear(TYPE_KEY keyimpl_str, Args&&... data) const + { + auto keyimpl = detail::memoize::get_hash(keyimpl_str); + key_cache key = get_base_hash(keyimpl, std::forward(data)...); + // Clear from both caches for consistency + _map_cache.erase(key); + _map_cache_shared.erase(key); + } + + void clear() const + { + // Clear both caches + _map_cache.clear(); + _map_cache_shared.clear(); } template @@ -163,9 +176,9 @@ class memoize auto itc = _map_registrators.find(keyimpl); if (itc == _map_registrators.end()) - { - std::cout << "Can't found key in map: " << key << std::endl; - throw std::exception(); + { + std::cout << "Can't found key in map: " << key << std::endl; + throw std::runtime_error("Key not found in memoize registry"); } std::shared_ptr new_product = (itc->second)(std::forward(data)...); @@ -184,25 +197,26 @@ class memoize return _exists(keyimpl, std::forward(data)...) != _map_cache.end(); } - cache_iterator _exists(const key_impl& keyimpl, Args&&... data) const - { - key_cache key = get_base_hash(keyimpl, std::forward(data)...); - cache_iterator it = _map_cache.find(key); - cache_iterator ite = _map_cache.end(); - if (it != ite) - { - // pointer cached can be dangled - if (!it->second.expired()) - { - return it; - } - else - { - // remove expired dangled pointer - _map_cache.erase(key); - } - } - return ite; + cache_iterator _exists(const key_impl& keyimpl, Args&&... 
data) const + { + key_cache key = get_base_hash(keyimpl, std::forward(data)...); + cache_iterator it = _map_cache.find(key); + cache_iterator ite = _map_cache.end(); + if (it != ite) + { + // pointer cached can be dangled + if (!it->second.expired()) + { + return it; + } + else + { + // remove expired dangled pointer + // Note: This modifies mutable _map_cache which is allowed in const methods + _map_cache.erase(key); + } + } + return ite; } protected: @@ -235,9 +249,14 @@ class memoize_registrator return std::make_shared(std::forward(data)...); } - ~memoize_registrator() - { - _m.template unregister_type(); + ~memoize_registrator() + { + try { + _m.template unregister_type(); + } catch (...) { + // Destructors should not throw exceptions + // Log error or handle silently + } } protected: diff --git a/node_modules/.bin/cmaki b/node_modules/.bin/cmaki deleted file mode 120000 index 1e97214..0000000 --- a/node_modules/.bin/cmaki +++ /dev/null @@ -1 +0,0 @@ -../npm-mas-mas/cmaki_scripts/cmaki.js \ No newline at end of file diff --git a/node_modules/.bin/node-which b/node_modules/.bin/node-which deleted file mode 120000 index 6f8415e..0000000 --- a/node_modules/.bin/node-which +++ /dev/null @@ -1 +0,0 @@ -../which/bin/node-which \ No newline at end of file diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json deleted file mode 100644 index 9b65d05..0000000 --- a/node_modules/.package-lock.json +++ /dev/null @@ -1,477 +0,0 @@ -{ - "name": "design-patterns-cpp14", - "version": "1.0.23", - "lockfileVersion": 3, - "requires": true, - "packages": { - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - 
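The factory.h and memoize.h hunks above make two related changes: bare throw std::exception() calls become std::runtime_error with a descriptive message, and the registrator destructors wrap unregister_type in a catch-all block so no exception can escape a destructor (an escaping exception during stack unwinding would call std::terminate). A minimal standalone sketch of that pattern follows; registry and scoped_registrator are hypothetical names for illustration, not types from this repository.

// Sketch only: shows the error-reporting and non-throwing-destructor pattern
// used in the factory.h / memoize.h hunks. All names here are hypothetical.
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <utility>

class registry
{
public:
    void register_key(const std::string& key)
    {
        if (_entries.count(key) != 0)
        {
            // runtime_error carries context, unlike a bare std::exception()
            throw std::runtime_error("Key already registered: " + key);
        }
        _entries[key] = true;
    }

    void unregister_key(const std::string& key)
    {
        if (_entries.erase(key) == 0)
        {
            throw std::runtime_error("Key already unregistered: " + key);
        }
    }

private:
    std::unordered_map<std::string, bool> _entries;
};

class scoped_registrator
{
public:
    scoped_registrator(registry& r, std::string key)
        : _r(r), _key(std::move(key))
    {
        _r.register_key(_key);  // may throw; object is simply not constructed
    }

    ~scoped_registrator()
    {
        // Destructors must not let exceptions escape, so teardown failures
        // are contained here (log or ignore; never rethrow).
        try {
            _r.unregister_key(_key);
        } catch (...) {
        }
    }

private:
    registry& _r;
    std::string _key;
};

The same reasoning applies to both factory_registrator and memoize_registrator in the patch: registration errors surface with context at the call site, while errors during teardown are swallowed rather than terminating the program.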
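The memoize.h changes also lean on two standard-library behaviors called out in the added comments: std::unordered_map::emplace does not overwrite an existing entry, so re-caching under the same key is harmless, and std::weak_ptr::expired() lets the lookup path evict entries whose objects have already been destroyed. A small self-contained sketch of those two behaviors, independent of the project's actual key and value types:

// Sketch only: demonstrates the emplace no-overwrite and weak_ptr::expired()
// behavior the memoize.h comments rely on. Key/value types are illustrative.
#include <cassert>
#include <cstddef>
#include <memory>
#include <unordered_map>

int main()
{
    std::unordered_map<std::size_t, std::weak_ptr<int>> cache;

    auto first = std::make_shared<int>(1);
    cache.emplace(42, first);

    // emplace with an existing key is a no-op: the cached entry is kept
    auto second = std::make_shared<int>(2);
    cache.emplace(42, second);
    assert(*cache.at(42).lock() == 1);

    // once the owning shared_ptr dies, the cached weak_ptr reports expired()
    first.reset();
    assert(cache.at(42).expired());

    // lazy eviction of the dangling entry, as done in the _exists lookup
    if (cache.at(42).expired())
    {
        cache.erase(42);
    }
    assert(cache.find(42) == cache.end());
    return 0;
}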
"node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastq": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", - "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - 
"is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true, - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/metacommon": { - "version": "1.0.1", - "resolved": "git+ssh://git@github.com/makiolo/metacommon.git#0eeff0ebfdbe322e8c01ee9c5cfae58dbf8b1f0c", - "hasInstallScript": true, - "license": "MIT" - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/npm-mas-mas": { - "version": "0.0.1", - "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", - "dev": true, - "license": "MIT", - "dependencies": { - "shelljs": ">=0.8.5" - }, - "bin": { - "cmaki": "cmaki_scripts/cmaki.js" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": 
"^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/shelljs": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", - "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "execa": "^5.1.1", - "fast-glob": "^3.3.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": 
"sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - } - } -} diff --git a/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/@nodelib/fs.scandir/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/@nodelib/fs.scandir/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@nodelib/fs.scandir/README.md b/node_modules/@nodelib/fs.scandir/README.md deleted file mode 100644 index e0b218b..0000000 --- a/node_modules/@nodelib/fs.scandir/README.md +++ /dev/null @@ -1,171 +0,0 @@ -# @nodelib/fs.scandir - -> List files and directories inside the specified directory. - -## :bulb: Highlights - -The package is aimed at obtaining information about entries in the directory. - -* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). -* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). -* :link: Can safely work with broken symbolic links. 
- -## Install - -```console -npm install @nodelib/fs.scandir -``` - -## Usage - -```ts -import * as fsScandir from '@nodelib/fs.scandir'; - -fsScandir.scandir('path', (error, stats) => { /* … */ }); -``` - -## API - -### .scandir(path, [optionsOrSettings], callback) - -Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. - -```ts -fsScandir.scandir('path', (error, entries) => { /* … */ }); -fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); -fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); -``` - -### .scandirSync(path, [optionsOrSettings]) - -Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. - -```ts -const entries = fsScandir.scandirSync('path'); -const entries = fsScandir.scandirSync('path', {}); -const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsScandir.Settings({ followSymbolicLinks: false }); - -const entries = fsScandir.scandirSync('path', settings); -``` - -## Entry - -* `name` — The name of the entry (`unknown.txt`). -* `path` — The path of the entry relative to call directory (`root/unknown.txt`). -* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. -* `stats` (optional) — An instance of `fs.Stats` class. - -For example, the `scandir` call for `tools` directory with one directory inside: - -```ts -{ - dirent: Dirent { name: 'typedoc', /* … */ }, - name: 'typedoc', - path: 'tools/typedoc' -} -``` - -## Options - -### stats - -* Type: `boolean` -* Default: `false` - -Adds an instance of `fs.Stats` class to the [`Entry`](#entry). - -> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? - -### followSymbolicLinks - -* Type: `boolean` -* Default: `false` - -Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. - -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. - -### `pathSegmentSeparator` - -* Type: `string` -* Default: `path.sep` - -By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. - -### `fs` - -* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. 
- -```ts -interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; - readdir?: typeof fs.readdir; - readdirSync?: typeof fs.readdirSync; -} - -const settings = new fsScandir.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## `old` and `modern` mode - -This package has two modes that are used depending on the environment and parameters of use. - -### old - -* Node.js below `10.10` or when the `stats` option is enabled - -When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). - -### modern - -* Node.js 10.10+ and the `stats` option is disabled - -In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. - -This mode makes fewer calls to the file system. It's faster. - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts deleted file mode 100644 index 827f1db..0000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type * as fsStat from '@nodelib/fs.stat'; -import type { Dirent, ErrnoException } from '../types'; -export interface ReaddirAsynchronousMethod { - (filepath: string, options: { - withFileTypes: true; - }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; - (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; -} -export interface ReaddirSynchronousMethod { - (filepath: string, options: { - withFileTypes: true; - }): Dirent[]; - (filepath: string): string[]; -} -export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { - readdir: ReaddirAsynchronousMethod; - readdirSync: ReaddirSynchronousMethod; -}; -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/@nodelib/fs.scandir/out/adapters/fs.js deleted file mode 100644 index f0fe022..0000000 --- a/node_modules/@nodelib/fs.scandir/out/adapters/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/@nodelib/fs.scandir/out/constants.d.ts deleted file mode 100644 index 33f1749..0000000 --- a/node_modules/@nodelib/fs.scandir/out/constants.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/** - * IS `true` for Node.js 10.10 and greater. 
- */ -export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/@nodelib/fs.scandir/out/constants.js deleted file mode 100644 index 7e3d441..0000000 --- a/node_modules/@nodelib/fs.scandir/out/constants.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; -const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); -if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { - throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`); -} -const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); -const SUPPORTED_MAJOR_VERSION = 10; -const SUPPORTED_MINOR_VERSION = 10; -const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; -const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; -/** - * IS `true` for Node.js 10.10 and greater. - */ -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/@nodelib/fs.scandir/out/index.d.ts deleted file mode 100644 index b9da83e..0000000 --- a/node_modules/@nodelib/fs.scandir/out/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import type { Dirent, Entry } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function scandir(path: string, callback: AsyncCallback): void; -declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace scandir { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; -export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/@nodelib/fs.scandir/out/index.js deleted file mode 100644 index 99c70d3..0000000 --- a/node_modules/@nodelib/fs.scandir/out/index.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.scandirSync = exports.scandir = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function scandir(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - async.read(path, getSettings(), optionsOrSettingsOrCallback); - return; - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} -exports.scandir = scandir; -function scandirSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.scandirSync = scandirSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof 
settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts deleted file mode 100644 index 5829676..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -/// -import type Settings from '../settings'; -import type { Entry } from '../types'; -export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; -export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/@nodelib/fs.scandir/out/providers/async.js deleted file mode 100644 index e8e2f0a..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/async.js +++ /dev/null @@ -1,104 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const rpl = require("run-parallel"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings, callback) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - readdirWithFileTypes(directory, settings, callback); - return; - } - readdir(directory, settings, callback); -} -exports.read = read; -function readdirWithFileTypes(directory, settings, callback) { - settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { - if (readdirError !== null) { - callFailureCallback(callback, readdirError); - return; - } - const entries = dirents.map((dirent) => ({ - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - })); - if (!settings.followSymbolicLinks) { - callSuccessCallback(callback, entries); - return; - } - const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); - rpl(tasks, (rplError, rplEntries) => { - if (rplError !== null) { - callFailureCallback(callback, rplError); - return; - } - callSuccessCallback(callback, rplEntries); - }); - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function makeRplTaskEntry(entry, settings) { - return (done) => { - if (!entry.dirent.isSymbolicLink()) { - done(null, entry); - return; - } - settings.fs.stat(entry.path, (statError, stats) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - done(statError); - return; - } - done(null, entry); - return; - } - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - done(null, entry); - }); - }; -} -function readdir(directory, settings, callback) { - settings.fs.readdir(directory, (readdirError, names) => { - if (readdirError !== null) { - callFailureCallback(callback, readdirError); - return; - } - const tasks = names.map((name) => { - const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - return (done) => { - fsStat.stat(path, settings.fsStatSettings, (error, stats) => { - if (error !== null) { - done(error); - return; - } - const entry = { - name, - path, - dirent: 
utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - done(null, entry); - }); - }; - }); - rpl(tasks, (rplError, entries) => { - if (rplError !== null) { - callFailureCallback(callback, rplError); - return; - } - callSuccessCallback(callback, entries); - }); - }); -} -exports.readdir = readdir; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts deleted file mode 100644 index 2b4d08b..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/@nodelib/fs.scandir/out/providers/common.js deleted file mode 100644 index 8724cb5..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/common.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = void 0; -function joinPathSegments(a, b, separator) { - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). - */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts deleted file mode 100644 index e05c8f0..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type Settings from '../settings'; -import type { Entry } from '../types'; -export declare function read(directory: string, settings: Settings): Entry[]; -export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; -export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/@nodelib/fs.scandir/out/providers/sync.js deleted file mode 100644 index 146db34..0000000 --- a/node_modules/@nodelib/fs.scandir/out/providers/sync.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings); - } - return readdir(directory, settings); -} -exports.read = read; -function readdirWithFileTypes(directory, settings) { - const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); - return dirents.map((dirent) => { - const entry = { - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - }; - if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { - try { - const stats = settings.fs.statSync(entry.path); - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - } - catch (error) { - if (settings.throwErrorOnBrokenSymbolicLink) { - throw error; - } 
- } - } - return entry; - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function readdir(directory, settings) { - const names = settings.fs.readdirSync(directory); - return names.map((name) => { - const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - const stats = fsStat.statSync(entryPath, settings.fsStatSettings); - const entry = { - name, - path: entryPath, - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - return entry; - }); -} -exports.readdir = readdir; diff --git a/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/@nodelib/fs.scandir/out/settings.d.ts deleted file mode 100644 index a0db115..0000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fs from './adapters/fs'; -export interface Options { - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly followSymbolicLinks: boolean; - readonly fs: fs.FileSystemAdapter; - readonly pathSegmentSeparator: string; - readonly stats: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly fsStatSettings: fsStat.Settings; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/@nodelib/fs.scandir/out/settings.js deleted file mode 100644 index 15a3e8c..0000000 --- a/node_modules/@nodelib/fs.scandir/out/settings.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.stats = this._getValue(this._options.stats, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - this.fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this.followSymbolicLinks, - fs: this.fs, - throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/@nodelib/fs.scandir/out/types/index.d.ts deleted file mode 100644 index f326c5e..0000000 --- a/node_modules/@nodelib/fs.scandir/out/types/index.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/// -import type * as fs from 'fs'; -export interface Entry { - dirent: Dirent; - name: string; - path: string; - stats?: Stats; -} -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; -export interface Dirent { - isBlockDevice: () => boolean; - isCharacterDevice: () => boolean; - isDirectory: () => boolean; - isFIFO: () => boolean; - isFile: () => boolean; - isSocket: () => boolean; - isSymbolicLink: () => boolean; - name: string; -} diff --git a/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/@nodelib/fs.scandir/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/@nodelib/fs.scandir/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts deleted file mode 100644 index bb863f1..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Dirent, Stats } from '../types'; -export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/@nodelib/fs.scandir/out/utils/fs.js deleted file mode 100644 index ace7c74..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts deleted file mode 100644 index 1b41954..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import * as fs from './fs'; -export { fs }; diff --git a/node_modules/@nodelib/fs.scandir/out/utils/index.js b/node_modules/@nodelib/fs.scandir/out/utils/index.js deleted file mode 100644 index f5de129..0000000 --- a/node_modules/@nodelib/fs.scandir/out/utils/index.js +++ /dev/null @@ -1,5 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fs = void 0; -const fs = require("./fs"); -exports.fs = fs; diff --git a/node_modules/@nodelib/fs.scandir/package.json b/node_modules/@nodelib/fs.scandir/package.json deleted file mode 100644 index d3a8924..0000000 --- a/node_modules/@nodelib/fs.scandir/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nodelib/fs.scandir", - "version": "2.1.5", - "description": "List files and directories inside the specified 
directory", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "scandir", - "readdir", - "dirent" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4", - "@types/run-parallel": "^1.1.0" - }, - "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" -} diff --git a/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/@nodelib/fs.stat/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/@nodelib/fs.stat/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@nodelib/fs.stat/README.md b/node_modules/@nodelib/fs.stat/README.md deleted file mode 100644 index 686f047..0000000 --- a/node_modules/@nodelib/fs.stat/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# @nodelib/fs.stat - -> Get the status of a file with some features. - -## :bulb: Highlights - -Wrapper around standard method `fs.lstat` and `fs.stat` with some features. - -* :beginner: Normally follows symbolic link. -* :gear: Can safely work with broken symbolic link. - -## Install - -```console -npm install @nodelib/fs.stat -``` - -## Usage - -```ts -import * as fsStat from '@nodelib/fs.stat'; - -fsStat.stat('path', (error, stats) => { /* … */ }); -``` - -## API - -### .stat(path, [optionsOrSettings], callback) - -Returns an instance of `fs.Stats` class for provided path with standard callback-style. - -```ts -fsStat.stat('path', (error, stats) => { /* … */ }); -fsStat.stat('path', {}, (error, stats) => { /* … */ }); -fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ }); -``` - -### .statSync(path, [optionsOrSettings]) - -Returns an instance of `fs.Stats` class for provided path. 
- -```ts -const stats = fsStat.stat('path'); -const stats = fsStat.stat('path', {}); -const stats = fsStat.stat('path', new fsStat.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settings) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsStat.Settings({ followSymbolicLink: false }); - -const stats = fsStat.stat('path', settings); -``` - -## Options - -### `followSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`. - -### `markSymbolicLink` - -* Type: `boolean` -* Default: `false` - -Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`). - -> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link. - -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -### `fs` - -* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; -} - -const settings = new fsStat.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts deleted file mode 100644 index 3af759c..0000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -/// -import * as fs from 'fs'; -import type { ErrnoException } from '../types'; -export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; -export declare type StatSynchronousMethod = (path: string) => fs.Stats; -export interface FileSystemAdapter { - lstat: StatAsynchronousMethod; - stat: StatAsynchronousMethod; - lstatSync: StatSynchronousMethod; - statSync: StatSynchronousMethod; -} -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/@nodelib/fs.stat/out/adapters/fs.js deleted file mode 100644 index 8dc08c8..0000000 --- a/node_modules/@nodelib/fs.stat/out/adapters/fs.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/@nodelib/fs.stat/out/index.d.ts deleted file mode 100644 index f95db99..0000000 --- a/node_modules/@nodelib/fs.stat/out/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import type { Stats } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function stat(path: string, callback: AsyncCallback): void; -declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace stat { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; -export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/@nodelib/fs.stat/out/index.js deleted file mode 100644 index b23f751..0000000 --- a/node_modules/@nodelib/fs.stat/out/index.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.statSync = exports.stat = exports.Settings = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function stat(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - async.read(path, getSettings(), optionsOrSettingsOrCallback); - return; - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} 
-exports.stat = stat; -function statSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.statSync = statSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/@nodelib/fs.stat/out/providers/async.d.ts deleted file mode 100644 index 85423ce..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type Settings from '../settings'; -import type { ErrnoException, Stats } from '../types'; -export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; -export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/@nodelib/fs.stat/out/providers/async.js deleted file mode 100644 index 983ff0e..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/async.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings, callback) { - settings.fs.lstat(path, (lstatError, lstat) => { - if (lstatError !== null) { - callFailureCallback(callback, lstatError); - return; - } - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - callSuccessCallback(callback, lstat); - return; - } - settings.fs.stat(path, (statError, stat) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - callFailureCallback(callback, statError); - return; - } - callSuccessCallback(callback, lstat); - return; - } - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - callSuccessCallback(callback, stat); - }); - }); -} -exports.read = read; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts deleted file mode 100644 index 428c3d7..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type Settings from '../settings'; -import type { Stats } from '../types'; -export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/@nodelib/fs.stat/out/providers/sync.js b/node_modules/@nodelib/fs.stat/out/providers/sync.js deleted file mode 100644 index 1521c36..0000000 --- a/node_modules/@nodelib/fs.stat/out/providers/sync.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings) { - const lstat = settings.fs.lstatSync(path); - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return lstat; - } - try { - const stat = settings.fs.statSync(path); - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - return stat; - } - catch (error) { - if (!settings.throwErrorOnBrokenSymbolicLink) { - return lstat; - } - throw error; - } -} -exports.read = read; diff --git a/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/@nodelib/fs.stat/out/settings.d.ts deleted file mode 100644 index f4b3d44..0000000 --- a/node_modules/@nodelib/fs.stat/out/settings.d.ts +++ 
/dev/null @@ -1,16 +0,0 @@ -import * as fs from './adapters/fs'; -export interface Options { - followSymbolicLink?: boolean; - fs?: Partial; - markSymbolicLink?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly followSymbolicLink: boolean; - readonly fs: fs.FileSystemAdapter; - readonly markSymbolicLink: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/@nodelib/fs.stat/out/settings.js deleted file mode 100644 index 111ec09..0000000 --- a/node_modules/@nodelib/fs.stat/out/settings.js +++ /dev/null @@ -1,16 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? option : value; - } -} -exports.default = Settings; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/@nodelib/fs.stat/out/types/index.d.ts deleted file mode 100644 index 74c08ed..0000000 --- a/node_modules/@nodelib/fs.stat/out/types/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -import type * as fs from 'fs'; -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/@nodelib/fs.stat/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/@nodelib/fs.stat/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.stat/package.json b/node_modules/@nodelib/fs.stat/package.json deleted file mode 100644 index f2540c2..0000000 --- a/node_modules/@nodelib/fs.stat/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@nodelib/fs.stat", - "version": "2.0.5", - "description": "Get the status of a file with some features", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "stat" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . 
--watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4" - }, - "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" -} diff --git a/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/@nodelib/fs.walk/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/@nodelib/fs.walk/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/@nodelib/fs.walk/README.md b/node_modules/@nodelib/fs.walk/README.md deleted file mode 100644 index 6ccc08d..0000000 --- a/node_modules/@nodelib/fs.walk/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# @nodelib/fs.walk - -> A library for efficiently walking a directory recursively. - -## :bulb: Highlights - -* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). -* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). -* :gear: Built-in directories/files and error filtering system. -* :link: Can safely work with broken symbolic links. - -## Install - -```console -npm install @nodelib/fs.walk -``` - -## Usage - -```ts -import * as fsWalk from '@nodelib/fs.walk'; - -fsWalk.walk('path', (error, entries) => { /* … */ }); -``` - -## API - -### .walk(path, [optionsOrSettings], callback) - -Reads the directory recursively and asynchronously. Requires a callback function. - -> :book: If you want to use the Promise API, use `util.promisify`. - -```ts -fsWalk.walk('path', (error, entries) => { /* … */ }); -fsWalk.walk('path', {}, (error, entries) => { /* … */ }); -fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); -``` - -### .walkStream(path, [optionsOrSettings]) - -Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. - -```ts -const stream = fsWalk.walkStream('path'); -const stream = fsWalk.walkStream('path', {}); -const stream = fsWalk.walkStream('path', new fsWalk.Settings()); -``` - -### .walkSync(path, [optionsOrSettings]) - -Reads the directory recursively and synchronously. 
Returns an array of entries. - -```ts -const entries = fsWalk.walkSync('path'); -const entries = fsWalk.walkSync('path', {}); -const entries = fsWalk.walkSync('path', new fsWalk.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settings) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsWalk.Settings({ followSymbolicLinks: true }); - -const entries = fsWalk.walkSync('path', settings); -``` - -## Entry - -* `name` — The name of the entry (`unknown.txt`). -* `path` — The path of the entry relative to call directory (`root/unknown.txt`). -* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. -* [`stats`] — An instance of `fs.Stats` class. - -## Options - -### basePath - -* Type: `string` -* Default: `undefined` - -By default, all paths are built relative to the root path. You can use this option to set custom root path. - -In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. - -```ts -fsWalk.walkSync('root'); // → ['root/file.txt'] -fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] -``` - -### concurrency - -* Type: `number` -* Default: `Infinity` - -The maximum number of concurrent calls to `fs.readdir`. - -> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). - -### deepFilter - -* Type: [`DeepFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that indicates whether the directory will be read deep or not. - -```ts -// Skip all directories that starts with `node_modules` -const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); -``` - -### entryFilter - -* Type: [`EntryFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that indicates whether the entry will be included to results or not. - -```ts -// Exclude all `.js` files from results -const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); -``` - -### errorFilter - -* Type: [`ErrorFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that allows you to skip errors that occur when reading directories. - -For example, you can skip `ENOENT` errors if required: - -```ts -// Skip all ENOENT errors -const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; -``` - -### stats - -* Type: `boolean` -* Default: `false` - -Adds an instance of `fs.Stats` class to the [`Entry`](#entry). - -> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. - -### followSymbolicLinks - -* Type: `boolean` -* Default: `false` - -Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
- -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -### `pathSegmentSeparator` - -* Type: `string` -* Default: `path.sep` - -By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. - -### `fs` - -* Type: `FileSystemAdapter` -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; - readdir: typeof fs.readdir; - readdirSync: typeof fs.readdirSync; -} - -const settings = new fsWalk.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. diff --git a/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/@nodelib/fs.walk/out/index.d.ts deleted file mode 100644 index 8864c7b..0000000 --- a/node_modules/@nodelib/fs.walk/out/index.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import type { Readable } from 'stream'; -import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; -import { AsyncCallback } from './providers/async'; -import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; -import type { Entry } from './types'; -declare function walk(directory: string, callback: AsyncCallback): void; -declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace walk { - function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; -declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; -export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/@nodelib/fs.walk/out/index.js deleted file mode 100644 index 1520787..0000000 --- a/node_modules/@nodelib/fs.walk/out/index.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function walk(directory, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); - return; - } - new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); -} -exports.walk = walk; -function walkSync(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new sync_1.default(directory, settings); - return 
provider.read(); -} -exports.walkSync = walkSync; -function walkStream(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new stream_1.default(directory, settings); - return provider.read(); -} -exports.walkStream = walkStream; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/@nodelib/fs.walk/out/providers/async.d.ts deleted file mode 100644 index 0f6717d..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import AsyncReader from '../readers/async'; -import type Settings from '../settings'; -import type { Entry, Errno } from '../types'; -export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; -export default class AsyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - private readonly _storage; - constructor(_root: string, _settings: Settings); - read(callback: AsyncCallback): void; -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/@nodelib/fs.walk/out/providers/async.js deleted file mode 100644 index 51d3be5..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/async.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -class AsyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._storage = []; - } - read(callback) { - this._reader.onError((error) => { - callFailureCallback(callback, error); - }); - this._reader.onEntry((entry) => { - this._storage.push(entry); - }); - this._reader.onEnd(() => { - callSuccessCallback(callback, this._storage); - }); - this._reader.read(); - } -} -exports.default = AsyncProvider; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, entries) { - callback(null, entries); -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/@nodelib/fs.walk/out/providers/index.d.ts deleted file mode 100644 index 874f60c..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import AsyncProvider from './async'; -import StreamProvider from './stream'; -import SyncProvider from './sync'; -export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/@nodelib/fs.walk/out/providers/index.js deleted file mode 100644 index 4c2529c..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/index.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; -const async_1 = require("./async"); -exports.AsyncProvider = async_1.default; -const stream_1 = require("./stream"); -exports.StreamProvider = stream_1.default; -const sync_1 = require("./sync"); -exports.SyncProvider = sync_1.default; diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts deleted file mode 100644 index 294185f..0000000 --- 
a/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -/// -import { Readable } from 'stream'; -import AsyncReader from '../readers/async'; -import type Settings from '../settings'; -export default class StreamProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - protected readonly _stream: Readable; - constructor(_root: string, _settings: Settings); - read(): Readable; -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/@nodelib/fs.walk/out/providers/stream.js deleted file mode 100644 index 51298b0..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/stream.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const async_1 = require("../readers/async"); -class StreamProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._stream = new stream_1.Readable({ - objectMode: true, - read: () => { }, - destroy: () => { - if (!this._reader.isDestroyed) { - this._reader.destroy(); - } - } - }); - } - read() { - this._reader.onError((error) => { - this._stream.emit('error', error); - }); - this._reader.onEntry((entry) => { - this._stream.push(entry); - }); - this._reader.onEnd(() => { - this._stream.push(null); - }); - this._reader.read(); - return this._stream; - } -} -exports.default = StreamProvider; diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts deleted file mode 100644 index 551c42e..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import SyncReader from '../readers/sync'; -import type Settings from '../settings'; -import type { Entry } from '../types'; -export default class SyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: SyncReader; - constructor(_root: string, _settings: Settings); - read(): Entry[]; -} diff --git a/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/@nodelib/fs.walk/out/providers/sync.js deleted file mode 100644 index faab6ca..0000000 --- a/node_modules/@nodelib/fs.walk/out/providers/sync.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -class SyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new sync_1.default(this._root, this._settings); - } - read() { - return this._reader.read(); - } -} -exports.default = SyncProvider; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/@nodelib/fs.walk/out/readers/async.d.ts deleted file mode 100644 index 9acf4e6..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -/// -import { EventEmitter } from 'events'; -import * as fsScandir from '@nodelib/fs.scandir'; -import type Settings from '../settings'; -import type { Entry, Errno } from '../types'; -import Reader from './reader'; -declare type EntryEventCallback = (entry: Entry) => void; -declare type ErrorEventCallback = (error: Errno) => void; -declare type EndEventCallback = () => void; -export default class AsyncReader extends Reader { - protected readonly _settings: Settings; - protected readonly _scandir: 
typeof fsScandir.scandir; - protected readonly _emitter: EventEmitter; - private readonly _queue; - private _isFatalError; - private _isDestroyed; - constructor(_root: string, _settings: Settings); - read(): EventEmitter; - get isDestroyed(): boolean; - destroy(): void; - onEntry(callback: EntryEventCallback): void; - onError(callback: ErrorEventCallback): void; - onEnd(callback: EndEventCallback): void; - private _pushToQueue; - private _worker; - private _handleError; - private _handleEntry; - private _emitEntry; -} -export {}; diff --git a/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/@nodelib/fs.walk/out/readers/async.js deleted file mode 100644 index ebe8dd5..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/async.js +++ /dev/null @@ -1,97 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const events_1 = require("events"); -const fsScandir = require("@nodelib/fs.scandir"); -const fastq = require("fastq"); -const common = require("./common"); -const reader_1 = require("./reader"); -class AsyncReader extends reader_1.default { - constructor(_root, _settings) { - super(_root, _settings); - this._settings = _settings; - this._scandir = fsScandir.scandir; - this._emitter = new events_1.EventEmitter(); - this._queue = fastq(this._worker.bind(this), this._settings.concurrency); - this._isFatalError = false; - this._isDestroyed = false; - this._queue.drain = () => { - if (!this._isFatalError) { - this._emitter.emit('end'); - } - }; - } - read() { - this._isFatalError = false; - this._isDestroyed = false; - setImmediate(() => { - this._pushToQueue(this._root, this._settings.basePath); - }); - return this._emitter; - } - get isDestroyed() { - return this._isDestroyed; - } - destroy() { - if (this._isDestroyed) { - throw new Error('The reader is already destroyed'); - } - this._isDestroyed = true; - this._queue.killAndDrain(); - } - onEntry(callback) { - this._emitter.on('entry', callback); - } - onError(callback) { - this._emitter.once('error', callback); - } - onEnd(callback) { - this._emitter.once('end', callback); - } - _pushToQueue(directory, base) { - const queueItem = { directory, base }; - this._queue.push(queueItem, (error) => { - if (error !== null) { - this._handleError(error); - } - }); - } - _worker(item, done) { - this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { - if (error !== null) { - done(error, undefined); - return; - } - for (const entry of entries) { - this._handleEntry(entry, item.base); - } - done(null, undefined); - }); - } - _handleError(error) { - if (this._isDestroyed || !common.isFatalError(this._settings, error)) { - return; - } - this._isFatalError = true; - this._isDestroyed = true; - this._emitter.emit('error', error); - } - _handleEntry(entry, base) { - if (this._isDestroyed || this._isFatalError) { - return; - } - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._emitEntry(entry); - } - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, base === undefined ? 
undefined : entry.path); - } - } - _emitEntry(entry) { - this._emitter.emit('entry', entry); - } -} -exports.default = AsyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/@nodelib/fs.walk/out/readers/common.d.ts deleted file mode 100644 index 5985f97..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { FilterFunction } from '../settings'; -import type Settings from '../settings'; -import type { Errno } from '../types'; -export declare function isFatalError(settings: Settings, error: Errno): boolean; -export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; -export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; -export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/@nodelib/fs.walk/out/readers/common.js deleted file mode 100644 index a93572f..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/common.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; -function isFatalError(settings, error) { - if (settings.errorFilter === null) { - return true; - } - return !settings.errorFilter(error); -} -exports.isFatalError = isFatalError; -function isAppliedFilter(filter, value) { - return filter === null || filter(value); -} -exports.isAppliedFilter = isAppliedFilter; -function replacePathSegmentSeparator(filepath, separator) { - return filepath.split(/[/\\]/).join(separator); -} -exports.replacePathSegmentSeparator = replacePathSegmentSeparator; -function joinPathSegments(a, b, separator) { - if (a === '') { - return b; - } - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
- */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts deleted file mode 100644 index e1f383b..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type Settings from '../settings'; -export default class Reader { - protected readonly _root: string; - protected readonly _settings: Settings; - constructor(_root: string, _settings: Settings); -} diff --git a/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/@nodelib/fs.walk/out/readers/reader.js deleted file mode 100644 index 782f07c..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/reader.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const common = require("./common"); -class Reader { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); - } -} -exports.default = Reader; diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts deleted file mode 100644 index af41033..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import type { Entry } from '../types'; -import Reader from './reader'; -export default class SyncReader extends Reader { - protected readonly _scandir: typeof fsScandir.scandirSync; - private readonly _storage; - private readonly _queue; - read(): Entry[]; - private _pushToQueue; - private _handleQueue; - private _handleDirectory; - private _handleError; - private _handleEntry; - private _pushToStorage; -} diff --git a/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/@nodelib/fs.walk/out/readers/sync.js deleted file mode 100644 index 9a8d5a6..0000000 --- a/node_modules/@nodelib/fs.walk/out/readers/sync.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsScandir = require("@nodelib/fs.scandir"); -const common = require("./common"); -const reader_1 = require("./reader"); -class SyncReader extends reader_1.default { - constructor() { - super(...arguments); - this._scandir = fsScandir.scandirSync; - this._storage = []; - this._queue = new Set(); - } - read() { - this._pushToQueue(this._root, this._settings.basePath); - this._handleQueue(); - return this._storage; - } - _pushToQueue(directory, base) { - this._queue.add({ directory, base }); - } - _handleQueue() { - for (const item of this._queue.values()) { - this._handleDirectory(item.directory, item.base); - } - } - _handleDirectory(directory, base) { - try { - const entries = this._scandir(directory, this._settings.fsScandirSettings); - for (const entry of entries) { - this._handleEntry(entry, base); - } - } - catch (error) { - this._handleError(error); - } - } - _handleError(error) { - if (!common.isFatalError(this._settings, error)) { - return; - } - throw error; - } - _handleEntry(entry, base) { - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._pushToStorage(entry); - } - if 
(entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); - } - } - _pushToStorage(entry) { - this._storage.push(entry); - } -} -exports.default = SyncReader; diff --git a/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/@nodelib/fs.walk/out/settings.d.ts deleted file mode 100644 index d1c4b45..0000000 --- a/node_modules/@nodelib/fs.walk/out/settings.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import type { Entry, Errno } from './types'; -export declare type FilterFunction = (value: T) => boolean; -export declare type DeepFilterFunction = FilterFunction; -export declare type EntryFilterFunction = FilterFunction; -export declare type ErrorFilterFunction = FilterFunction; -export interface Options { - basePath?: string; - concurrency?: number; - deepFilter?: DeepFilterFunction; - entryFilter?: EntryFilterFunction; - errorFilter?: ErrorFilterFunction; - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly basePath?: string; - readonly concurrency: number; - readonly deepFilter: DeepFilterFunction | null; - readonly entryFilter: EntryFilterFunction | null; - readonly errorFilter: ErrorFilterFunction | null; - readonly pathSegmentSeparator: string; - readonly fsScandirSettings: fsScandir.Settings; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/@nodelib/fs.walk/out/settings.js deleted file mode 100644 index d7a85c8..0000000 --- a/node_modules/@nodelib/fs.walk/out/settings.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsScandir = require("@nodelib/fs.scandir"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.basePath = this._getValue(this._options.basePath, undefined); - this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); - this.deepFilter = this._getValue(this._options.deepFilter, null); - this.entryFilter = this._getValue(this._options.entryFilter, null); - this.errorFilter = this._getValue(this._options.errorFilter, null); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.fsScandirSettings = new fsScandir.Settings({ - followSymbolicLinks: this._options.followSymbolicLinks, - fs: this._options.fs, - pathSegmentSeparator: this._options.pathSegmentSeparator, - stats: this._options.stats, - throwErrorOnBrokenSymbolicLink: this._options.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; diff --git a/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/@nodelib/fs.walk/out/types/index.d.ts deleted file mode 100644 index 6ee9bd3..0000000 --- a/node_modules/@nodelib/fs.walk/out/types/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/// -import type * as scandir from '@nodelib/fs.scandir'; -export declare type Entry = scandir.Entry; -export declare type Errno = NodeJS.ErrnoException; -export interface QueueItem { - directory: string; - base?: string; -} diff --git a/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/@nodelib/fs.walk/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/@nodelib/fs.walk/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@nodelib/fs.walk/package.json b/node_modules/@nodelib/fs.walk/package.json deleted file mode 100644 index 86bfce4..0000000 --- a/node_modules/@nodelib/fs.walk/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nodelib/fs.walk", - "version": "1.2.8", - "description": "A library for efficiently walking a directory recursively", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "walk", - "scanner", - "crawler" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*", - "!out/**/tests/**" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4" - }, - "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" -} diff --git a/node_modules/braces/LICENSE b/node_modules/braces/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/braces/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
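The `@nodelib/fs.walk` files removed above declare `walk`, `walkSync`, `walkStream`, and a `Settings` class. Below is a minimal usage sketch of that API; it is not part of the deleted sources, and the directory path and entry filter are illustrative placeholders only.

```js
const fsWalk = require('@nodelib/fs.walk');

// Asynchronous walk: walk(directory, callback), as declared in out/index.d.ts above.
fsWalk.walk('some/dir', (error, entries) => {
  if (error) throw error;
  console.log(entries.map((entry) => entry.path));
});

// Synchronous walk with explicit Settings; option names mirror out/settings.d.ts above.
const jsEntries = fsWalk.walkSync('some/dir', new fsWalk.Settings({
  followSymbolicLinks: false,
  entryFilter: (entry) => entry.name.endsWith('.js'),
}));
console.log(jsEntries.length);
```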
diff --git a/node_modules/braces/README.md b/node_modules/braces/README.md deleted file mode 100644 index f59dd60..0000000 --- a/node_modules/braces/README.md +++ /dev/null @@ -1,586 +0,0 @@ -# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) - -> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save braces -``` - -## v3.0.0 Released!! - -See the [changelog](CHANGELOG.md) for details. - -## Why use braces? - -Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. - -- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) -- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. -- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. -- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). -- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). -- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` -- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` -- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` -- [Supports escaping](#escaping) - To prevent evaluation of special characters. - -## Usage - -The main export is a function that takes one or more brace `patterns` and `options`. - -```js -const braces = require('braces'); -// braces(patterns[, options]); - -console.log(braces(['{01..05}', '{a..e}'])); -//=> ['(0[1-5])', '([a-e])'] - -console.log(braces(['{01..05}', '{a..e}'], { expand: true })); -//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] -``` - -### Brace Expansion vs. Compilation - -By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. 
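As a minimal sketch of what "optimized for creating regular expressions and matching" means in practice (assuming the braces 3.x API vendored here; the `RegExp` wrapper is illustrative and not part of this README):

```js
const braces = require('braces');

// Compile to a regex-friendly source string instead of expanding.
const [source] = braces('a/{b,c}/d'); //=> 'a/(b|c)/d'

// A single small RegExp then matches what an expanded list would have to enumerate.
const re = new RegExp(`^${source}$`);
console.log(re.test('a/b/d')); //=> true
console.log(re.test('a/x/d')); //=> false
```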
- -**Compiled** - -```js -console.log(braces('a/{x,y,z}/b')); -//=> ['a/(x|y|z)/b'] -console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); -//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] -``` - -**Expanded** - -Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): - -```js -console.log(braces('a/{x,y,z}/b', { expand: true })); -//=> ['a/x/b', 'a/y/b', 'a/z/b'] - -console.log(braces.expand('{01..10}')); -//=> ['01','02','03','04','05','06','07','08','09','10'] -``` - -### Lists - -Expand lists (like Bash "sets"): - -```js -console.log(braces('a/{foo,bar,baz}/*.js')); -//=> ['a/(foo|bar|baz)/*.js'] - -console.log(braces.expand('a/{foo,bar,baz}/*.js')); -//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] -``` - -### Sequences - -Expand ranges of characters (like Bash "sequences"): - -```js -console.log(braces.expand('{1..3}')); // ['1', '2', '3'] -console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] -console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] -console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] - -// supports zero-padded ranges -console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] -console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] -``` - -See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. - -### Steppped ranges - -Steps, or increments, may be used with ranges: - -```js -console.log(braces.expand('{2..10..2}')); -//=> ['2', '4', '6', '8', '10'] - -console.log(braces('{2..10..2}')); -//=> ['(2|4|6|8|10)'] -``` - -When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. - -### Nesting - -Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. - -**"Expanded" braces** - -```js -console.log(braces.expand('a{b,c,/{x,y}}/e')); -//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] - -console.log(braces.expand('a/{x,{1..5},y}/c')); -//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] -``` - -**"Optimized" braces** - -```js -console.log(braces('a{b,c,/{x,y}}/e')); -//=> ['a(b|c|/(x|y))/e'] - -console.log(braces('a/{x,{1..5},y}/c')); -//=> ['a/(x|([1-5])|y)/c'] -``` - -### Escaping - -**Escaping braces** - -A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: - -```js -console.log(braces.expand('a\\{d,c,b}e')); -//=> ['a{d,c,b}e'] - -console.log(braces.expand('a{d,c,b\\}e')); -//=> ['a{d,c,b}e'] -``` - -**Escaping commas** - -Commas inside braces may also be escaped: - -```js -console.log(braces.expand('a{b\\,c}d')); -//=> ['a{b,c}d'] - -console.log(braces.expand('a{d\\,c,b}e')); -//=> ['ad,ce', 'abe'] -``` - -**Single items** - -Following bash conventions, a brace pattern is also not expanded when it contains a single character: - -```js -console.log(braces.expand('a{b}c')); -//=> ['a{b}c'] -``` - -## Options - -### options.maxLength - -**Type**: `Number` - -**Default**: `10,000` - -**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. 
- -```js -console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error -``` - -### options.expand - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). - -```js -console.log(braces('a/{b,c}/d', { expand: true })); -//=> [ 'a/b/d', 'a/c/d' ] -``` - -### options.nodupes - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Remove duplicates from the returned array. - -### options.rangeLimit - -**Type**: `Number` - -**Default**: `1000` - -**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. - -You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. - -**Examples** - -```js -// pattern exceeds the "rangeLimit", so it's optimized automatically -console.log(braces.expand('{1..1000}')); -//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] - -// pattern does not exceed "rangeLimit", so it's NOT optimized -console.log(braces.expand('{1..100}')); -//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] -``` - -### options.transform - -**Type**: `Function` - -**Default**: `undefined` - -**Description**: Customize range expansion. - -**Example: Transforming non-numeric values** - -```js -const alpha = braces.expand('x/{a..e}/y', { - transform(value, index) { - // When non-numeric values are passed, "value" is a character code. - return 'foo/' + String.fromCharCode(value) + '-' + index; - }, -}); -console.log(alpha); -//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] -``` - -**Example: Transforming numeric values** - -```js -const numeric = braces.expand('{1..5}', { - transform(value) { - // when numeric values are passed, "value" is a number - return 'foo/' + value * 2; - }, -}); -console.log(numeric); -//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] -``` - -### options.quantifiers - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. - -Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) - -The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. 
- -**Examples** - -```js -const braces = require('braces'); -console.log(braces('a/b{1,3}/{x,y,z}')); -//=> [ 'a/b(1|3)/(x|y|z)' ] -console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); -//=> [ 'a/b{1,3}/(x|y|z)' ] -console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); -//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] -``` - -### options.keepEscaping - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Do not strip backslashes that were used for escaping from the result. - -## What is "brace expansion"? - -Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). - -In addition to "expansion", braces are also used for matching. In other words: - -- [brace expansion](#brace-expansion) is for generating new lists -- [brace matching](#brace-matching) is for filtering existing lists - -
-More about brace expansion (click to expand) - -There are two main types of brace expansion: - -1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` -2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". - -Here are some example brace patterns to illustrate how they work: - -**Sets** - -``` -{a,b,c} => a b c -{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 -``` - -**Sequences** - -``` -{1..9} => 1 2 3 4 5 6 7 8 9 -{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 -{1..20..3} => 1 4 7 10 13 16 19 -{a..j} => a b c d e f g h i j -{j..a} => j i h g f e d c b a -{a..z..3} => a d g j m p s v y -``` - -**Combination** - -Sets and sequences can be mixed together or used along with any other strings. - -``` -{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 -foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar -``` - -The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. - -## Brace matching - -In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. - -For example, the pattern `foo/{1..3}/bar` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -``` - -But not: - -``` -baz/1/qux -baz/2/qux -baz/3/qux -``` - -Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -baz/1/qux -baz/2/qux -baz/3/qux -``` - -## Brace matching pitfalls - -Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. - -### tldr - -**"brace bombs"** - -- brace expansion can eat up a huge amount of processing resources -- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially -- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) - -For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. - -### The solution - -Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. - -### Geometric complexity - -At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
- -For example, the following sets demonstrate quadratic (`O(n^2)`) complexity: - -``` -{1,2}{3,4} => (2X2) => 13 14 23 24 -{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246 -``` - -But add an element to a set, and we get a n-fold Cartesian product with `O(n^c)` complexity: - -``` -{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248 - 249 257 258 259 267 268 269 347 348 349 357 - 358 359 367 368 369 -``` - -Now, imagine how this complexity grows given that each element is a n-tuple: - -``` -{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB) -{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB) -``` - -Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control. - -**More information** - -Interested in learning more about brace expansion? - -- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion) -- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion) -- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) - -
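To make the growth described above concrete, here is a hedged sketch using the `braces.expand()` API shown earlier in this README; the count is what the Cartesian-product arithmetic predicts, and the `RangeError` comes from `lib/expand.js`, deleted later in this patch.

```js
const braces = require('braces');

// Two 100-element sequences multiply: 100 x 100 = 10,000 expanded strings.
console.log(braces.expand('{1..100}{1..100}').length); //=> 10000 (expected)

// A single range beyond options.rangeLimit (default 1000) throws instead of expanding.
try {
  braces.expand('{1..1000000}');
} catch (err) {
  console.log(err instanceof RangeError); //=> true (expected)
}
```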
- -## Performance - -Braces is not only screaming fast, it's also more accurate the other brace expansion libraries. - -### Better algorithms - -Fortunately there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_. - -Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently. - -**The proof is in the numbers** - -Minimatch gets exponentially slower as patterns increase in complexity, braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively. - -| **Pattern** | **braces** | **[minimatch][]** | -| --------------------------- | ------------------- | ---------------------------- | -| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) | -| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) | -| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) | -| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) | -| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) | -| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) | -| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) | -| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) | -| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) | -| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) | -| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) | -| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) | -| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) | -| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) | -| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) | -| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) | -| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) | - -### Faster algorithms - -When you need expansion, braces is still much faster. - -_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_ - -| **Pattern** | **braces** | **[minimatch][]** | -| --------------- | --------------------------- | ---------------------------- | -| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) | -| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) | -| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) | -| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) | -| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) | -| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) | -| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) | -| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) | - -If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js). - -## Benchmarks - -### Running benchmarks - -Install dev dependencies: - -```bash -npm i -d && npm benchmark -``` - -### Latest results - -Braces is more accurate, without sacrificing performance. 
- -```bash -● expand - range (expanded) - braces x 53,167 ops/sec ±0.12% (102 runs sampled) - minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) -● expand - range (optimized for regex) - braces x 373,442 ops/sec ±0.04% (100 runs sampled) - minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) -● expand - nested ranges (expanded) - braces x 33,921 ops/sec ±0.09% (99 runs sampled) - minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) -● expand - nested ranges (optimized for regex) - braces x 287,479 ops/sec ±0.52% (98 runs sampled) - minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) -● expand - set (expanded) - braces x 238,243 ops/sec ±0.19% (97 runs sampled) - minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) -● expand - set (optimized for regex) - braces x 321,844 ops/sec ±0.10% (97 runs sampled) - minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) -● expand - nested sets (expanded) - braces x 165,371 ops/sec ±0.42% (96 runs sampled) - minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) -● expand - nested sets (optimized for regex) - braces x 242,948 ops/sec ±0.12% (99 runs sampled) - minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) -``` - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Contributors - -| **Commits** | **Contributor** | -| ----------- | ------------------------------------------------------------- | -| 197 | [jonschlinkert](https://github.com/jonschlinkert) | -| 4 | [doowb](https://github.com/doowb) | -| 1 | [es128](https://github.com/es128) | -| 1 | [eush77](https://github.com/eush77) | -| 1 | [hemanth](https://github.com/hemanth) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -- [GitHub Profile](https://github.com/jonschlinkert) -- [Twitter Profile](https://twitter.com/jonschlinkert) -- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - ---- - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/node_modules/braces/index.js b/node_modules/braces/index.js deleted file mode 100644 index d222c13..0000000 --- a/node_modules/braces/index.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict'; - -const stringify = require('./lib/stringify'); -const compile = require('./lib/compile'); -const expand = require('./lib/expand'); -const parse = require('./lib/parse'); - -/** - * Expand the given pattern or create a regex-compatible string. - * - * ```js - * const braces = require('braces'); - * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] - * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {String} - * @api public - */ - -const braces = (input, options = {}) => { - let output = []; - - if (Array.isArray(input)) { - for (const pattern of input) { - const result = braces.create(pattern, options); - if (Array.isArray(result)) { - output.push(...result); - } else { - output.push(result); - } - } - } else { - output = [].concat(braces.create(input, options)); - } - - if (options && options.expand === true && options.nodupes === true) { - output = [...new Set(output)]; - } - return output; -}; - -/** - * Parse the given `str` with the given `options`. - * - * ```js - * // braces.parse(pattern, [, options]); - * const ast = braces.parse('a/{b,c}/d'); - * console.log(ast); - * ``` - * @param {String} pattern Brace pattern to parse - * @param {Object} options - * @return {Object} Returns an AST - * @api public - */ - -braces.parse = (input, options = {}) => parse(input, options); - -/** - * Creates a braces string from an AST, or an AST node. - * - * ```js - * const braces = require('braces'); - * let ast = braces.parse('foo/{a,b}/bar'); - * console.log(stringify(ast.nodes[2])); //=> '{a,b}' - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.stringify = (input, options = {}) => { - if (typeof input === 'string') { - return stringify(braces.parse(input, options), options); - } - return stringify(input, options); -}; - -/** - * Compiles a brace pattern into a regex-compatible, optimized string. - * This method is called by the main [braces](#braces) function by default. - * - * ```js - * const braces = require('braces'); - * console.log(braces.compile('a/{b,c}/d')); - * //=> ['a/(b|c)/d'] - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. 
- * @api public - */ - -braces.compile = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - return compile(input, options); -}; - -/** - * Expands a brace pattern into an array. This method is called by the - * main [braces](#braces) function when `options.expand` is true. Before - * using this method it's recommended that you read the [performance notes](#performance)) - * and advantages of using [.compile](#compile) instead. - * - * ```js - * const braces = require('braces'); - * console.log(braces.expand('a/{b,c}/d')); - * //=> ['a/b/d', 'a/c/d']; - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.expand = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - - let result = expand(input, options); - - // filter out empty strings if specified - if (options.noempty === true) { - result = result.filter(Boolean); - } - - // filter out duplicates if specified - if (options.nodupes === true) { - result = [...new Set(result)]; - } - - return result; -}; - -/** - * Processes a brace pattern and returns either an expanded array - * (if `options.expand` is true), a highly optimized regex-compatible string. - * This method is called by the main [braces](#braces) function. - * - * ```js - * const braces = require('braces'); - * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) - * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.create = (input, options = {}) => { - if (input === '' || input.length < 3) { - return [input]; - } - - return options.expand !== true - ? braces.compile(input, options) - : braces.expand(input, options); -}; - -/** - * Expose "braces" - */ - -module.exports = braces; diff --git a/node_modules/braces/lib/compile.js b/node_modules/braces/lib/compile.js deleted file mode 100644 index dce69be..0000000 --- a/node_modules/braces/lib/compile.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict'; - -const fill = require('fill-range'); -const utils = require('./utils'); - -const compile = (ast, options = {}) => { - const walk = (node, parent = {}) => { - const invalidBlock = utils.isInvalidBrace(parent); - const invalidNode = node.invalid === true && options.escapeInvalid === true; - const invalid = invalidBlock === true || invalidNode === true; - const prefix = options.escapeInvalid === true ? '\\' : ''; - let output = ''; - - if (node.isOpen === true) { - return prefix + node.value; - } - - if (node.isClose === true) { - console.log('node.isClose', prefix, node.value); - return prefix + node.value; - } - - if (node.type === 'open') { - return invalid ? prefix + node.value : '('; - } - - if (node.type === 'close') { - return invalid ? prefix + node.value : ')'; - } - - if (node.type === 'comma') { - return node.prev.type === 'comma' ? '' : invalid ? node.value : '|'; - } - - if (node.value) { - return node.value; - } - - if (node.nodes && node.ranges > 0) { - const args = utils.reduce(node.nodes); - const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); - - if (range.length !== 0) { - return args.length > 1 && range.length > 1 ? 
`(${range})` : range; - } - } - - if (node.nodes) { - for (const child of node.nodes) { - output += walk(child, node); - } - } - - return output; - }; - - return walk(ast); -}; - -module.exports = compile; diff --git a/node_modules/braces/lib/constants.js b/node_modules/braces/lib/constants.js deleted file mode 100644 index 2bb3b88..0000000 --- a/node_modules/braces/lib/constants.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict'; - -module.exports = { - MAX_LENGTH: 10000, - - // Digits - CHAR_0: '0', /* 0 */ - CHAR_9: '9', /* 9 */ - - // Alphabet chars. - CHAR_UPPERCASE_A: 'A', /* A */ - CHAR_LOWERCASE_A: 'a', /* a */ - CHAR_UPPERCASE_Z: 'Z', /* Z */ - CHAR_LOWERCASE_Z: 'z', /* z */ - - CHAR_LEFT_PARENTHESES: '(', /* ( */ - CHAR_RIGHT_PARENTHESES: ')', /* ) */ - - CHAR_ASTERISK: '*', /* * */ - - // Non-alphabetic chars. - CHAR_AMPERSAND: '&', /* & */ - CHAR_AT: '@', /* @ */ - CHAR_BACKSLASH: '\\', /* \ */ - CHAR_BACKTICK: '`', /* ` */ - CHAR_CARRIAGE_RETURN: '\r', /* \r */ - CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ - CHAR_COLON: ':', /* : */ - CHAR_COMMA: ',', /* , */ - CHAR_DOLLAR: '$', /* . */ - CHAR_DOT: '.', /* . */ - CHAR_DOUBLE_QUOTE: '"', /* " */ - CHAR_EQUAL: '=', /* = */ - CHAR_EXCLAMATION_MARK: '!', /* ! */ - CHAR_FORM_FEED: '\f', /* \f */ - CHAR_FORWARD_SLASH: '/', /* / */ - CHAR_HASH: '#', /* # */ - CHAR_HYPHEN_MINUS: '-', /* - */ - CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ - CHAR_LEFT_CURLY_BRACE: '{', /* { */ - CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ - CHAR_LINE_FEED: '\n', /* \n */ - CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ - CHAR_PERCENT: '%', /* % */ - CHAR_PLUS: '+', /* + */ - CHAR_QUESTION_MARK: '?', /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ - CHAR_RIGHT_CURLY_BRACE: '}', /* } */ - CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ - CHAR_SEMICOLON: ';', /* ; */ - CHAR_SINGLE_QUOTE: '\'', /* ' */ - CHAR_SPACE: ' ', /* */ - CHAR_TAB: '\t', /* \t */ - CHAR_UNDERSCORE: '_', /* _ */ - CHAR_VERTICAL_LINE: '|', /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ -}; diff --git a/node_modules/braces/lib/expand.js b/node_modules/braces/lib/expand.js deleted file mode 100644 index 35b2c41..0000000 --- a/node_modules/braces/lib/expand.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict'; - -const fill = require('fill-range'); -const stringify = require('./stringify'); -const utils = require('./utils'); - -const append = (queue = '', stash = '', enclose = false) => { - const result = []; - - queue = [].concat(queue); - stash = [].concat(stash); - - if (!stash.length) return queue; - if (!queue.length) { - return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; - } - - for (const item of queue) { - if (Array.isArray(item)) { - for (const value of item) { - result.push(append(value, stash, enclose)); - } - } else { - for (let ele of stash) { - if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; - result.push(Array.isArray(ele) ? append(item, ele, enclose) : item + ele); - } - } - } - return utils.flatten(result); -}; - -const expand = (ast, options = {}) => { - const rangeLimit = options.rangeLimit === undefined ? 
1000 : options.rangeLimit; - - const walk = (node, parent = {}) => { - node.queue = []; - - let p = parent; - let q = parent.queue; - - while (p.type !== 'brace' && p.type !== 'root' && p.parent) { - p = p.parent; - q = p.queue; - } - - if (node.invalid || node.dollar) { - q.push(append(q.pop(), stringify(node, options))); - return; - } - - if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { - q.push(append(q.pop(), ['{}'])); - return; - } - - if (node.nodes && node.ranges > 0) { - const args = utils.reduce(node.nodes); - - if (utils.exceedsLimit(...args, options.step, rangeLimit)) { - throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); - } - - let range = fill(...args, options); - if (range.length === 0) { - range = stringify(node, options); - } - - q.push(append(q.pop(), range)); - node.nodes = []; - return; - } - - const enclose = utils.encloseBrace(node); - let queue = node.queue; - let block = node; - - while (block.type !== 'brace' && block.type !== 'root' && block.parent) { - block = block.parent; - queue = block.queue; - } - - for (let i = 0; i < node.nodes.length; i++) { - const child = node.nodes[i]; - - if (child.type === 'comma' && node.type === 'brace') { - if (i === 1) queue.push(''); - queue.push(''); - continue; - } - - if (child.type === 'close') { - q.push(append(q.pop(), queue, enclose)); - continue; - } - - if (child.value && child.type !== 'open') { - queue.push(append(queue.pop(), child.value)); - continue; - } - - if (child.nodes) { - walk(child, node); - } - } - - return queue; - }; - - return utils.flatten(walk(ast)); -}; - -module.exports = expand; diff --git a/node_modules/braces/lib/parse.js b/node_modules/braces/lib/parse.js deleted file mode 100644 index 3a6988e..0000000 --- a/node_modules/braces/lib/parse.js +++ /dev/null @@ -1,331 +0,0 @@ -'use strict'; - -const stringify = require('./stringify'); - -/** - * Constants - */ - -const { - MAX_LENGTH, - CHAR_BACKSLASH, /* \ */ - CHAR_BACKTICK, /* ` */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_RIGHT_SQUARE_BRACKET, /* ] */ - CHAR_DOUBLE_QUOTE, /* " */ - CHAR_SINGLE_QUOTE, /* ' */ - CHAR_NO_BREAK_SPACE, - CHAR_ZERO_WIDTH_NOBREAK_SPACE -} = require('./constants'); - -/** - * parse - */ - -const parse = (input, options = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } - - const opts = options || {}; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - if (input.length > max) { - throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); - } - - const ast = { type: 'root', input, nodes: [] }; - const stack = [ast]; - let block = ast; - let prev = ast; - let brackets = 0; - const length = input.length; - let index = 0; - let depth = 0; - let value; - - /** - * Helpers - */ - - const advance = () => input[index++]; - const push = node => { - if (node.type === 'text' && prev.type === 'dot') { - prev.type = 'text'; - } - - if (prev && prev.type === 'text' && node.type === 'text') { - prev.value += node.value; - return; - } - - block.nodes.push(node); - node.parent = block; - node.prev = prev; - prev = node; - return node; - }; - - push({ type: 'bos' }); - - while (index < length) { - block = stack[stack.length - 1]; - value = advance(); - - /** - * Invalid chars - */ - - if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { - continue; - } - - /** - * Escaped chars - */ - - if (value === CHAR_BACKSLASH) { - push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); - continue; - } - - /** - * Right square bracket (literal): ']' - */ - - if (value === CHAR_RIGHT_SQUARE_BRACKET) { - push({ type: 'text', value: '\\' + value }); - continue; - } - - /** - * Left square bracket: '[' - */ - - if (value === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - - let next; - - while (index < length && (next = advance())) { - value += next; - - if (next === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - continue; - } - - if (next === CHAR_BACKSLASH) { - value += advance(); - continue; - } - - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - brackets--; - - if (brackets === 0) { - break; - } - } - } - - push({ type: 'text', value }); - continue; - } - - /** - * Parentheses - */ - - if (value === CHAR_LEFT_PARENTHESES) { - block = push({ type: 'paren', nodes: [] }); - stack.push(block); - push({ type: 'text', value }); - continue; - } - - if (value === CHAR_RIGHT_PARENTHESES) { - if (block.type !== 'paren') { - push({ type: 'text', value }); - continue; - } - block = stack.pop(); - push({ type: 'text', value }); - block = stack[stack.length - 1]; - continue; - } - - /** - * Quotes: '|"|` - */ - - if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { - const open = value; - let next; - - if (options.keepQuotes !== true) { - value = ''; - } - - while (index < length && (next = advance())) { - if (next === CHAR_BACKSLASH) { - value += next + advance(); - continue; - } - - if (next === open) { - if (options.keepQuotes === true) value += next; - break; - } - - value += next; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Left curly brace: '{' - */ - - if (value === CHAR_LEFT_CURLY_BRACE) { - depth++; - - const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; - const brace = { - type: 'brace', - open: true, - close: false, - dollar, - depth, - commas: 0, - ranges: 0, - nodes: [] - }; - - block = push(brace); - stack.push(block); - push({ type: 'open', value }); - continue; - } - - /** - * Right curly brace: '}' - */ - - if (value === CHAR_RIGHT_CURLY_BRACE) { - if (block.type !== 'brace') { - push({ type: 'text', value }); - continue; - } - - const type = 'close'; - block = stack.pop(); - block.close = true; - - push({ type, value }); - depth--; - - block = stack[stack.length - 1]; - continue; - } - - /** - * Comma: ',' - */ - - if (value === CHAR_COMMA && depth 
> 0) { - if (block.ranges > 0) { - block.ranges = 0; - const open = block.nodes.shift(); - block.nodes = [open, { type: 'text', value: stringify(block) }]; - } - - push({ type: 'comma', value }); - block.commas++; - continue; - } - - /** - * Dot: '.' - */ - - if (value === CHAR_DOT && depth > 0 && block.commas === 0) { - const siblings = block.nodes; - - if (depth === 0 || siblings.length === 0) { - push({ type: 'text', value }); - continue; - } - - if (prev.type === 'dot') { - block.range = []; - prev.value += value; - prev.type = 'range'; - - if (block.nodes.length !== 3 && block.nodes.length !== 5) { - block.invalid = true; - block.ranges = 0; - prev.type = 'text'; - continue; - } - - block.ranges++; - block.args = []; - continue; - } - - if (prev.type === 'range') { - siblings.pop(); - - const before = siblings[siblings.length - 1]; - before.value += prev.value + value; - prev = before; - block.ranges--; - continue; - } - - push({ type: 'dot', value }); - continue; - } - - /** - * Text - */ - - push({ type: 'text', value }); - } - - // Mark imbalanced braces and brackets as invalid - do { - block = stack.pop(); - - if (block.type !== 'root') { - block.nodes.forEach(node => { - if (!node.nodes) { - if (node.type === 'open') node.isOpen = true; - if (node.type === 'close') node.isClose = true; - if (!node.nodes) node.type = 'text'; - node.invalid = true; - } - }); - - // get the location of the block on parent.nodes (block's siblings) - const parent = stack[stack.length - 1]; - const index = parent.nodes.indexOf(block); - // replace the (invalid) block with it's nodes - parent.nodes.splice(index, 1, ...block.nodes); - } - } while (stack.length > 0); - - push({ type: 'eos' }); - return ast; -}; - -module.exports = parse; diff --git a/node_modules/braces/lib/stringify.js b/node_modules/braces/lib/stringify.js deleted file mode 100644 index 8bcf872..0000000 --- a/node_modules/braces/lib/stringify.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; - -const utils = require('./utils'); - -module.exports = (ast, options = {}) => { - const stringify = (node, parent = {}) => { - const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); - const invalidNode = node.invalid === true && options.escapeInvalid === true; - let output = ''; - - if (node.value) { - if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { - return '\\' + node.value; - } - return node.value; - } - - if (node.value) { - return node.value; - } - - if (node.nodes) { - for (const child of node.nodes) { - output += stringify(child); - } - } - return output; - }; - - return stringify(ast); -}; - diff --git a/node_modules/braces/lib/utils.js b/node_modules/braces/lib/utils.js deleted file mode 100644 index d19311f..0000000 --- a/node_modules/braces/lib/utils.js +++ /dev/null @@ -1,122 +0,0 @@ -'use strict'; - -exports.isInteger = num => { - if (typeof num === 'number') { - return Number.isInteger(num); - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isInteger(Number(num)); - } - return false; -}; - -/** - * Find a node of the given type - */ - -exports.find = (node, type) => node.nodes.find(node => node.type === type); - -/** - * Find a node of the given type - */ - -exports.exceedsLimit = (min, max, step = 1, limit) => { - if (limit === false) return false; - if (!exports.isInteger(min) || !exports.isInteger(max)) return false; - return ((Number(max) - Number(min)) / Number(step)) >= limit; -}; - -/** - * Escape the given node with '\\' before node.value - */ - -exports.escapeNode 
= (block, n = 0, type) => { - const node = block.nodes[n]; - if (!node) return; - - if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { - if (node.escaped !== true) { - node.value = '\\' + node.value; - node.escaped = true; - } - } -}; - -/** - * Returns true if the given brace node should be enclosed in literal braces - */ - -exports.encloseBrace = node => { - if (node.type !== 'brace') return false; - if ((node.commas >> 0 + node.ranges >> 0) === 0) { - node.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a brace node is invalid. - */ - -exports.isInvalidBrace = block => { - if (block.type !== 'brace') return false; - if (block.invalid === true || block.dollar) return true; - if ((block.commas >> 0 + block.ranges >> 0) === 0) { - block.invalid = true; - return true; - } - if (block.open !== true || block.close !== true) { - block.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a node is an open or close node - */ - -exports.isOpenOrClose = node => { - if (node.type === 'open' || node.type === 'close') { - return true; - } - return node.open === true || node.close === true; -}; - -/** - * Reduce an array of text nodes. - */ - -exports.reduce = nodes => nodes.reduce((acc, node) => { - if (node.type === 'text') acc.push(node.value); - if (node.type === 'range') node.type = 'text'; - return acc; -}, []); - -/** - * Flatten an array - */ - -exports.flatten = (...args) => { - const result = []; - - const flat = arr => { - for (let i = 0; i < arr.length; i++) { - const ele = arr[i]; - - if (Array.isArray(ele)) { - flat(ele); - continue; - } - - if (ele !== undefined) { - result.push(ele); - } - } - return result; - }; - - flat(args); - return result; -}; diff --git a/node_modules/braces/package.json b/node_modules/braces/package.json deleted file mode 100644 index c3c056e..0000000 --- a/node_modules/braces/package.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "name": "braces", - "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", - "version": "3.0.3", - "homepage": "https://github.com/micromatch/braces", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Brian Woodward (https://twitter.com/doowb)", - "Elan Shanker (https://github.com/es128)", - "Eugene Sharygin (https://github.com/eush77)", - "hemanth.hm (http://h3manth.com)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "micromatch/braces", - "bugs": { - "url": "https://github.com/micromatch/braces/issues" - }, - "license": "MIT", - "files": [ - "index.js", - "lib" - ], - "main": "index.js", - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "mocha", - "benchmark": "node benchmark" - }, - "dependencies": { - "fill-range": "^7.1.1" - }, - "devDependencies": { - "ansi-colors": "^3.2.4", - "bash-path": "^2.0.1", - "gulp-format-md": "^2.0.0", - "mocha": "^6.1.1" - }, - "keywords": [ - "alpha", - "alphabetical", - "bash", - "brace", - "braces", - "expand", - "expansion", - "filepath", - "fill", - "fs", - "glob", - "globbing", - "letter", - "match", - "matches", - "matching", - "number", - "numerical", - "path", - "range", - "ranges", - "sh" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "lint": { - "reflinks": true - }, - "plugins": [ - "gulp-format-md" - ] - } -} diff --git a/node_modules/cross-spawn/LICENSE b/node_modules/cross-spawn/LICENSE deleted file mode 100644 index 8407b9a..0000000 --- a/node_modules/cross-spawn/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Made With MOXY Lda - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
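The braces modules deleted above (lib/parse.js, lib/stringify.js, lib/utils.js and the package manifest) implement Bash-like brace expansion. As a rough sketch of the public API that parser backs — not part of this patch, and assuming braces@3.x is still installed elsewhere — usage looks like:

```js
// Sketch only: illustrates the API implemented by the deleted parse/stringify modules.
const braces = require('braces');

// Expand a brace pattern into concrete strings.
console.log(braces.expand('file-{a,b,c}.txt'));
//=> ['file-a.txt', 'file-b.txt', 'file-c.txt']

// Default (compile) mode returns an optimized pattern instead of expanding.
console.log(braces('foo/{1..3}/bar'));
//=> ['foo/(1|2|3)/bar']
```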
diff --git a/node_modules/cross-spawn/README.md b/node_modules/cross-spawn/README.md deleted file mode 100644 index 1ed9252..0000000 --- a/node_modules/cross-spawn/README.md +++ /dev/null @@ -1,89 +0,0 @@ -# cross-spawn - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][ci-image]][ci-url] [![Build status][appveyor-image]][appveyor-url] - -[npm-url]:https://npmjs.org/package/cross-spawn -[downloads-image]:https://img.shields.io/npm/dm/cross-spawn.svg -[npm-image]:https://img.shields.io/npm/v/cross-spawn.svg -[ci-url]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml -[ci-image]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml/badge.svg -[appveyor-url]:https://ci.appveyor.com/project/satazor/node-cross-spawn -[appveyor-image]:https://img.shields.io/appveyor/ci/satazor/node-cross-spawn/master.svg - -A cross platform solution to node's spawn and spawnSync. - -## Installation - -Node.js version 8 and up: -`$ npm install cross-spawn` - -Node.js version 7 and under: -`$ npm install cross-spawn@6` - -## Why - -Node has issues when using spawn on Windows: - -- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318) -- It does not support [shebangs](https://en.wikipedia.org/wiki/Shebang_(Unix)) -- Has problems running commands with [spaces](https://github.com/nodejs/node/issues/7367) -- Has problems running commands with posix relative paths (e.g.: `./my-folder/my-executable`) -- Has an [issue](https://github.com/moxystudio/node-cross-spawn/issues/82) with command shims (files in `node_modules/.bin/`), where arguments with quotes and parenthesis would result in [invalid syntax error](https://github.com/moxystudio/node-cross-spawn/blob/e77b8f22a416db46b6196767bcd35601d7e11d54/test/index.test.js#L149) -- No `options.shell` support on node `` where `` must not contain any arguments. -If you would like to have the shebang support improved, feel free to contribute via a pull-request. - -Remember to always test your code on Windows! - - -## Tests - -`$ npm test` -`$ npm test -- --watch` during development - - -## License - -Released under the [MIT License](https://www.opensource.org/licenses/mit-license.php). 
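The README deleted above describes cross-spawn as a drop-in replacement for Node's `spawn`/`spawnSync` that works around the listed Windows issues (its original usage snippet is partly garbled in the diff). A minimal usage sketch, assuming cross-spawn@7, looks like:

```js
// cross-spawn mirrors child_process.spawn/spawnSync with Windows fixes.
const spawn = require('cross-spawn');

// Asynchronous spawn
const child = spawn('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
child.on('exit', code => { process.exitCode = code; });

// Synchronous spawn
const result = spawn.sync('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
```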
diff --git a/node_modules/cross-spawn/index.js b/node_modules/cross-spawn/index.js deleted file mode 100644 index 5509742..0000000 --- a/node_modules/cross-spawn/index.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -const cp = require('child_process'); -const parse = require('./lib/parse'); -const enoent = require('./lib/enoent'); - -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); - - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); - - return spawned; -} - -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); - - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); - - return result; -} - -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; - -module.exports._parse = parse; -module.exports._enoent = enoent; diff --git a/node_modules/cross-spawn/lib/enoent.js b/node_modules/cross-spawn/lib/enoent.js deleted file mode 100644 index da33471..0000000 --- a/node_modules/cross-spawn/lib/enoent.js +++ /dev/null @@ -1,59 +0,0 @@ -'use strict'; - -const isWin = process.platform === 'win32'; - -function notFoundError(original, syscall) { - return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { - code: 'ENOENT', - errno: 'ENOENT', - syscall: `${syscall} ${original.command}`, - path: original.command, - spawnargs: original.args, - }); -} - -function hookChildProcess(cp, parsed) { - if (!isWin) { - return; - } - - const originalEmit = cp.emit; - - cp.emit = function (name, arg1) { - // If emitting "exit" event and exit code is 1, we need to check if - // the command exists and emit an "error" instead - // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 - if (name === 'exit') { - const err = verifyENOENT(arg1, parsed); - - if (err) { - return originalEmit.call(cp, 'error', err); - } - } - - return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params - }; -} - -function verifyENOENT(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawn'); - } - - return null; -} - -function verifyENOENTSync(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawnSync'); - } - - return null; -} - -module.exports = { - hookChildProcess, - verifyENOENT, - verifyENOENTSync, - notFoundError, -}; diff --git a/node_modules/cross-spawn/lib/parse.js b/node_modules/cross-spawn/lib/parse.js deleted file mode 100644 index 0129d74..0000000 --- a/node_modules/cross-spawn/lib/parse.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; - -const path = require('path'); -const resolveCommand = require('./util/resolveCommand'); -const escape = require('./util/escape'); -const readShebang = require('./util/readShebang'); - -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; - -function detectShebang(parsed) { - 
parsed.file = resolveCommand(parsed); - - const shebang = parsed.file && readShebang(parsed.file); - - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; - - return resolveCommand(parsed); - } - - return parsed.file; -} - -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } - - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); - - // We don't need a shell if the command filename is an executable - const needsShell = !isExecutableRegExp.test(commandFile); - - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to double escape them - const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); - - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = path.normalize(parsed.command); - - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - - const shellCommand = [parsed.command].concat(parsed.args).join(' '); - - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } - - return parsed; -} - -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } - - args = args ? args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original - - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; - - // Delegate further parsing to shell or non-shell - return options.shell ? 
parsed : parseNonShell(parsed); -} - -module.exports = parse; diff --git a/node_modules/cross-spawn/lib/util/escape.js b/node_modules/cross-spawn/lib/util/escape.js deleted file mode 100644 index 7bf2905..0000000 --- a/node_modules/cross-spawn/lib/util/escape.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; - -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - return arg; -} - -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; - - // Algorithm below is based on https://qntm.org/cmd - // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input - // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information - - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"'); - - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1'); - - // All other backslashes occur literally - - // Quote the whole thing: - arg = `"${arg}"`; - - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } - - return arg; -} - -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; diff --git a/node_modules/cross-spawn/lib/util/readShebang.js b/node_modules/cross-spawn/lib/util/readShebang.js deleted file mode 100644 index 5e83733..0000000 --- a/node_modules/cross-spawn/lib/util/readShebang.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict'; - -const fs = require('fs'); -const shebangCommand = require('shebang-command'); - -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - const buffer = Buffer.alloc(size); - - let fd; - - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } - - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); -} - -module.exports = readShebang; diff --git a/node_modules/cross-spawn/lib/util/resolveCommand.js b/node_modules/cross-spawn/lib/util/resolveCommand.js deleted file mode 100644 index 7972455..0000000 --- a/node_modules/cross-spawn/lib/util/resolveCommand.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; - -const path = require('path'); -const which = require('which'); -const getPathKey = require('path-key'); - -function resolveCommandAttempt(parsed, withoutPathExt) { - const env = parsed.options.env || process.env; - const cwd = process.cwd(); - const hasCustomCwd = parsed.options.cwd != null; - // Worker threads do not have process.chdir() - const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled; - - // If a custom `cwd` was specified, we need to change the process cwd - // because `which` will do stat calls but does not support a custom cwd - if (shouldSwitchCwd) { - try { - process.chdir(parsed.options.cwd); - } catch (err) { - /* Empty */ - } - } - - let resolved; - - try { - resolved = which.sync(parsed.command, { - path: env[getPathKey({ env })], - pathExt: withoutPathExt ? 
path.delimiter : undefined, - }); - } catch (e) { - /* Empty */ - } finally { - if (shouldSwitchCwd) { - process.chdir(cwd); - } - } - - // If we successfully resolved, ensure that an absolute path is returned - // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it - if (resolved) { - resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); - } - - return resolved; -} - -function resolveCommand(parsed) { - return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); -} - -module.exports = resolveCommand; diff --git a/node_modules/cross-spawn/package.json b/node_modules/cross-spawn/package.json deleted file mode 100644 index 24b2eb4..0000000 --- a/node_modules/cross-spawn/package.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "name": "cross-spawn", - "version": "7.0.6", - "description": "Cross platform child_process#spawn and child_process#spawnSync", - "keywords": [ - "spawn", - "spawnSync", - "windows", - "cross-platform", - "path-ext", - "shebang", - "cmd", - "execute" - ], - "author": "André Cruz ", - "homepage": "https://github.com/moxystudio/node-cross-spawn", - "repository": { - "type": "git", - "url": "git@github.com:moxystudio/node-cross-spawn.git" - }, - "license": "MIT", - "main": "index.js", - "files": [ - "lib" - ], - "scripts": { - "lint": "eslint .", - "test": "jest --env node --coverage", - "prerelease": "npm t && npm run lint", - "release": "standard-version", - "postrelease": "git push --follow-tags origin HEAD && npm publish" - }, - "husky": { - "hooks": { - "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", - "pre-commit": "lint-staged" - } - }, - "lint-staged": { - "*.js": [ - "eslint --fix", - "git add" - ] - }, - "commitlint": { - "extends": [ - "@commitlint/config-conventional" - ] - }, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "devDependencies": { - "@commitlint/cli": "^8.1.0", - "@commitlint/config-conventional": "^8.1.0", - "babel-core": "^6.26.3", - "babel-jest": "^24.9.0", - "babel-preset-moxy": "^3.1.0", - "eslint": "^5.16.0", - "eslint-config-moxy": "^7.1.0", - "husky": "^3.0.5", - "jest": "^24.9.0", - "lint-staged": "^9.2.5", - "mkdirp": "^0.5.1", - "rimraf": "^3.0.0", - "standard-version": "^9.5.0" - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/execa/index.d.ts b/node_modules/execa/index.d.ts deleted file mode 100644 index 417d535..0000000 --- a/node_modules/execa/index.d.ts +++ /dev/null @@ -1,564 +0,0 @@ -/// -import {ChildProcess} from 'child_process'; -import {Stream, Readable as ReadableStream} from 'stream'; - -declare namespace execa { - type StdioOption = - | 'pipe' - | 'ipc' - | 'ignore' - | 'inherit' - | Stream - | number - | undefined; - - interface CommonOptions { - /** - Kill the spawned process when the parent process exits unless either: - - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) - - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit - - @default true - */ - readonly cleanup?: boolean; - - /** - Prefer locally installed binaries when looking for a binary to execute. - - If you `$ npm install foo`, you can then `execa('foo')`. - - @default false - */ - readonly preferLocal?: boolean; - - /** - Preferred path to find locally installed binaries in (use with `preferLocal`). 
- - @default process.cwd() - */ - readonly localDir?: string; - - /** - Path to the Node.js executable to use in child processes. - - This can be either an absolute path or a path relative to the `cwd` option. - - Requires `preferLocal` to be `true`. - - For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. - - @default process.execPath - */ - readonly execPath?: string; - - /** - Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected. - - If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data. - - @default true - */ - readonly buffer?: boolean; - - /** - Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - - @default 'pipe' - */ - readonly stdin?: StdioOption; - - /** - Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - - @default 'pipe' - */ - readonly stdout?: StdioOption; - - /** - Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - - @default 'pipe' - */ - readonly stderr?: StdioOption; - - /** - Setting this to `false` resolves the promise with the error instead of rejecting it. - - @default true - */ - readonly reject?: boolean; - - /** - Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved. - - @default false - */ - readonly all?: boolean; - - /** - Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. - - @default true - */ - readonly stripFinalNewline?: boolean; - - /** - Set to `false` if you don't want to extend the environment variables when providing the `env` property. - - @default true - */ - readonly extendEnv?: boolean; - - /** - Current working directory of the child process. - - @default process.cwd() - */ - readonly cwd?: string; - - /** - Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this. - - @default process.env - */ - readonly env?: NodeJS.ProcessEnv; - - /** - Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified. - */ - readonly argv0?: string; - - /** - Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. - - @default 'pipe' - */ - readonly stdio?: 'pipe' | 'ignore' | 'inherit' | readonly StdioOption[]; - - /** - Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execa.node()`: - - `json`: Uses `JSON.stringify()` and `JSON.parse()`. - - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) - - Requires Node.js `13.2.0` or later. - - [More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) - - @default 'json' - */ - readonly serialization?: 'json' | 'advanced'; - - /** - Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). 
- - @default false - */ - readonly detached?: boolean; - - /** - Sets the user identity of the process. - */ - readonly uid?: number; - - /** - Sets the group identity of the process. - */ - readonly gid?: number; - - /** - If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. - - We recommend against using this option since it is: - - not cross-platform, encouraging shell-specific syntax. - - slower, because of the additional shell interpretation. - - unsafe, potentially allowing command injection. - - @default false - */ - readonly shell?: boolean | string; - - /** - Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. - - @default 'utf8' - */ - readonly encoding?: EncodingType; - - /** - If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds. - - @default 0 - */ - readonly timeout?: number; - - /** - Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB. - - @default 100_000_000 - */ - readonly maxBuffer?: number; - - /** - Signal value to be used when the spawned process will be killed. - - @default 'SIGTERM' - */ - readonly killSignal?: string | number; - - /** - If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. - - @default false - */ - readonly windowsVerbatimArguments?: boolean; - - /** - On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. - - @default true - */ - readonly windowsHide?: boolean; - } - - interface Options extends CommonOptions { - /** - Write some input to the `stdin` of your binary. - */ - readonly input?: string | Buffer | ReadableStream; - } - - interface SyncOptions extends CommonOptions { - /** - Write some input to the `stdin` of your binary. - */ - readonly input?: string | Buffer; - } - - interface NodeOptions extends Options { - /** - The Node.js executable to use. - - @default process.execPath - */ - readonly nodePath?: string; - - /** - List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. - - @default process.execArgv - */ - readonly nodeOptions?: string[]; - } - - interface ExecaReturnBase { - /** - The file and arguments that were run, for logging purposes. - - This is not escaped and should not be executed directly as a process, including using `execa()` or `execa.command()`. - */ - command: string; - - /** - Same as `command` but escaped. - - This is meant to be copy and pasted into a shell, for debugging purposes. - Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execa.command()`. - */ - escapedCommand: string; - - /** - The numeric exit code of the process that was run. - */ - exitCode: number; - - /** - The output of the process on stdout. - */ - stdout: StdoutStderrType; - - /** - The output of the process on stderr. - */ - stderr: StdoutStderrType; - - /** - Whether the process failed to run. - */ - failed: boolean; - - /** - Whether the process timed out. 
- */ - timedOut: boolean; - - /** - Whether the process was killed. - */ - killed: boolean; - - /** - The name of the signal that was used to terminate the process. For example, `SIGFPE`. - - If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. - */ - signal?: string; - - /** - A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. - - If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. - */ - signalDescription?: string; - } - - interface ExecaSyncReturnValue - extends ExecaReturnBase { - } - - /** - Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. - - The child process fails when: - - its exit code is not `0` - - it was killed with a signal - - timing out - - being canceled - - there's not enough memory or there are already too many child processes - */ - interface ExecaReturnValue - extends ExecaSyncReturnValue { - /** - The output of the process with `stdout` and `stderr` interleaved. - - This is `undefined` if either: - - the `all` option is `false` (default value) - - `execa.sync()` was used - */ - all?: StdoutErrorType; - - /** - Whether the process was canceled. - */ - isCanceled: boolean; - } - - interface ExecaSyncError - extends Error, - ExecaReturnBase { - /** - Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored. - - The child process stderr then stdout are appended to the end, separated with newlines and not interleaved. - */ - message: string; - - /** - This is the same as the `message` property except it does not include the child process stdout/stderr. - */ - shortMessage: string; - - /** - Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. - - This is `undefined` unless the child process exited due to an `error` event or a timeout. - */ - originalMessage?: string; - } - - interface ExecaError - extends ExecaSyncError { - /** - The output of the process with `stdout` and `stderr` interleaved. - - This is `undefined` if either: - - the `all` option is `false` (default value) - - `execa.sync()` was used - */ - all?: StdoutErrorType; - - /** - Whether the process was canceled. - */ - isCanceled: boolean; - } - - interface KillOptions { - /** - Milliseconds to wait for the child process to terminate before sending `SIGKILL`. - - Can be disabled with `false`. - - @default 5000 - */ - forceKillAfterTimeout?: number | false; - } - - interface ExecaChildPromise { - /** - Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). 
- - This is `undefined` if either: - - the `all` option is `false` (the default value) - - both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) - */ - all?: ReadableStream; - - catch( - onRejected?: (reason: ExecaError) => ResultType | PromiseLike - ): Promise | ResultType>; - - /** - Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. - */ - kill(signal?: string, options?: KillOptions): void; - - /** - Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. - */ - cancel(): void; - } - - type ExecaChildProcess = ChildProcess & - ExecaChildPromise & - Promise>; -} - -declare const execa: { - /** - Execute a file. - - Think of this as a mix of `child_process.execFile` and `child_process.spawn`. - - @param file - The program/script to execute. - @param arguments - Arguments to pass to `file` on execution. - @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. - - @example - ``` - import execa = require('execa'); - - (async () => { - const {stdout} = await execa('echo', ['unicorns']); - console.log(stdout); - //=> 'unicorns' - - // Cancelling a spawned process - - const subprocess = execa('node'); - - setTimeout(() => { - subprocess.cancel() - }, 1000); - - try { - await subprocess; - } catch (error) { - console.log(subprocess.killed); // true - console.log(error.isCanceled); // true - } - })(); - - // Pipe the child process stdout to the current stdout - execa('echo', ['unicorns']).stdout.pipe(process.stdout); - ``` - */ - ( - file: string, - arguments?: readonly string[], - options?: execa.Options - ): execa.ExecaChildProcess; - ( - file: string, - arguments?: readonly string[], - options?: execa.Options - ): execa.ExecaChildProcess; - (file: string, options?: execa.Options): execa.ExecaChildProcess; - (file: string, options?: execa.Options): execa.ExecaChildProcess< - Buffer - >; - - /** - Execute a file synchronously. - - This method throws an `Error` if the command fails. - - @param file - The program/script to execute. - @param arguments - Arguments to pass to `file` on execution. - @returns A result `Object` with `stdout` and `stderr` properties. - */ - sync( - file: string, - arguments?: readonly string[], - options?: execa.SyncOptions - ): execa.ExecaSyncReturnValue; - sync( - file: string, - arguments?: readonly string[], - options?: execa.SyncOptions - ): execa.ExecaSyncReturnValue; - sync(file: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; - sync( - file: string, - options?: execa.SyncOptions - ): execa.ExecaSyncReturnValue; - - /** - Same as `execa()` except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. - - If the file or an argument contains spaces, they must be escaped with backslashes. 
This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. - - The `shell` option must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. - - @param command - The program/script to execute and its arguments. - @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. - - @example - ``` - import execa = require('execa'); - - (async () => { - const {stdout} = await execa.command('echo unicorns'); - console.log(stdout); - //=> 'unicorns' - })(); - ``` - */ - command(command: string, options?: execa.Options): execa.ExecaChildProcess; - command(command: string, options?: execa.Options): execa.ExecaChildProcess; - - /** - Same as `execa.command()` but synchronous. - - @param command - The program/script to execute and its arguments. - @returns A result `Object` with `stdout` and `stderr` properties. - */ - commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; - commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; - - /** - Execute a Node.js script as a child process. - - Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): - - the current Node version and options are used. This can be overridden using the `nodePath` and `nodeArguments` options. - - the `shell` option cannot be used - - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) - - @param scriptPath - Node.js script to execute. - @param arguments - Arguments to pass to `scriptPath` on execution. - @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. 
- */ - node( - scriptPath: string, - arguments?: readonly string[], - options?: execa.NodeOptions - ): execa.ExecaChildProcess; - node( - scriptPath: string, - arguments?: readonly string[], - options?: execa.Options - ): execa.ExecaChildProcess; - node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; - node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; -}; - -export = execa; diff --git a/node_modules/execa/index.js b/node_modules/execa/index.js deleted file mode 100644 index 6fc9f12..0000000 --- a/node_modules/execa/index.js +++ /dev/null @@ -1,268 +0,0 @@ -'use strict'; -const path = require('path'); -const childProcess = require('child_process'); -const crossSpawn = require('cross-spawn'); -const stripFinalNewline = require('strip-final-newline'); -const npmRunPath = require('npm-run-path'); -const onetime = require('onetime'); -const makeError = require('./lib/error'); -const normalizeStdio = require('./lib/stdio'); -const {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} = require('./lib/kill'); -const {handleInput, getSpawnedResult, makeAllStream, validateInputSync} = require('./lib/stream'); -const {mergePromise, getSpawnedPromise} = require('./lib/promise'); -const {joinCommand, parseCommand, getEscapedCommand} = require('./lib/command'); - -const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100; - -const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => { - const env = extendEnv ? {...process.env, ...envOption} : envOption; - - if (preferLocal) { - return npmRunPath.env({env, cwd: localDir, execPath}); - } - - return env; -}; - -const handleArguments = (file, args, options = {}) => { - const parsed = crossSpawn._parse(file, args, options); - file = parsed.command; - args = parsed.args; - options = parsed.options; - - options = { - maxBuffer: DEFAULT_MAX_BUFFER, - buffer: true, - stripFinalNewline: true, - extendEnv: true, - preferLocal: false, - localDir: options.cwd || process.cwd(), - execPath: process.execPath, - encoding: 'utf8', - reject: true, - cleanup: true, - all: false, - windowsHide: true, - ...options - }; - - options.env = getEnv(options); - - options.stdio = normalizeStdio(options); - - if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') { - // #116 - args.unshift('/q'); - } - - return {file, args, options, parsed}; -}; - -const handleOutput = (options, value, error) => { - if (typeof value !== 'string' && !Buffer.isBuffer(value)) { - // When `execa.sync()` errors, we normalize it to '' to mimic `execa()` - return error === undefined ? 
undefined : ''; - } - - if (options.stripFinalNewline) { - return stripFinalNewline(value); - } - - return value; -}; - -const execa = (file, args, options) => { - const parsed = handleArguments(file, args, options); - const command = joinCommand(file, args); - const escapedCommand = getEscapedCommand(file, args); - - validateTimeout(parsed.options); - - let spawned; - try { - spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options); - } catch (error) { - // Ensure the returned error is always both a promise and a child process - const dummySpawned = new childProcess.ChildProcess(); - const errorPromise = Promise.reject(makeError({ - error, - stdout: '', - stderr: '', - all: '', - command, - escapedCommand, - parsed, - timedOut: false, - isCanceled: false, - killed: false - })); - return mergePromise(dummySpawned, errorPromise); - } - - const spawnedPromise = getSpawnedPromise(spawned); - const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise); - const processDone = setExitHandler(spawned, parsed.options, timedPromise); - - const context = {isCanceled: false}; - - spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned)); - spawned.cancel = spawnedCancel.bind(null, spawned, context); - - const handlePromise = async () => { - const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone); - const stdout = handleOutput(parsed.options, stdoutResult); - const stderr = handleOutput(parsed.options, stderrResult); - const all = handleOutput(parsed.options, allResult); - - if (error || exitCode !== 0 || signal !== null) { - const returnedError = makeError({ - error, - exitCode, - signal, - stdout, - stderr, - all, - command, - escapedCommand, - parsed, - timedOut, - isCanceled: context.isCanceled, - killed: spawned.killed - }); - - if (!parsed.options.reject) { - return returnedError; - } - - throw returnedError; - } - - return { - command, - escapedCommand, - exitCode: 0, - stdout, - stderr, - all, - failed: false, - timedOut: false, - isCanceled: false, - killed: false - }; - }; - - const handlePromiseOnce = onetime(handlePromise); - - handleInput(spawned, parsed.options.input); - - spawned.all = makeAllStream(spawned, parsed.options); - - return mergePromise(spawned, handlePromiseOnce); -}; - -module.exports = execa; - -module.exports.sync = (file, args, options) => { - const parsed = handleArguments(file, args, options); - const command = joinCommand(file, args); - const escapedCommand = getEscapedCommand(file, args); - - validateInputSync(parsed.options); - - let result; - try { - result = childProcess.spawnSync(parsed.file, parsed.args, parsed.options); - } catch (error) { - throw makeError({ - error, - stdout: '', - stderr: '', - all: '', - command, - escapedCommand, - parsed, - timedOut: false, - isCanceled: false, - killed: false - }); - } - - const stdout = handleOutput(parsed.options, result.stdout, result.error); - const stderr = handleOutput(parsed.options, result.stderr, result.error); - - if (result.error || result.status !== 0 || result.signal !== null) { - const error = makeError({ - stdout, - stderr, - error: result.error, - signal: result.signal, - exitCode: result.status, - command, - escapedCommand, - parsed, - timedOut: result.error && result.error.code === 'ETIMEDOUT', - isCanceled: false, - killed: result.signal !== null - }); - - if (!parsed.options.reject) { - return error; - } - - throw error; - } - - return { - command, - escapedCommand, - 
exitCode: 0, - stdout, - stderr, - failed: false, - timedOut: false, - isCanceled: false, - killed: false - }; -}; - -module.exports.command = (command, options) => { - const [file, ...args] = parseCommand(command); - return execa(file, args, options); -}; - -module.exports.commandSync = (command, options) => { - const [file, ...args] = parseCommand(command); - return execa.sync(file, args, options); -}; - -module.exports.node = (scriptPath, args, options = {}) => { - if (args && !Array.isArray(args) && typeof args === 'object') { - options = args; - args = []; - } - - const stdio = normalizeStdio.node(options); - const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect')); - - const { - nodePath = process.execPath, - nodeOptions = defaultExecArgv - } = options; - - return execa( - nodePath, - [ - ...nodeOptions, - scriptPath, - ...(Array.isArray(args) ? args : []) - ], - { - ...options, - stdin: undefined, - stdout: undefined, - stderr: undefined, - stdio, - shell: false - } - ); -}; diff --git a/node_modules/execa/lib/command.js b/node_modules/execa/lib/command.js deleted file mode 100644 index 859b006..0000000 --- a/node_modules/execa/lib/command.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -const normalizeArgs = (file, args = []) => { - if (!Array.isArray(args)) { - return [file]; - } - - return [file, ...args]; -}; - -const NO_ESCAPE_REGEXP = /^[\w.-]+$/; -const DOUBLE_QUOTES_REGEXP = /"/g; - -const escapeArg = arg => { - if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) { - return arg; - } - - return `"${arg.replace(DOUBLE_QUOTES_REGEXP, '\\"')}"`; -}; - -const joinCommand = (file, args) => { - return normalizeArgs(file, args).join(' '); -}; - -const getEscapedCommand = (file, args) => { - return normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' '); -}; - -const SPACES_REGEXP = / +/g; - -// Handle `execa.command()` -const parseCommand = command => { - const tokens = []; - for (const token of command.trim().split(SPACES_REGEXP)) { - // Allow spaces to be escaped by a backslash if not meant as a delimiter - const previousToken = tokens[tokens.length - 1]; - if (previousToken && previousToken.endsWith('\\')) { - // Merge previous token with current one - tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`; - } else { - tokens.push(token); - } - } - - return tokens; -}; - -module.exports = { - joinCommand, - getEscapedCommand, - parseCommand -}; diff --git a/node_modules/execa/lib/error.js b/node_modules/execa/lib/error.js deleted file mode 100644 index 4214467..0000000 --- a/node_modules/execa/lib/error.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict'; -const {signalsByName} = require('human-signals'); - -const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => { - if (timedOut) { - return `timed out after ${timeout} milliseconds`; - } - - if (isCanceled) { - return 'was canceled'; - } - - if (errorCode !== undefined) { - return `failed with ${errorCode}`; - } - - if (signal !== undefined) { - return `was killed with ${signal} (${signalDescription})`; - } - - if (exitCode !== undefined) { - return `failed with exit code ${exitCode}`; - } - - return 'failed'; -}; - -const makeError = ({ - stdout, - stderr, - all, - error, - signal, - exitCode, - command, - escapedCommand, - timedOut, - isCanceled, - killed, - parsed: {options: {timeout}} -}) => { - // `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`. 
- // We normalize them to `undefined` - exitCode = exitCode === null ? undefined : exitCode; - signal = signal === null ? undefined : signal; - const signalDescription = signal === undefined ? undefined : signalsByName[signal].description; - - const errorCode = error && error.code; - - const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}); - const execaMessage = `Command ${prefix}: ${command}`; - const isError = Object.prototype.toString.call(error) === '[object Error]'; - const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage; - const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n'); - - if (isError) { - error.originalMessage = error.message; - error.message = message; - } else { - error = new Error(message); - } - - error.shortMessage = shortMessage; - error.command = command; - error.escapedCommand = escapedCommand; - error.exitCode = exitCode; - error.signal = signal; - error.signalDescription = signalDescription; - error.stdout = stdout; - error.stderr = stderr; - - if (all !== undefined) { - error.all = all; - } - - if ('bufferedData' in error) { - delete error.bufferedData; - } - - error.failed = true; - error.timedOut = Boolean(timedOut); - error.isCanceled = isCanceled; - error.killed = killed && !timedOut; - - return error; -}; - -module.exports = makeError; diff --git a/node_modules/execa/lib/kill.js b/node_modules/execa/lib/kill.js deleted file mode 100644 index 287a142..0000000 --- a/node_modules/execa/lib/kill.js +++ /dev/null @@ -1,115 +0,0 @@ -'use strict'; -const os = require('os'); -const onExit = require('signal-exit'); - -const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5; - -// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior -const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => { - const killResult = kill(signal); - setKillTimeout(kill, signal, options, killResult); - return killResult; -}; - -const setKillTimeout = (kill, signal, options, killResult) => { - if (!shouldForceKill(signal, options, killResult)) { - return; - } - - const timeout = getForceKillAfterTimeout(options); - const t = setTimeout(() => { - kill('SIGKILL'); - }, timeout); - - // Guarded because there's no `.unref()` when `execa` is used in the renderer - // process in Electron. This cannot be tested since we don't run tests in - // Electron. 
- // istanbul ignore else - if (t.unref) { - t.unref(); - } -}; - -const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => { - return isSigterm(signal) && forceKillAfterTimeout !== false && killResult; -}; - -const isSigterm = signal => { - return signal === os.constants.signals.SIGTERM || - (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM'); -}; - -const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => { - if (forceKillAfterTimeout === true) { - return DEFAULT_FORCE_KILL_TIMEOUT; - } - - if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) { - throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`); - } - - return forceKillAfterTimeout; -}; - -// `childProcess.cancel()` -const spawnedCancel = (spawned, context) => { - const killResult = spawned.kill(); - - if (killResult) { - context.isCanceled = true; - } -}; - -const timeoutKill = (spawned, signal, reject) => { - spawned.kill(signal); - reject(Object.assign(new Error('Timed out'), {timedOut: true, signal})); -}; - -// `timeout` option handling -const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => { - if (timeout === 0 || timeout === undefined) { - return spawnedPromise; - } - - let timeoutId; - const timeoutPromise = new Promise((resolve, reject) => { - timeoutId = setTimeout(() => { - timeoutKill(spawned, killSignal, reject); - }, timeout); - }); - - const safeSpawnedPromise = spawnedPromise.finally(() => { - clearTimeout(timeoutId); - }); - - return Promise.race([timeoutPromise, safeSpawnedPromise]); -}; - -const validateTimeout = ({timeout}) => { - if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) { - throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`); - } -}; - -// `cleanup` option handling -const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => { - if (!cleanup || detached) { - return timedPromise; - } - - const removeExitHandler = onExit(() => { - spawned.kill(); - }); - - return timedPromise.finally(() => { - removeExitHandler(); - }); -}; - -module.exports = { - spawnedKill, - spawnedCancel, - setupTimeout, - validateTimeout, - setExitHandler -}; diff --git a/node_modules/execa/lib/promise.js b/node_modules/execa/lib/promise.js deleted file mode 100644 index bd9d523..0000000 --- a/node_modules/execa/lib/promise.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict'; - -const nativePromisePrototype = (async () => {})().constructor.prototype; -const descriptors = ['then', 'catch', 'finally'].map(property => [ - property, - Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property) -]); - -// The return value is a mixin of `childProcess` and `Promise` -const mergePromise = (spawned, promise) => { - for (const [property, descriptor] of descriptors) { - // Starting the main `promise` is deferred to avoid consuming streams - const value = typeof promise === 'function' ? 
- (...args) => Reflect.apply(descriptor.value, promise(), args) : - descriptor.value.bind(promise); - - Reflect.defineProperty(spawned, property, {...descriptor, value}); - } - - return spawned; -}; - -// Use promises instead of `child_process` events -const getSpawnedPromise = spawned => { - return new Promise((resolve, reject) => { - spawned.on('exit', (exitCode, signal) => { - resolve({exitCode, signal}); - }); - - spawned.on('error', error => { - reject(error); - }); - - if (spawned.stdin) { - spawned.stdin.on('error', error => { - reject(error); - }); - } - }); -}; - -module.exports = { - mergePromise, - getSpawnedPromise -}; - diff --git a/node_modules/execa/lib/stdio.js b/node_modules/execa/lib/stdio.js deleted file mode 100644 index 45129ed..0000000 --- a/node_modules/execa/lib/stdio.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -const aliases = ['stdin', 'stdout', 'stderr']; - -const hasAlias = options => aliases.some(alias => options[alias] !== undefined); - -const normalizeStdio = options => { - if (!options) { - return; - } - - const {stdio} = options; - - if (stdio === undefined) { - return aliases.map(alias => options[alias]); - } - - if (hasAlias(options)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`); - } - - if (typeof stdio === 'string') { - return stdio; - } - - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); - } - - const length = Math.max(stdio.length, aliases.length); - return Array.from({length}, (value, index) => stdio[index]); -}; - -module.exports = normalizeStdio; - -// `ipc` is pushed unless it is already present -module.exports.node = options => { - const stdio = normalizeStdio(options); - - if (stdio === 'ipc') { - return 'ipc'; - } - - if (stdio === undefined || typeof stdio === 'string') { - return [stdio, stdio, stdio, 'ipc']; - } - - if (stdio.includes('ipc')) { - return stdio; - } - - return [...stdio, 'ipc']; -}; diff --git a/node_modules/execa/lib/stream.js b/node_modules/execa/lib/stream.js deleted file mode 100644 index d445dd4..0000000 --- a/node_modules/execa/lib/stream.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict'; -const isStream = require('is-stream'); -const getStream = require('get-stream'); -const mergeStream = require('merge-stream'); - -// `input` option -const handleInput = (spawned, input) => { - // Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852 - // @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0 - if (input === undefined || spawned.stdin === undefined) { - return; - } - - if (isStream(input)) { - input.pipe(spawned.stdin); - } else { - spawned.stdin.end(input); - } -}; - -// `all` interleaves `stdout` and `stderr` -const makeAllStream = (spawned, {all}) => { - if (!all || (!spawned.stdout && !spawned.stderr)) { - return; - } - - const mixed = mergeStream(); - - if (spawned.stdout) { - mixed.add(spawned.stdout); - } - - if (spawned.stderr) { - mixed.add(spawned.stderr); - } - - return mixed; -}; - -// On failure, `result.stdout|stderr|all` should contain the currently buffered stream -const getBufferedData = async (stream, streamPromise) => { - if (!stream) { - return; - } - - stream.destroy(); - - try { - return await streamPromise; - } catch (error) { - return error.bufferedData; - } -}; - -const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => { - if (!stream || 
!buffer) { - return; - } - - if (encoding) { - return getStream(stream, {encoding, maxBuffer}); - } - - return getStream.buffer(stream, {maxBuffer}); -}; - -// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all) -const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => { - const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer}); - const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer}); - const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2}); - - try { - return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]); - } catch (error) { - return Promise.all([ - {error, signal: error.signal, timedOut: error.timedOut}, - getBufferedData(stdout, stdoutPromise), - getBufferedData(stderr, stderrPromise), - getBufferedData(all, allPromise) - ]); - } -}; - -const validateInputSync = ({input}) => { - if (isStream(input)) { - throw new TypeError('The `input` option cannot be a stream in sync mode'); - } -}; - -module.exports = { - handleInput, - makeAllStream, - getSpawnedResult, - validateInputSync -}; - diff --git a/node_modules/execa/license b/node_modules/execa/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/execa/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
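
The `all` stream that `makeAllStream` builds in `lib/stream.js` above boils down to piping `stdout` and `stderr` into one merged stream. A minimal standalone sketch of that idea, assuming the `merge-stream` package is installed:

```js
const {spawn} = require('child_process');
const mergeStream = require('merge-stream');

// Interleave a child's stdout and stderr into one readable stream,
// which is essentially what execa exposes as `childProcess.all`.
const child = spawn('node', ['-e', 'console.log("out"); console.error("err");']);
const all = mergeStream(child.stdout, child.stderr);

// Chunks arrive in the order the child writes them.
all.pipe(process.stdout);
```
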
diff --git a/node_modules/execa/package.json b/node_modules/execa/package.json deleted file mode 100644 index 22556f2..0000000 --- a/node_modules/execa/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "execa", - "version": "5.1.1", - "description": "Process execution for humans", - "license": "MIT", - "repository": "sindresorhus/execa", - "funding": "https://github.com/sindresorhus/execa?sponsor=1", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=10" - }, - "scripts": { - "test": "xo && nyc ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts", - "lib" - ], - "keywords": [ - "exec", - "child", - "process", - "execute", - "fork", - "execfile", - "spawn", - "file", - "shell", - "bin", - "binary", - "binaries", - "npm", - "path", - "local" - ], - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "devDependencies": { - "@types/node": "^14.14.10", - "ava": "^2.4.0", - "get-node": "^11.0.1", - "is-running": "^2.1.0", - "nyc": "^15.1.0", - "p-event": "^4.2.0", - "tempfile": "^3.0.0", - "tsd": "^0.13.1", - "xo": "^0.35.0" - }, - "nyc": { - "reporter": [ - "text", - "lcov" - ], - "exclude": [ - "**/fixtures/**", - "**/test.js", - "**/test/**" - ] - } -} diff --git a/node_modules/execa/readme.md b/node_modules/execa/readme.md deleted file mode 100644 index 843edbc..0000000 --- a/node_modules/execa/readme.md +++ /dev/null @@ -1,663 +0,0 @@ - -
- -[![Coverage Status](https://codecov.io/gh/sindresorhus/execa/branch/main/graph/badge.svg)](https://codecov.io/gh/sindresorhus/execa) - -> Process execution for humans - -## Why - -This package improves [`child_process`](https://nodejs.org/api/child_process.html) methods with: - -- Promise interface. -- [Strips the final newline](#stripfinalnewline) from the output so you don't have to do `stdout.trim()`. -- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform. -- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why) -- Higher max buffer. 100 MB instead of 200 KB. -- [Executes locally installed binaries by name.](#preferlocal) -- [Cleans up spawned processes when the parent process dies.](#cleanup) -- [Get interleaved output](#all) from `stdout` and `stderr` similar to what is printed on the terminal. [*(Async only)*](#execasyncfile-arguments-options) -- [Can specify file and arguments as a single string without a shell](#execacommandcommand-options) -- More descriptive errors. - -## Install - -``` -$ npm install execa -``` - -## Usage - -```js -const execa = require('execa'); - -(async () => { - const {stdout} = await execa('echo', ['unicorns']); - console.log(stdout); - //=> 'unicorns' -})(); -``` - -### Pipe the child process stdout to the parent - -```js -const execa = require('execa'); - -execa('echo', ['unicorns']).stdout.pipe(process.stdout); -``` - -### Handling Errors - -```js -const execa = require('execa'); - -(async () => { - // Catching an error - try { - await execa('unknown', ['command']); - } catch (error) { - console.log(error); - /* - { - message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', - errno: -2, - code: 'ENOENT', - syscall: 'spawn unknown', - path: 'unknown', - spawnargs: ['command'], - originalMessage: 'spawn unknown ENOENT', - shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', - command: 'unknown command', - escapedCommand: 'unknown command', - stdout: '', - stderr: '', - all: '', - failed: true, - timedOut: false, - isCanceled: false, - killed: false - } - */ - } - -})(); -``` - -### Cancelling a spawned process - -```js -const execa = require('execa'); - -(async () => { - const subprocess = execa('node'); - - setTimeout(() => { - subprocess.cancel(); - }, 1000); - - try { - await subprocess; - } catch (error) { - console.log(subprocess.killed); // true - console.log(error.isCanceled); // true - } -})() -``` - -### Catching an error with the sync method - -```js -try { - execa.sync('unknown', ['command']); -} catch (error) { - console.log(error); - /* - { - message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', - errno: -2, - code: 'ENOENT', - syscall: 'spawnSync unknown', - path: 'unknown', - spawnargs: ['command'], - originalMessage: 'spawnSync unknown ENOENT', - shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', - command: 'unknown command', - escapedCommand: 'unknown command', - stdout: '', - stderr: '', - all: '', - failed: true, - timedOut: false, - isCanceled: false, - killed: false - } - */ -} -``` - -### Kill a process - -Using SIGTERM, and after 2 seconds, kill it with SIGKILL. - -```js -const subprocess = execa('node'); - -setTimeout(() => { - subprocess.kill('SIGTERM', { - forceKillAfterTimeout: 2000 - }); -}, 1000); -``` - -## API - -### execa(file, arguments, options?) - -Execute a file. 
Think of this as a mix of [`child_process.execFile()`](https://nodejs.org/api/child_process.html#child_process_child_process_execfile_file_args_options_callback) and [`child_process.spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). - -No escaping/quoting is needed. - -Unless the [`shell`](#shell) option is used, no shell interpreter (Bash, `cmd.exe`, etc.) is used, so shell features such as variables substitution (`echo $PATH`) are not allowed. - -Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) which: - - is also a `Promise` resolving or rejecting with a [`childProcessResult`](#childProcessResult). - - exposes the following additional methods and properties. - -#### kill(signal?, options?) - -Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal) except: if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. - -##### options.forceKillAfterTimeout - -Type: `number | false`\ -Default: `5000` - -Milliseconds to wait for the child process to terminate before sending `SIGKILL`. - -Can be disabled with `false`. - -#### cancel() - -Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. - -#### all - -Type: `ReadableStream | undefined` - -Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). - -This is `undefined` if either: - - the [`all` option](#all-2) is `false` (the default value) - - both [`stdout`](#stdout-1) and [`stderr`](#stderr-1) options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) - -### execa.sync(file, arguments?, options?) - -Execute a file synchronously. - -Returns or throws a [`childProcessResult`](#childProcessResult). - -### execa.command(command, options?) - -Same as [`execa()`](#execafile-arguments-options) except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. - -If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. - -The [`shell` option](#shell) must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. - -### execa.commandSync(command, options?) - -Same as [`execa.command()`](#execacommand-command-options) but synchronous. - -Returns or throws a [`childProcessResult`](#childProcessResult). - -### execa.node(scriptPath, arguments?, options?) - -Execute a Node.js script as a child process. 
- -Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): - - the current Node version and options are used. This can be overridden using the [`nodePath`](#nodepath-for-node-only) and [`nodeOptions`](#nodeoptions-for-node-only) options. - - the [`shell`](#shell) option cannot be used - - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) - -### childProcessResult - -Type: `object` - -Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. - -The child process [fails](#failed) when: -- its [exit code](#exitcode) is not `0` -- it was [killed](#killed) with a [signal](#signal) -- [timing out](#timedout) -- [being canceled](#iscanceled) -- there's not enough memory or there are already too many child processes - -#### command - -Type: `string` - -The file and arguments that were run, for logging purposes. - -This is not escaped and should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). - -#### escapedCommand - -Type: `string` - -Same as [`command`](#command) but escaped. - -This is meant to be copy and pasted into a shell, for debugging purposes. -Since the escaping is fairly basic, this should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). - -#### exitCode - -Type: `number` - -The numeric exit code of the process that was run. - -#### stdout - -Type: `string | Buffer` - -The output of the process on stdout. - -#### stderr - -Type: `string | Buffer` - -The output of the process on stderr. - -#### all - -Type: `string | Buffer | undefined` - -The output of the process with `stdout` and `stderr` interleaved. - -This is `undefined` if either: - - the [`all` option](#all-2) is `false` (the default value) - - `execa.sync()` was used - -#### failed - -Type: `boolean` - -Whether the process failed to run. - -#### timedOut - -Type: `boolean` - -Whether the process timed out. - -#### isCanceled - -Type: `boolean` - -Whether the process was canceled. - -#### killed - -Type: `boolean` - -Whether the process was killed. - -#### signal - -Type: `string | undefined` - -The name of the signal that was used to terminate the process. For example, `SIGFPE`. - -If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. - -#### signalDescription - -Type: `string | undefined` - -A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. - -If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. - -#### message - -Type: `string` - -Error message when the child process failed to run. In addition to the [underlying error message](#originalMessage), it also contains some information related to why the child process errored. - -The child process [stderr](#stderr) then [stdout](#stdout) are appended to the end, separated with newlines and not interleaved. 
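
A minimal sketch of reading these result fields, assuming a Unix-like environment where `echo` and `false` are available; `reject: false` is used so the failing run resolves with the same fields instead of rejecting:

```js
const execa = require('execa');

(async () => {
	// Successful run: inspect the fields described above
	const ok = await execa('echo', ['unicorns']);
	console.log(ok.command, ok.exitCode, ok.failed); // 'echo unicorns' 0 false

	// Failing run: with `reject: false` the same fields are returned instead of thrown
	const bad = await execa('false', [], {reject: false});
	console.log(bad.exitCode, bad.failed, bad.isCanceled); // 1 true false
})();
```
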
- -#### shortMessage - -Type: `string` - -This is the same as the [`message` property](#message) except it does not include the child process stdout/stderr. - -#### originalMessage - -Type: `string | undefined` - -Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. - -This is `undefined` unless the child process exited due to an `error` event or a timeout. - -### options - -Type: `object` - -#### cleanup - -Type: `boolean`\ -Default: `true` - -Kill the spawned process when the parent process exits unless either: - - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) - - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit - -#### preferLocal - -Type: `boolean`\ -Default: `false` - -Prefer locally installed binaries when looking for a binary to execute.\ -If you `$ npm install foo`, you can then `execa('foo')`. - -#### localDir - -Type: `string`\ -Default: `process.cwd()` - -Preferred path to find locally installed binaries in (use with `preferLocal`). - -#### execPath - -Type: `string`\ -Default: `process.execPath` (Current Node.js executable) - -Path to the Node.js executable to use in child processes. - -This can be either an absolute path or a path relative to the [`cwd` option](#cwd). - -Requires [`preferLocal`](#preferlocal) to be `true`. - -For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. - -#### buffer - -Type: `boolean`\ -Default: `true` - -Buffer the output from the spawned process. When set to `false`, you must read the output of [`stdout`](#stdout-1) and [`stderr`](#stderr-1) (or [`all`](#all) if the [`all`](#all-2) option is `true`). Otherwise the returned promise will not be resolved/rejected. - -If the spawned process fails, [`error.stdout`](#stdout), [`error.stderr`](#stderr), and [`error.all`](#all) will contain the buffered data. - -#### input - -Type: `string | Buffer | stream.Readable` - -Write some input to the `stdin` of your binary.\ -Streams are not allowed when using the synchronous methods. - -#### stdin - -Type: `string | number | Stream | undefined`\ -Default: `pipe` - -Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - -#### stdout - -Type: `string | number | Stream | undefined`\ -Default: `pipe` - -Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - -#### stderr - -Type: `string | number | Stream | undefined`\ -Default: `pipe` - -Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - -#### all - -Type: `boolean`\ -Default: `false` - -Add an `.all` property on the [promise](#all) and the [resolved value](#all-1). The property contains the output of the process with `stdout` and `stderr` interleaved. - -#### reject - -Type: `boolean`\ -Default: `true` - -Setting this to `false` resolves the promise with the error instead of rejecting it. - -#### stripFinalNewline - -Type: `boolean`\ -Default: `true` - -Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. 
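
A small sketch combining a few of these options (`input`, `all`, `stripFinalNewline`), assuming a Unix-like environment where `cat` is available:

```js
const execa = require('execa');

(async () => {
	// `input` is written to the child's stdin, `all: true` adds the interleaved
	// output, and `stripFinalNewline: false` keeps the trailing newline.
	const {stdout, all} = await execa('cat', [], {
		input: 'unicorns\n',
		all: true,
		stripFinalNewline: false
	});

	console.log(stdout === 'unicorns\n'); // true (final newline kept)
	console.log(all === stdout); // true (nothing was written to stderr)
})();
```
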
- -#### extendEnv - -Type: `boolean`\ -Default: `true` - -Set to `false` if you don't want to extend the environment variables when providing the `env` property. - ---- - -Execa also accepts the below options which are the same as the options for [`child_process#spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options)/[`child_process#exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) - -#### cwd - -Type: `string`\ -Default: `process.cwd()` - -Current working directory of the child process. - -#### env - -Type: `object`\ -Default: `process.env` - -Environment key-value pairs. Extends automatically from `process.env`. Set [`extendEnv`](#extendenv) to `false` if you don't want this. - -#### argv0 - -Type: `string` - -Explicitly set the value of `argv[0]` sent to the child process. This will be set to `file` if not specified. - -#### stdio - -Type: `string | string[]`\ -Default: `pipe` - -Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. - -#### serialization - -Type: `string`\ -Default: `'json'` - -Specify the kind of serialization used for sending messages between processes when using the [`stdio: 'ipc'`](#stdio) option or [`execa.node()`](#execanodescriptpath-arguments-options): - - `json`: Uses `JSON.stringify()` and `JSON.parse()`. - - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) - -Requires Node.js `13.2.0` or later. - -[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) - -#### detached - -Type: `boolean` - -Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). - -#### uid - -Type: `number` - -Sets the user identity of the process. - -#### gid - -Type: `number` - -Sets the group identity of the process. - -#### shell - -Type: `boolean | string`\ -Default: `false` - -If `true`, runs `file` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. - -We recommend against using this option since it is: -- not cross-platform, encouraging shell-specific syntax. -- slower, because of the additional shell interpretation. -- unsafe, potentially allowing command injection. - -#### encoding - -Type: `string | null`\ -Default: `utf8` - -Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. - -#### timeout - -Type: `number`\ -Default: `0` - -If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds. - -#### maxBuffer - -Type: `number`\ -Default: `100_000_000` (100 MB) - -Largest amount of data in bytes allowed on `stdout` or `stderr`. - -#### killSignal - -Type: `string | number`\ -Default: `SIGTERM` - -Signal value to be used when the spawned process will be killed. - -#### windowsVerbatimArguments - -Type: `boolean`\ -Default: `false` - -If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. 
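
A short sketch of `timeout` and `killSignal` working together, assuming a Unix-like environment where `sleep` is available:

```js
const execa = require('execa');

(async () => {
	try {
		// Kill the child with SIGKILL if it runs longer than 500 milliseconds
		await execa('sleep', ['10'], {timeout: 500, killSignal: 'SIGKILL'});
	} catch (error) {
		console.log(error.timedOut); // true
		console.log(error.signal); // 'SIGKILL'
	}
})();
```
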
- -#### windowsHide - -Type: `boolean`\ -Default: `true` - -On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. - -#### nodePath *(For `.node()` only)* - -Type: `string`\ -Default: [`process.execPath`](https://nodejs.org/api/process.html#process_process_execpath) - -Node.js executable used to create the child process. - -#### nodeOptions *(For `.node()` only)* - -Type: `string[]`\ -Default: [`process.execArgv`](https://nodejs.org/api/process.html#process_process_execargv) - -List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. - -## Tips - -### Retry on error - -Gracefully handle failures by using automatic retries and exponential backoff with the [`p-retry`](https://github.com/sindresorhus/p-retry) package: - -```js -const pRetry = require('p-retry'); - -const run = async () => { - const results = await execa('curl', ['-sSL', 'https://sindresorhus.com/unicorn']); - return results; -}; - -(async () => { - console.log(await pRetry(run, {retries: 5})); -})(); -``` - -### Save and pipe output from a child process - -Let's say you want to show the output of a child process in real-time while also saving it to a variable. - -```js -const execa = require('execa'); - -const subprocess = execa('echo', ['foo']); -subprocess.stdout.pipe(process.stdout); - -(async () => { - const {stdout} = await subprocess; - console.log('child output:', stdout); -})(); -``` - -### Redirect output to a file - -```js -const execa = require('execa'); - -const subprocess = execa('echo', ['foo']) -subprocess.stdout.pipe(fs.createWriteStream('stdout.txt')) -``` - -### Redirect input from a file - -```js -const execa = require('execa'); - -const subprocess = execa('cat') -fs.createReadStream('stdin.txt').pipe(subprocess.stdin) -``` - -### Execute the current package's binary - -```js -const {getBinPathSync} = require('get-bin-path'); - -const binPath = getBinPathSync(); -const subprocess = execa(binPath); -``` - -`execa` can be combined with [`get-bin-path`](https://github.com/ehmicky/get-bin-path) to test the current package's binary. As opposed to hard-coding the path to the binary, this validates that the `package.json` `bin` field is correctly set up. - -## Related - -- [gulp-execa](https://github.com/ehmicky/gulp-execa) - Gulp plugin for `execa` -- [nvexeca](https://github.com/ehmicky/nvexeca) - Run `execa` using any Node.js version -- [sudo-prompt](https://github.com/jorangreef/sudo-prompt) - Run commands with elevated privileges. - -## Maintainers - -- [Sindre Sorhus](https://github.com/sindresorhus) -- [@ehmicky](https://github.com/ehmicky) - ---- - -
- - Get professional support for this package with a Tidelift subscription. - - Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. -
diff --git a/node_modules/fast-glob/LICENSE b/node_modules/fast-glob/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/fast-glob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/fast-glob/README.md b/node_modules/fast-glob/README.md deleted file mode 100644 index 1d7843a..0000000 --- a/node_modules/fast-glob/README.md +++ /dev/null @@ -1,830 +0,0 @@ -# fast-glob - -> It's a very fast and efficient [glob][glob_definition] library for [Node.js][node_js]. - -This package provides methods for traversing the file system and returning pathnames that matched a defined set of a specified pattern according to the rules used by the Unix Bash shell with some simplifications, meanwhile results are returned in **arbitrary order**. Quick, simple, effective. - -## Table of Contents - -
-Details - -* [Highlights](#highlights) -* [Old and modern mode](#old-and-modern-mode) -* [Pattern syntax](#pattern-syntax) - * [Basic syntax](#basic-syntax) - * [Advanced syntax](#advanced-syntax) -* [Installation](#installation) -* [API](#api) - * [Asynchronous](#asynchronous) - * [Synchronous](#synchronous) - * [Stream](#stream) - * [patterns](#patterns) - * [[options]](#options) - * [Helpers](#helpers) - * [generateTasks](#generatetaskspatterns-options) - * [isDynamicPattern](#isdynamicpatternpattern-options) - * [escapePath](#escapepathpath) - * [convertPathToPattern](#convertpathtopatternpath) -* [Options](#options-3) - * [Common](#common) - * [concurrency](#concurrency) - * [cwd](#cwd) - * [deep](#deep) - * [followSymbolicLinks](#followsymboliclinks) - * [fs](#fs) - * [ignore](#ignore) - * [suppressErrors](#suppresserrors) - * [throwErrorOnBrokenSymbolicLink](#throwerroronbrokensymboliclink) - * [Output control](#output-control) - * [absolute](#absolute) - * [markDirectories](#markdirectories) - * [objectMode](#objectmode) - * [onlyDirectories](#onlydirectories) - * [onlyFiles](#onlyfiles) - * [stats](#stats) - * [unique](#unique) - * [Matching control](#matching-control) - * [braceExpansion](#braceexpansion) - * [caseSensitiveMatch](#casesensitivematch) - * [dot](#dot) - * [extglob](#extglob) - * [globstar](#globstar) - * [baseNameMatch](#basenamematch) -* [FAQ](#faq) - * [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) - * [How to write patterns on Windows?](#how-to-write-patterns-on-windows) - * [Why are parentheses match wrong?](#why-are-parentheses-match-wrong) - * [How to exclude directory from reading?](#how-to-exclude-directory-from-reading) - * [How to use UNC path?](#how-to-use-unc-path) - * [Compatible with `node-glob`?](#compatible-with-node-glob) -* [Benchmarks](#benchmarks) - * [Server](#server) - * [Nettop](#nettop) -* [Changelog](#changelog) -* [License](#license) - -
- -## Highlights - -* Fast. Probably the fastest. -* Supports multiple and negative patterns. -* Synchronous, Promise and Stream API. -* Object mode. Can return more than just strings. -* Error-tolerant. - -## Old and modern mode - -This package works in two modes, depending on the environment in which it is used. - -* **Old mode**. Node.js below 10.10 or when the [`stats`](#stats) option is *enabled*. -* **Modern mode**. Node.js 10.10+ and the [`stats`](#stats) option is *disabled*. - -The modern mode is faster. Learn more about the [internal mechanism][nodelib_fs_scandir_old_and_modern_modern]. - -## Pattern syntax - -> :warning: Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. - -There is more than one form of syntax: basic and advanced. Below is a brief overview of the supported features. Also pay attention to our [FAQ](#faq). - -> :book: This package uses [`micromatch`][micromatch] as a library for pattern matching. - -### Basic syntax - -* An asterisk (`*`) — matches everything except slashes (path separators), hidden files (names starting with `.`). -* A double star or globstar (`**`) — matches zero or more directories. -* Question mark (`?`) – matches any single character except slashes (path separators). -* Sequence (`[seq]`) — matches any character in sequence. - -> :book: A few additional words about the [basic matching behavior][picomatch_matching_behavior]. - -Some examples: - -* `src/**/*.js` — matches all files in the `src` directory (any level of nesting) that have the `.js` extension. -* `src/*.??` — matches all files in the `src` directory (only first level of nesting) that have a two-character extension. -* `file-[01].js` — matches files: `file-0.js`, `file-1.js`. - -### Advanced syntax - -* [Escapes characters][micromatch_backslashes] (`\\`) — matching special characters (`$^*+?()[]`) as literals. -* [POSIX character classes][picomatch_posix_brackets] (`[[:digit:]]`). -* [Extended globs][micromatch_extglobs] (`?(pattern-list)`). -* [Bash style brace expansions][micromatch_braces] (`{}`). -* [Regexp character classes][micromatch_regex_character_classes] (`[1-5]`). -* [Regex groups][regular_expressions_brackets] (`(a|b)`). - -> :book: A few additional words about the [advanced matching behavior][micromatch_extended_globbing]. - -Some examples: - -* `src/**/*.{css,scss}` — matches all files in the `src` directory (any level of nesting) that have the `.css` or `.scss` extension. -* `file-[[:digit:]].js` — matches files: `file-0.js`, `file-1.js`, …, `file-9.js`. -* `file-{1..3}.js` — matches files: `file-1.js`, `file-2.js`, `file-3.js`. -* `file-(1|2)` — matches files: `file-1.js`, `file-2.js`. - -## Installation - -```console -npm install fast-glob -``` - -## API - -### Asynchronous - -```js -fg(patterns, [options]) -fg.async(patterns, [options]) -fg.glob(patterns, [options]) -``` - -Returns a `Promise` with an array of matching entries. - -```js -const fg = require('fast-glob'); - -const entries = await fg(['.editorconfig', '**/index.js'], { dot: true }); - -// ['.editorconfig', 'services/index.js'] -``` - -### Synchronous - -```js -fg.sync(patterns, [options]) -fg.globSync(patterns, [options]) -``` - -Returns an array of matching entries. 
- -```js -const fg = require('fast-glob'); - -const entries = fg.sync(['.editorconfig', '**/index.js'], { dot: true }); - -// ['.editorconfig', 'services/index.js'] -``` - -### Stream - -```js -fg.stream(patterns, [options]) -fg.globStream(patterns, [options]) -``` - -Returns a [`ReadableStream`][node_js_stream_readable_streams] when the `data` event will be emitted with matching entry. - -```js -const fg = require('fast-glob'); - -const stream = fg.stream(['.editorconfig', '**/index.js'], { dot: true }); - -for await (const entry of stream) { - // .editorconfig - // services/index.js -} -``` - -#### patterns - -* Required: `true` -* Type: `string | string[]` - -Any correct pattern(s). - -> :1234: [Pattern syntax](#pattern-syntax) -> -> :warning: This package does not respect the order of patterns. First, all the negative patterns are applied, and only then the positive patterns. If you want to get a certain order of records, use sorting or split calls. - -#### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -### Helpers - -#### `generateTasks(patterns, [options])` - -Returns the internal representation of patterns ([`Task`](./src/managers/tasks.ts) is a combining patterns by base directory). - -```js -fg.generateTasks('*'); - -[{ - base: '.', // Parent directory for all patterns inside this task - dynamic: true, // Dynamic or static patterns are in this task - patterns: ['*'], - positive: ['*'], - negative: [] -}] -``` - -##### patterns - -* Required: `true` -* Type: `string | string[]` - -Any correct pattern(s). - -##### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -#### `isDynamicPattern(pattern, [options])` - -Returns `true` if the passed pattern is a dynamic pattern. - -> :1234: [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) - -```js -fg.isDynamicPattern('*'); // true -fg.isDynamicPattern('abc'); // false -``` - -##### pattern - -* Required: `true` -* Type: `string` - -Any correct pattern. - -##### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -#### `escapePath(path)` - -Returns the path with escaped special characters depending on the platform. - -* Posix: - * `*?|(){}[]`; - * `!` at the beginning of line; - * `@+!` before the opening parenthesis; - * `\\` before non-special characters; -* Windows: - * `(){}[]` - * `!` at the beginning of line; - * `@+!` before the opening parenthesis; - * Characters like `*?|` cannot be used in the path ([windows_naming_conventions][windows_naming_conventions]), so they will not be escaped; - -```js -fg.escapePath('!abc'); -// \\!abc -fg.escapePath('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac' -// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac - -fg.posix.escapePath('C:\\Program Files (x86)\\**\\*'); -// C:\\\\Program Files \\(x86\\)\\*\\*\\* -fg.win32.escapePath('C:\\Program Files (x86)\\**\\*'); -// Windows: C:\\Program Files \\(x86\\)\\**\\* -``` - -#### `convertPathToPattern(path)` - -Converts a path to a pattern depending on the platform, including special character escaping. - -* Posix. Works similarly to the `fg.posix.escapePath` method. -* Windows. Works similarly to the `fg.win32.escapePath` method, additionally converting backslashes to forward slashes in cases where they are not escape characters (`!()+@{}[]`). 
- -```js -fg.convertPathToPattern('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac'; -// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac - -fg.convertPathToPattern('C:/Program Files (x86)/**/*'); -// Posix: C:/Program Files \\(x86\\)/\\*\\*/\\* -// Windows: C:/Program Files \\(x86\\)/**/* - -fg.convertPathToPattern('C:\\Program Files (x86)\\**\\*'); -// Posix: C:\\\\Program Files \\(x86\\)\\*\\*\\* -// Windows: C:/Program Files \\(x86\\)/**/* - -fg.posix.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; -// Posix: \\\\\\?\\\\c:\\\\Program Files \\(x86\\)/**/* (broken pattern) -fg.win32.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; -// Windows: //?/c:/Program Files \\(x86\\)/**/* -``` - -## Options - -### Common options - -#### concurrency - -* Type: `number` -* Default: `os.cpus().length` - -Specifies the maximum number of concurrent requests from a reader to read directories. - -> :book: The higher the number, the higher the performance and load on the file system. If you want to read in quiet mode, set the value to a comfortable number or `1`. - -
- -More details - -In Node, there are [two types of threads][nodejs_thread_pool]: Event Loop (code) and a Thread Pool (fs, dns, …). The thread pool size controlled by the `UV_THREADPOOL_SIZE` environment variable. Its default size is 4 ([documentation][libuv_thread_pool]). The pool is one for all tasks within a single Node process. - -Any code can make 4 real concurrent accesses to the file system. The rest of the FS requests will wait in the queue. - -> :book: Each new instance of FG in the same Node process will use the same Thread pool. - -But this package also has the `concurrency` option. This option allows you to control the number of concurrent accesses to the FS at the package level. By default, this package has a value equal to the number of cores available for the current Node process. This allows you to set a value smaller than the pool size (`concurrency: 1`) or, conversely, to prepare tasks for the pool queue more quickly (`concurrency: Number.POSITIVE_INFINITY`). - -So, in fact, this package can **only make 4 concurrent requests to the FS**. You can increase this value by using an environment variable (`UV_THREADPOOL_SIZE`), but in practice this does not give a multiple advantage. - -
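
A short sketch of lowering `concurrency`; the pattern and value are only illustrative:

```js
const fg = require('fast-glob');

(async () => {
    // Read directories one at a time ("quiet mode") instead of `os.cpus().length` at once
    const entries = await fg('src/**/*.js', {concurrency: 1});

    console.log(entries);
})();
```
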
- -#### cwd - -* Type: `string` -* Default: `process.cwd()` - -The current working directory in which to search. - -#### deep - -* Type: `number` -* Default: `Infinity` - -Specifies the maximum depth of a read directory relative to the start directory. - -For example, you have the following tree: - -```js -dir/ -└── one/ // 1 - └── two/ // 2 - └── file.js // 3 -``` - -```js -// With base directory -fg.sync('dir/**', { onlyFiles: false, deep: 1 }); // ['dir/one'] -fg.sync('dir/**', { onlyFiles: false, deep: 2 }); // ['dir/one', 'dir/one/two'] - -// With cwd option -fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 1 }); // ['one'] -fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 2 }); // ['one', 'one/two'] -``` - -> :book: If you specify a pattern with some base directory, this directory will not participate in the calculation of the depth of the found directories. Think of it as a [`cwd`](#cwd) option. - -#### followSymbolicLinks - -* Type: `boolean` -* Default: `true` - -Indicates whether to traverse descendants of symbolic link directories when expanding `**` patterns. - -> :book: Note that this option does not affect the base directory of the pattern. For example, if `./a` is a symlink to directory `./b` and you specified `['./a**', './b/**']` patterns, then directory `./a` will still be read. - -> :book: If the [`stats`](#stats) option is specified, the information about the symbolic link (`fs.lstat`) will be replaced with information about the entry (`fs.stat`) behind it. - -#### fs - -* Type: `FileSystemAdapter` -* Default: `fs.*` - -Custom implementation of methods for working with the file system. Supports objects with enumerable properties only. - -```ts -export interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; - readdir?: typeof fs.readdir; - readdirSync?: typeof fs.readdirSync; -} -``` - -#### ignore - -* Type: `string[]` -* Default: `[]` - -An array of glob patterns to exclude matches. This is an alternative way to use negative patterns. - -```js -dir/ -├── package-lock.json -└── package.json -``` - -```js -fg.sync(['*.json', '!package-lock.json']); // ['package.json'] -fg.sync('*.json', { ignore: ['package-lock.json'] }); // ['package.json'] -``` - -#### suppressErrors - -* Type: `boolean` -* Default: `false` - -By default this package suppress only `ENOENT` errors. Set to `true` to suppress any error. - -> :book: Can be useful when the directory has entries with a special level of access. - -#### throwErrorOnBrokenSymbolicLink - -* Type: `boolean` -* Default: `false` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -> :book: This option has no effect on errors when reading the symbolic link directory. - -### Output control - -#### absolute - -* Type: `boolean` -* Default: `false` - -Return the absolute path for entries. - -```js -fg.sync('*.js', { absolute: false }); // ['index.js'] -fg.sync('*.js', { absolute: true }); // ['/home/user/index.js'] -``` - -> :book: This option is required if you want to use negative patterns with absolute path, for example, `!${__dirname}/*.js`. - -#### markDirectories - -* Type: `boolean` -* Default: `false` - -Mark the directory path with the final slash. 
- -```js -fg.sync('*', { onlyFiles: false, markDirectories: false }); // ['index.js', 'controllers'] -fg.sync('*', { onlyFiles: false, markDirectories: true }); // ['index.js', 'controllers/'] -``` - -#### objectMode - -* Type: `boolean` -* Default: `false` - -Returns objects (instead of strings) describing entries. - -```js -fg.sync('*', { objectMode: false }); // ['src/index.js'] -fg.sync('*', { objectMode: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: }] -``` - -The object has the following fields: - -* name (`string`) — the last part of the path (basename) -* path (`string`) — full path relative to the pattern base directory -* dirent ([`fs.Dirent`][node_js_fs_class_fs_dirent]) — instance of `fs.Dirent` - -> :book: An object is an internal representation of entry, so getting it does not affect performance. - -#### onlyDirectories - -* Type: `boolean` -* Default: `false` - -Return only directories. - -```js -fg.sync('*', { onlyDirectories: false }); // ['index.js', 'src'] -fg.sync('*', { onlyDirectories: true }); // ['src'] -``` - -> :book: If `true`, the [`onlyFiles`](#onlyfiles) option is automatically `false`. - -#### onlyFiles - -* Type: `boolean` -* Default: `true` - -Return only files. - -```js -fg.sync('*', { onlyFiles: false }); // ['index.js', 'src'] -fg.sync('*', { onlyFiles: true }); // ['index.js'] -``` - -#### stats - -* Type: `boolean` -* Default: `false` - -Enables an [object mode](#objectmode) with an additional field: - -* stats ([`fs.Stats`][node_js_fs_class_fs_stats]) — instance of `fs.Stats` - -```js -fg.sync('*', { stats: false }); // ['src/index.js'] -fg.sync('*', { stats: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: , stats: }] -``` - -> :book: Returns `fs.stat` instead of `fs.lstat` for symbolic links when the [`followSymbolicLinks`](#followsymboliclinks) option is specified. -> -> :warning: Unlike [object mode](#objectmode) this mode requires additional calls to the file system. On average, this mode is slower at least twice. See [old and modern mode](#old-and-modern-mode) for more details. - -#### unique - -* Type: `boolean` -* Default: `true` - -Ensures that the returned entries are unique. - -```js -fg.sync(['*.json', 'package.json'], { unique: false }); // ['package.json', 'package.json'] -fg.sync(['*.json', 'package.json'], { unique: true }); // ['package.json'] -``` - -If `true` and similar entries are found, the result is the first found. - -### Matching control - -#### braceExpansion - -* Type: `boolean` -* Default: `true` - -Enables Bash-like brace expansion. - -> :1234: [Syntax description][bash_hackers_syntax_expansion_brace] or more [detailed description][micromatch_braces]. - -```js -dir/ -├── abd -├── acd -└── a{b,c}d -``` - -```js -fg.sync('a{b,c}d', { braceExpansion: false }); // ['a{b,c}d'] -fg.sync('a{b,c}d', { braceExpansion: true }); // ['abd', 'acd'] -``` - -#### caseSensitiveMatch - -* Type: `boolean` -* Default: `true` - -Enables a [case-sensitive][wikipedia_case_sensitivity] mode for matching files. - -```js -dir/ -├── file.txt -└── File.txt -``` - -```js -fg.sync('file.txt', { caseSensitiveMatch: false }); // ['file.txt', 'File.txt'] -fg.sync('file.txt', { caseSensitiveMatch: true }); // ['file.txt'] -``` - -#### dot - -* Type: `boolean` -* Default: `false` - -Allow patterns to match entries that begin with a period (`.`). - -> :book: Note that an explicit dot in a portion of the pattern will always match dot files. 
- -```js -dir/ -├── .editorconfig -└── package.json -``` - -```js -fg.sync('*', { dot: false }); // ['package.json'] -fg.sync('*', { dot: true }); // ['.editorconfig', 'package.json'] -``` - -#### extglob - -* Type: `boolean` -* Default: `true` - -Enables Bash-like `extglob` functionality. - -> :1234: [Syntax description][micromatch_extglobs]. - -```js -dir/ -├── README.md -└── package.json -``` - -```js -fg.sync('*.+(json|md)', { extglob: false }); // [] -fg.sync('*.+(json|md)', { extglob: true }); // ['README.md', 'package.json'] -``` - -#### globstar - -* Type: `boolean` -* Default: `true` - -Enables recursively repeats a pattern containing `**`. If `false`, `**` behaves exactly like `*`. - -```js -dir/ -└── a - └── b -``` - -```js -fg.sync('**', { onlyFiles: false, globstar: false }); // ['a'] -fg.sync('**', { onlyFiles: false, globstar: true }); // ['a', 'a/b'] -``` - -#### baseNameMatch - -* Type: `boolean` -* Default: `false` - -If set to `true`, then patterns without slashes will be matched against the basename of the path if it contains slashes. - -```js -dir/ -└── one/ - └── file.md -``` - -```js -fg.sync('*.md', { baseNameMatch: false }); // [] -fg.sync('*.md', { baseNameMatch: true }); // ['one/file.md'] -``` - -## FAQ - -## What is a static or dynamic pattern? - -All patterns can be divided into two types: - -* **static**. A pattern is considered static if it can be used to get an entry on the file system without using matching mechanisms. For example, the `file.js` pattern is a static pattern because we can just verify that it exists on the file system. -* **dynamic**. A pattern is considered dynamic if it cannot be used directly to find occurrences without using a matching mechanisms. For example, the `*` pattern is a dynamic pattern because we cannot use this pattern directly. - -A pattern is considered dynamic if it contains the following characters (`…` — any characters or their absence) or options: - -* The [`caseSensitiveMatch`](#casesensitivematch) option is disabled -* `\\` (the escape character) -* `*`, `?`, `!` (at the beginning of line) -* `[…]` -* `(…|…)` -* `@(…)`, `!(…)`, `*(…)`, `?(…)`, `+(…)` (respects the [`extglob`](#extglob) option) -* `{…,…}`, `{…..…}` (respects the [`braceExpansion`](#braceexpansion) option) - -## How to write patterns on Windows? - -Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. With the [`cwd`](#cwd) option use a convenient format. - -**Bad** - -```ts -[ - 'directory\\*', - path.join(process.cwd(), '**') -] -``` - -**Good** - -```ts -[ - 'directory/*', - fg.convertPathToPattern(process.cwd()) + '/**' -] -``` - -> :book: Use the [`.convertPathToPattern`](#convertpathtopatternpath) package to convert Windows-style path to a Unix-style path. - -Read more about [matching with backslashes][micromatch_backslashes]. - -## Why are parentheses match wrong? - -```js -dir/ -└── (special-*file).txt -``` - -```js -fg.sync(['(special-*file).txt']) // [] -``` - -Refers to Bash. You need to escape special characters: - -```js -fg.sync(['\\(special-*file\\).txt']) // ['(special-*file).txt'] -``` - -Read more about [matching special characters as literals][picomatch_matching_special_characters_as_literals]. Or use the [`.escapePath`](#escapepathpath). - -## How to exclude directory from reading? - -You can use a negative pattern like this: `!**/node_modules` or `!**/node_modules/**`. Also you can use [`ignore`](#ignore) option. Just look at the example below. 
- -```js -first/ -├── file.md -└── second/ - └── file.txt -``` - -If you don't want to read the `second` directory, you must write the following pattern: `!**/second` or `!**/second/**`. - -```js -fg.sync(['**/*.md', '!**/second']); // ['first/file.md'] -fg.sync(['**/*.md'], { ignore: ['**/second/**'] }); // ['first/file.md'] -``` - -> :warning: When you write `!**/second/**/*` it means that the directory will be **read**, but all the entries will not be included in the results. - -You have to understand that if you write the pattern to exclude directories, then the directory will not be read under any circumstances. - -## How to use UNC path? - -You cannot use [Uniform Naming Convention (UNC)][unc_path] paths as patterns (due to syntax) directly, but you can use them as [`cwd`](#cwd) directory or use the `fg.convertPathToPattern` method. - -```ts -// cwd -fg.sync('*', { cwd: '\\\\?\\C:\\Python27' /* or //?/C:/Python27 */ }); -fg.sync('Python27/*', { cwd: '\\\\?\\C:\\' /* or //?/C:/ */ }); - -// .convertPathToPattern -fg.sync(fg.convertPathToPattern('\\\\?\\c:\\Python27') + '/*'); -``` - -## Compatible with `node-glob`? - -| node-glob | fast-glob | -| :----------: | :-------: | -| `cwd` | [`cwd`](#cwd) | -| `root` | – | -| `dot` | [`dot`](#dot) | -| `nomount` | – | -| `mark` | [`markDirectories`](#markdirectories) | -| `nosort` | – | -| `nounique` | [`unique`](#unique) | -| `nobrace` | [`braceExpansion`](#braceexpansion) | -| `noglobstar` | [`globstar`](#globstar) | -| `noext` | [`extglob`](#extglob) | -| `nocase` | [`caseSensitiveMatch`](#casesensitivematch) | -| `matchBase` | [`baseNameMatch`](#basenamematch) | -| `nodir` | [`onlyFiles`](#onlyfiles) | -| `ignore` | [`ignore`](#ignore) | -| `follow` | [`followSymbolicLinks`](#followsymboliclinks) | -| `realpath` | – | -| `absolute` | [`absolute`](#absolute) | - -## Benchmarks - -You can see results [here](https://github.com/mrmlnc/fast-glob/actions/workflows/benchmark.yml?query=branch%3Amaster) for every commit into the `main` branch. - -* **Product benchmark** – comparison with the main competitors. -* **Regress benchmark** – regression between the current version and the version from the npm registry. - -## Changelog - -See the [Releases section of our GitHub project][github_releases] for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
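
As a sketch of the compatibility table above, a typical `node-glob` call translated to `fast-glob` (the pattern and options are illustrative only):

```js
const fg = require('fast-glob');

(async () => {
    // node-glob: glob('**/*.js', {dot: true, nodir: true, ignore: ['**/node_modules/**']}, callback)
    // fast-glob equivalent, using the option mapping from the table above:
    const entries = await fg('**/*.js', {
        dot: true, // `dot`
        onlyFiles: true, // `nodir`
        ignore: ['**/node_modules/**'] // `ignore`
    });

    console.log(entries.length);
})();
```
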
- -[bash_hackers_syntax_expansion_brace]: https://wiki.bash-hackers.org/syntax/expansion/brace -[github_releases]: https://github.com/mrmlnc/fast-glob/releases -[glob_definition]: https://en.wikipedia.org/wiki/Glob_(programming) -[glob_linux_man]: http://man7.org/linux/man-pages/man3/glob.3.html -[micromatch_backslashes]: https://github.com/micromatch/micromatch#backslashes -[micromatch_braces]: https://github.com/micromatch/braces -[micromatch_extended_globbing]: https://github.com/micromatch/micromatch#extended-globbing -[micromatch_extglobs]: https://github.com/micromatch/micromatch#extglobs -[micromatch_regex_character_classes]: https://github.com/micromatch/micromatch#regex-character-classes -[micromatch]: https://github.com/micromatch/micromatch -[node_js_fs_class_fs_dirent]: https://nodejs.org/api/fs.html#fs_class_fs_dirent -[node_js_fs_class_fs_stats]: https://nodejs.org/api/fs.html#fs_class_fs_stats -[node_js_stream_readable_streams]: https://nodejs.org/api/stream.html#stream_readable_streams -[node_js]: https://nodejs.org/en -[nodelib_fs_scandir_old_and_modern_modern]: https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode -[npm_normalize_path]: https://www.npmjs.com/package/normalize-path -[npm_unixify]: https://www.npmjs.com/package/unixify -[picomatch_matching_behavior]: https://github.com/micromatch/picomatch#matching-behavior-vs-bash -[picomatch_matching_special_characters_as_literals]: https://github.com/micromatch/picomatch#matching-special-characters-as-literals -[picomatch_posix_brackets]: https://github.com/micromatch/picomatch#posix-brackets -[regular_expressions_brackets]: https://www.regular-expressions.info/brackets.html -[unc_path]: https://learn.microsoft.com/openspecs/windows_protocols/ms-dtyp/62e862f4-2a51-452e-8eeb-dc4ff5ee33cc -[wikipedia_case_sensitivity]: https://en.wikipedia.org/wiki/Case_sensitivity -[nodejs_thread_pool]: https://nodejs.org/en/docs/guides/dont-block-the-event-loop -[libuv_thread_pool]: http://docs.libuv.org/en/v1.x/threadpool.html -[windows_naming_conventions]: https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions diff --git a/node_modules/fast-glob/out/index.d.ts b/node_modules/fast-glob/out/index.d.ts deleted file mode 100644 index 46823bb..0000000 --- a/node_modules/fast-glob/out/index.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/// -import * as taskManager from './managers/tasks'; -import { Options as OptionsInternal } from './settings'; -import { Entry as EntryInternal, FileSystemAdapter as FileSystemAdapterInternal, Pattern as PatternInternal } from './types'; -type EntryObjectModePredicate = { - [TKey in keyof Pick]-?: true; -}; -type EntryStatsPredicate = { - [TKey in keyof Pick]-?: true; -}; -type EntryObjectPredicate = EntryObjectModePredicate | EntryStatsPredicate; -declare function FastGlob(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): Promise; -declare function FastGlob(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Promise; -declare namespace FastGlob { - type Options = OptionsInternal; - type Entry = EntryInternal; - type Task = taskManager.Task; - type Pattern = PatternInternal; - type FileSystemAdapter = FileSystemAdapterInternal; - const glob: typeof FastGlob; - const globSync: typeof sync; - const globStream: typeof stream; - const async: typeof FastGlob; - function sync(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): 
EntryInternal[]; - function sync(source: PatternInternal | PatternInternal[], options?: OptionsInternal): string[]; - function stream(source: PatternInternal | PatternInternal[], options?: OptionsInternal): NodeJS.ReadableStream; - function generateTasks(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Task[]; - function isDynamicPattern(source: PatternInternal, options?: OptionsInternal): boolean; - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - namespace posix { - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - } - namespace win32 { - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - } -} -export = FastGlob; diff --git a/node_modules/fast-glob/out/index.js b/node_modules/fast-glob/out/index.js deleted file mode 100644 index 90365d4..0000000 --- a/node_modules/fast-glob/out/index.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; -const taskManager = require("./managers/tasks"); -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -const utils = require("./utils"); -async function FastGlob(source, options) { - assertPatternsInput(source); - const works = getWorks(source, async_1.default, options); - const result = await Promise.all(works); - return utils.array.flatten(result); -} -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -(function (FastGlob) { - FastGlob.glob = FastGlob; - FastGlob.globSync = sync; - FastGlob.globStream = stream; - FastGlob.async = FastGlob; - function sync(source, options) { - assertPatternsInput(source); - const works = getWorks(source, sync_1.default, options); - return utils.array.flatten(works); - } - FastGlob.sync = sync; - function stream(source, options) { - assertPatternsInput(source); - const works = getWorks(source, stream_1.default, options); - /** - * The stream returned by the provider cannot work with an asynchronous iterator. - * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. - * This affects performance (+25%). I don't see best solution right now. 
- */ - return utils.stream.merge(works); - } - FastGlob.stream = stream; - function generateTasks(source, options) { - assertPatternsInput(source); - const patterns = [].concat(source); - const settings = new settings_1.default(options); - return taskManager.generate(patterns, settings); - } - FastGlob.generateTasks = generateTasks; - function isDynamicPattern(source, options) { - assertPatternsInput(source); - const settings = new settings_1.default(options); - return utils.pattern.isDynamicPattern(source, settings); - } - FastGlob.isDynamicPattern = isDynamicPattern; - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escape(source); - } - FastGlob.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertPathToPattern(source); - } - FastGlob.convertPathToPattern = convertPathToPattern; - let posix; - (function (posix) { - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escapePosixPath(source); - } - posix.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertPosixPathToPattern(source); - } - posix.convertPathToPattern = convertPathToPattern; - })(posix = FastGlob.posix || (FastGlob.posix = {})); - let win32; - (function (win32) { - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escapeWindowsPath(source); - } - win32.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertWindowsPathToPattern(source); - } - win32.convertPathToPattern = convertPathToPattern; - })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); -})(FastGlob || (FastGlob = {})); -function getWorks(source, _Provider, options) { - const patterns = [].concat(source); - const settings = new settings_1.default(options); - const tasks = taskManager.generate(patterns, settings); - const provider = new _Provider(settings); - return tasks.map(provider.read, provider); -} -function assertPatternsInput(input) { - const source = [].concat(input); - const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); - if (!isValidSource) { - throw new TypeError('Patterns must be a string (non empty) or an array of strings'); - } -} -module.exports = FastGlob; diff --git a/node_modules/fast-glob/out/managers/tasks.d.ts b/node_modules/fast-glob/out/managers/tasks.d.ts deleted file mode 100644 index 59d2c42..0000000 --- a/node_modules/fast-glob/out/managers/tasks.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import Settings from '../settings'; -import { Pattern, PatternsGroup } from '../types'; -export type Task = { - base: string; - dynamic: boolean; - patterns: Pattern[]; - positive: Pattern[]; - negative: Pattern[]; -}; -export declare function generate(input: Pattern[], settings: Settings): Task[]; -/** - * Returns tasks grouped by basic pattern directories. - * - * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. - * This is necessary because directory traversal starts at the base directory and goes deeper. 
- */ -export declare function convertPatternsToTasks(positive: Pattern[], negative: Pattern[], dynamic: boolean): Task[]; -export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; -export declare function getNegativePatternsAsPositive(patterns: Pattern[], ignore: Pattern[]): Pattern[]; -export declare function groupPatternsByBaseDirectory(patterns: Pattern[]): PatternsGroup; -export declare function convertPatternGroupsToTasks(positive: PatternsGroup, negative: Pattern[], dynamic: boolean): Task[]; -export declare function convertPatternGroupToTask(base: string, positive: Pattern[], negative: Pattern[], dynamic: boolean): Task; diff --git a/node_modules/fast-glob/out/managers/tasks.js b/node_modules/fast-glob/out/managers/tasks.js deleted file mode 100644 index 335a765..0000000 --- a/node_modules/fast-glob/out/managers/tasks.js +++ /dev/null @@ -1,110 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; -const utils = require("../utils"); -function generate(input, settings) { - const patterns = processPatterns(input, settings); - const ignore = processPatterns(settings.ignore, settings); - const positivePatterns = getPositivePatterns(patterns); - const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); - const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); - const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); - const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); - const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); - return staticTasks.concat(dynamicTasks); -} -exports.generate = generate; -function processPatterns(input, settings) { - let patterns = input; - /** - * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry - * and some problems with the micromatch package (see fast-glob issues: #365, #394). - * - * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown - * in matching in the case of a large set of patterns after expansion. - */ - if (settings.braceExpansion) { - patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); - } - /** - * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used - * at any nesting level. - * - * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change - * the pattern in the filter before creating a regular expression. There is no need to change the patterns - * in the application. Only on the input. - */ - if (settings.baseNameMatch) { - patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); - } - /** - * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. - */ - return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); -} -/** - * Returns tasks grouped by basic pattern directories. - * - * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. 
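Before any directory is read, the task manager shown here expands braces, normalizes slashes, and groups the resulting patterns by base directory. That grouping is observable through the public `generateTasks` helper; a small sketch whose output shape follows the `Task` type from the deleted tasks.d.ts, with illustrative patterns:

const fg = require('fast-glob');

// One task per base directory; static and dynamic patterns end up in separate tasks.
const tasks = fg.generateTasks(['src/**/*.ts', 'test/unit/*.ts', '!**/*.d.ts']);

for (const task of tasks) {
  console.log(task.base, task.dynamic, task.positive, task.negative);
}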
- * This is necessary because directory traversal starts at the base directory and goes deeper. - */ -function convertPatternsToTasks(positive, negative, dynamic) { - const tasks = []; - const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); - const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); - const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); - const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); - tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); - /* - * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory - * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. - */ - if ('.' in insideCurrentDirectoryGroup) { - tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); - } - else { - tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); - } - return tasks; -} -exports.convertPatternsToTasks = convertPatternsToTasks; -function getPositivePatterns(patterns) { - return utils.pattern.getPositivePatterns(patterns); -} -exports.getPositivePatterns = getPositivePatterns; -function getNegativePatternsAsPositive(patterns, ignore) { - const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); - const positive = negative.map(utils.pattern.convertToPositivePattern); - return positive; -} -exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; -function groupPatternsByBaseDirectory(patterns) { - const group = {}; - return patterns.reduce((collection, pattern) => { - const base = utils.pattern.getBaseDirectory(pattern); - if (base in collection) { - collection[base].push(pattern); - } - else { - collection[base] = [pattern]; - } - return collection; - }, group); -} -exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; -function convertPatternGroupsToTasks(positive, negative, dynamic) { - return Object.keys(positive).map((base) => { - return convertPatternGroupToTask(base, positive[base], negative, dynamic); - }); -} -exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; -function convertPatternGroupToTask(base, positive, negative, dynamic) { - return { - dynamic, - positive, - negative, - base, - patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) - }; -} -exports.convertPatternGroupToTask = convertPatternGroupToTask; diff --git a/node_modules/fast-glob/out/providers/async.d.ts b/node_modules/fast-glob/out/providers/async.d.ts deleted file mode 100644 index 2742616..0000000 --- a/node_modules/fast-glob/out/providers/async.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Task } from '../managers/tasks'; -import { Entry, EntryItem, ReaderOptions } from '../types'; -import ReaderAsync from '../readers/async'; -import Provider from './provider'; -export default class ProviderAsync extends Provider> { - protected _reader: ReaderAsync; - read(task: Task): Promise; - api(root: string, task: Task, options: ReaderOptions): Promise; -} diff --git a/node_modules/fast-glob/out/providers/async.js b/node_modules/fast-glob/out/providers/async.js deleted file mode 100644 index 0c5286e..0000000 --- a/node_modules/fast-glob/out/providers/async.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; 
-Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -const provider_1 = require("./provider"); -class ProviderAsync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new async_1.default(this._settings); - } - async read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = await this.api(root, task, options); - return entries.map((entry) => options.transform(entry)); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderAsync; diff --git a/node_modules/fast-glob/out/providers/filters/deep.d.ts b/node_modules/fast-glob/out/providers/filters/deep.d.ts deleted file mode 100644 index 377fab8..0000000 --- a/node_modules/fast-glob/out/providers/filters/deep.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { MicromatchOptions, EntryFilterFunction, Pattern } from '../../types'; -import Settings from '../../settings'; -export default class DeepFilter { - private readonly _settings; - private readonly _micromatchOptions; - constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); - getFilter(basePath: string, positive: Pattern[], negative: Pattern[]): EntryFilterFunction; - private _getMatcher; - private _getNegativePatternsRe; - private _filter; - private _isSkippedByDeep; - private _getEntryLevel; - private _isSkippedSymbolicLink; - private _isSkippedByPositivePatterns; - private _isSkippedByNegativePatterns; -} diff --git a/node_modules/fast-glob/out/providers/filters/deep.js b/node_modules/fast-glob/out/providers/filters/deep.js deleted file mode 100644 index 644bf41..0000000 --- a/node_modules/fast-glob/out/providers/filters/deep.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -const partial_1 = require("../matchers/partial"); -class DeepFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - } - getFilter(basePath, positive, negative) { - const matcher = this._getMatcher(positive); - const negativeRe = this._getNegativePatternsRe(negative); - return (entry) => this._filter(basePath, entry, matcher, negativeRe); - } - _getMatcher(patterns) { - return new partial_1.default(patterns, this._settings, this._micromatchOptions); - } - _getNegativePatternsRe(patterns) { - const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); - return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); - } - _filter(basePath, entry, matcher, negativeRe) { - if (this._isSkippedByDeep(basePath, entry.path)) { - return false; - } - if (this._isSkippedSymbolicLink(entry)) { - return false; - } - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._isSkippedByPositivePatterns(filepath, matcher)) { - return false; - } - return this._isSkippedByNegativePatterns(filepath, negativeRe); - } - _isSkippedByDeep(basePath, entryPath) { - /** - * Avoid unnecessary depth calculations when it doesn't matter. 
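The comment that closes the previous chunk refers to skipping depth checks entirely when `deep` is left at its default of Infinity. A usage sketch of the corresponding public option, with illustrative patterns:

const fg = require('fast-glob');

// `deep` limits the maximum depth of a read directory relative to the start directory.
const shallow = fg.sync('**/*.json', { deep: 1 });

// The default is Infinity, which is exactly the case the deep filter short-circuits.
const all = fg.sync('**/*.json');

console.log(shallow.length, all.length);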
- */ - if (this._settings.deep === Infinity) { - return false; - } - return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; - } - _getEntryLevel(basePath, entryPath) { - const entryPathDepth = entryPath.split('/').length; - if (basePath === '') { - return entryPathDepth; - } - const basePathDepth = basePath.split('/').length; - return entryPathDepth - basePathDepth; - } - _isSkippedSymbolicLink(entry) { - return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); - } - _isSkippedByPositivePatterns(entryPath, matcher) { - return !this._settings.baseNameMatch && !matcher.match(entryPath); - } - _isSkippedByNegativePatterns(entryPath, patternsRe) { - return !utils.pattern.matchAny(entryPath, patternsRe); - } -} -exports.default = DeepFilter; diff --git a/node_modules/fast-glob/out/providers/filters/entry.d.ts b/node_modules/fast-glob/out/providers/filters/entry.d.ts deleted file mode 100644 index 23db353..0000000 --- a/node_modules/fast-glob/out/providers/filters/entry.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import Settings from '../../settings'; -import { EntryFilterFunction, MicromatchOptions, Pattern } from '../../types'; -export default class EntryFilter { - private readonly _settings; - private readonly _micromatchOptions; - readonly index: Map; - constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); - getFilter(positive: Pattern[], negative: Pattern[]): EntryFilterFunction; - private _filter; - private _isDuplicateEntry; - private _createIndexRecord; - private _onlyFileFilter; - private _onlyDirectoryFilter; - private _isMatchToPatternsSet; - private _isMatchToAbsoluteNegative; - private _isMatchToPatterns; -} diff --git a/node_modules/fast-glob/out/providers/filters/entry.js b/node_modules/fast-glob/out/providers/filters/entry.js deleted file mode 100644 index 0c9210c..0000000 --- a/node_modules/fast-glob/out/providers/filters/entry.js +++ /dev/null @@ -1,85 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class EntryFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this.index = new Map(); - } - getFilter(positive, negative) { - const [absoluteNegative, relativeNegative] = utils.pattern.partitionAbsoluteAndRelative(negative); - const patterns = { - positive: { - all: utils.pattern.convertPatternsToRe(positive, this._micromatchOptions) - }, - negative: { - absolute: utils.pattern.convertPatternsToRe(absoluteNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })), - relative: utils.pattern.convertPatternsToRe(relativeNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })) - } - }; - return (entry) => this._filter(entry, patterns); - } - _filter(entry, patterns) { - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._settings.unique && this._isDuplicateEntry(filepath)) { - return false; - } - if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { - return false; - } - const isMatched = this._isMatchToPatternsSet(filepath, patterns, entry.dirent.isDirectory()); - if (this._settings.unique && isMatched) { - this._createIndexRecord(filepath); - } - return isMatched; - } - _isDuplicateEntry(filepath) { - return this.index.has(filepath); - } - _createIndexRecord(filepath) { - this.index.set(filepath, undefined); - } - _onlyFileFilter(entry) { - return this._settings.onlyFiles && 
!entry.dirent.isFile(); - } - _onlyDirectoryFilter(entry) { - return this._settings.onlyDirectories && !entry.dirent.isDirectory(); - } - _isMatchToPatternsSet(filepath, patterns, isDirectory) { - const isMatched = this._isMatchToPatterns(filepath, patterns.positive.all, isDirectory); - if (!isMatched) { - return false; - } - const isMatchedByRelativeNegative = this._isMatchToPatterns(filepath, patterns.negative.relative, isDirectory); - if (isMatchedByRelativeNegative) { - return false; - } - const isMatchedByAbsoluteNegative = this._isMatchToAbsoluteNegative(filepath, patterns.negative.absolute, isDirectory); - if (isMatchedByAbsoluteNegative) { - return false; - } - return true; - } - _isMatchToAbsoluteNegative(filepath, patternsRe, isDirectory) { - if (patternsRe.length === 0) { - return false; - } - const fullpath = utils.path.makeAbsolute(this._settings.cwd, filepath); - return this._isMatchToPatterns(fullpath, patternsRe, isDirectory); - } - _isMatchToPatterns(filepath, patternsRe, isDirectory) { - if (patternsRe.length === 0) { - return false; - } - // Trying to match files and directories by patterns. - const isMatched = utils.pattern.matchAny(filepath, patternsRe); - // A pattern with a trailling slash can be used for directory matching. - // To apply such pattern, we need to add a tralling slash to the path. - if (!isMatched && isDirectory) { - return utils.pattern.matchAny(filepath + '/', patternsRe); - } - return isMatched; - } -} -exports.default = EntryFilter; diff --git a/node_modules/fast-glob/out/providers/filters/error.d.ts b/node_modules/fast-glob/out/providers/filters/error.d.ts deleted file mode 100644 index 170eb25..0000000 --- a/node_modules/fast-glob/out/providers/filters/error.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../../settings'; -import { ErrorFilterFunction } from '../../types'; -export default class ErrorFilter { - private readonly _settings; - constructor(_settings: Settings); - getFilter(): ErrorFilterFunction; - private _isNonFatalError; -} diff --git a/node_modules/fast-glob/out/providers/filters/error.js b/node_modules/fast-glob/out/providers/filters/error.js deleted file mode 100644 index 1c6f241..0000000 --- a/node_modules/fast-glob/out/providers/filters/error.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class ErrorFilter { - constructor(_settings) { - this._settings = _settings; - } - getFilter() { - return (error) => this._isNonFatalError(error); - } - _isNonFatalError(error) { - return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; - } -} -exports.default = ErrorFilter; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.d.ts b/node_modules/fast-glob/out/providers/matchers/matcher.d.ts deleted file mode 100644 index d04c232..0000000 --- a/node_modules/fast-glob/out/providers/matchers/matcher.d.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { Pattern, MicromatchOptions, PatternRe } from '../../types'; -import Settings from '../../settings'; -export type PatternSegment = StaticPatternSegment | DynamicPatternSegment; -type StaticPatternSegment = { - dynamic: false; - pattern: Pattern; -}; -type DynamicPatternSegment = { - dynamic: true; - pattern: Pattern; - patternRe: PatternRe; -}; -export type PatternSection = PatternSegment[]; -export type PatternInfo = { - /** - * Indicates that the pattern has a globstar (more than a single section). 
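The entry filter in the preceding chunk merges inline `!` patterns and the `ignore` option into one negative set, splitting it into absolute and relative groups that are both compiled with `dot: true`. A sketch of the two equivalent public spellings; file names are illustrative:

const fg = require('fast-glob');

// An inline negative pattern...
const inline = fg.sync(['src/**/*.js', '!src/**/*.min.js']);

// ...and the `ignore` option feed the same negative set inside the entry filter.
const viaIgnore = fg.sync('src/**/*.js', { ignore: ['src/**/*.min.js'] });

console.log(inline.length === viaIgnore.length);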
- */ - complete: boolean; - pattern: Pattern; - segments: PatternSegment[]; - sections: PatternSection[]; -}; -export default abstract class Matcher { - private readonly _patterns; - private readonly _settings; - private readonly _micromatchOptions; - protected readonly _storage: PatternInfo[]; - constructor(_patterns: Pattern[], _settings: Settings, _micromatchOptions: MicromatchOptions); - private _fillStorage; - private _getPatternSegments; - private _splitSegmentsIntoSections; -} -export {}; diff --git a/node_modules/fast-glob/out/providers/matchers/matcher.js b/node_modules/fast-glob/out/providers/matchers/matcher.js deleted file mode 100644 index eae67c9..0000000 --- a/node_modules/fast-glob/out/providers/matchers/matcher.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class Matcher { - constructor(_patterns, _settings, _micromatchOptions) { - this._patterns = _patterns; - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this._storage = []; - this._fillStorage(); - } - _fillStorage() { - for (const pattern of this._patterns) { - const segments = this._getPatternSegments(pattern); - const sections = this._splitSegmentsIntoSections(segments); - this._storage.push({ - complete: sections.length <= 1, - pattern, - segments, - sections - }); - } - } - _getPatternSegments(pattern) { - const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); - return parts.map((part) => { - const dynamic = utils.pattern.isDynamicPattern(part, this._settings); - if (!dynamic) { - return { - dynamic: false, - pattern: part - }; - } - return { - dynamic: true, - pattern: part, - patternRe: utils.pattern.makeRe(part, this._micromatchOptions) - }; - }); - } - _splitSegmentsIntoSections(segments) { - return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); - } -} -exports.default = Matcher; diff --git a/node_modules/fast-glob/out/providers/matchers/partial.d.ts b/node_modules/fast-glob/out/providers/matchers/partial.d.ts deleted file mode 100644 index 91520f6..0000000 --- a/node_modules/fast-glob/out/providers/matchers/partial.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import Matcher from './matcher'; -export default class PartialMatcher extends Matcher { - match(filepath: string): boolean; -} diff --git a/node_modules/fast-glob/out/providers/matchers/partial.js b/node_modules/fast-glob/out/providers/matchers/partial.js deleted file mode 100644 index 1dfffeb..0000000 --- a/node_modules/fast-glob/out/providers/matchers/partial.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const matcher_1 = require("./matcher"); -class PartialMatcher extends matcher_1.default { - match(filepath) { - const parts = filepath.split('/'); - const levels = parts.length; - const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); - for (const pattern of patterns) { - const section = pattern.sections[0]; - /** - * In this case, the pattern has a globstar and we must read all directories unconditionally, - * but only if the level has reached the end of the first group. 
- * - * fixtures/{a,b}/** - * ^ true/false ^ always true - */ - if (!pattern.complete && levels > section.length) { - return true; - } - const match = parts.every((part, index) => { - const segment = pattern.segments[index]; - if (segment.dynamic && segment.patternRe.test(part)) { - return true; - } - if (!segment.dynamic && segment.pattern === part) { - return true; - } - return false; - }); - if (match) { - return true; - } - } - return false; - } -} -exports.default = PartialMatcher; diff --git a/node_modules/fast-glob/out/providers/provider.d.ts b/node_modules/fast-glob/out/providers/provider.d.ts deleted file mode 100644 index 1053460..0000000 --- a/node_modules/fast-glob/out/providers/provider.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Task } from '../managers/tasks'; -import Settings from '../settings'; -import { MicromatchOptions, ReaderOptions } from '../types'; -import DeepFilter from './filters/deep'; -import EntryFilter from './filters/entry'; -import ErrorFilter from './filters/error'; -import EntryTransformer from './transformers/entry'; -export default abstract class Provider { - protected readonly _settings: Settings; - readonly errorFilter: ErrorFilter; - readonly entryFilter: EntryFilter; - readonly deepFilter: DeepFilter; - readonly entryTransformer: EntryTransformer; - constructor(_settings: Settings); - abstract read(_task: Task): T; - protected _getRootDirectory(task: Task): string; - protected _getReaderOptions(task: Task): ReaderOptions; - protected _getMicromatchOptions(): MicromatchOptions; -} diff --git a/node_modules/fast-glob/out/providers/provider.js b/node_modules/fast-glob/out/providers/provider.js deleted file mode 100644 index da88ee0..0000000 --- a/node_modules/fast-glob/out/providers/provider.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const deep_1 = require("./filters/deep"); -const entry_1 = require("./filters/entry"); -const error_1 = require("./filters/error"); -const entry_2 = require("./transformers/entry"); -class Provider { - constructor(_settings) { - this._settings = _settings; - this.errorFilter = new error_1.default(this._settings); - this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); - this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); - this.entryTransformer = new entry_2.default(this._settings); - } - _getRootDirectory(task) { - return path.resolve(this._settings.cwd, task.base); - } - _getReaderOptions(task) { - const basePath = task.base === '.' ? 
'' : task.base; - return { - basePath, - pathSegmentSeparator: '/', - concurrency: this._settings.concurrency, - deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), - entryFilter: this.entryFilter.getFilter(task.positive, task.negative), - errorFilter: this.errorFilter.getFilter(), - followSymbolicLinks: this._settings.followSymbolicLinks, - fs: this._settings.fs, - stats: this._settings.stats, - throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, - transform: this.entryTransformer.getTransformer() - }; - } - _getMicromatchOptions() { - return { - dot: this._settings.dot, - matchBase: this._settings.baseNameMatch, - nobrace: !this._settings.braceExpansion, - nocase: !this._settings.caseSensitiveMatch, - noext: !this._settings.extglob, - noglobstar: !this._settings.globstar, - posix: true, - strictSlashes: false - }; - } -} -exports.default = Provider; diff --git a/node_modules/fast-glob/out/providers/stream.d.ts b/node_modules/fast-glob/out/providers/stream.d.ts deleted file mode 100644 index 3d02a1f..0000000 --- a/node_modules/fast-glob/out/providers/stream.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -/// -import { Readable } from 'stream'; -import { Task } from '../managers/tasks'; -import ReaderStream from '../readers/stream'; -import { ReaderOptions } from '../types'; -import Provider from './provider'; -export default class ProviderStream extends Provider { - protected _reader: ReaderStream; - read(task: Task): Readable; - api(root: string, task: Task, options: ReaderOptions): Readable; -} diff --git a/node_modules/fast-glob/out/providers/stream.js b/node_modules/fast-glob/out/providers/stream.js deleted file mode 100644 index 85da62e..0000000 --- a/node_modules/fast-glob/out/providers/stream.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const stream_2 = require("../readers/stream"); -const provider_1 = require("./provider"); -class ProviderStream extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_2.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const source = this.api(root, task, options); - const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); - source - .once('error', (error) => destination.emit('error', error)) - .on('data', (entry) => destination.emit('data', options.transform(entry))) - .once('end', () => destination.emit('end')); - destination - .once('close', () => source.destroy()); - return destination; - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderStream; diff --git a/node_modules/fast-glob/out/providers/sync.d.ts b/node_modules/fast-glob/out/providers/sync.d.ts deleted file mode 100644 index 9c0fe1e..0000000 --- a/node_modules/fast-glob/out/providers/sync.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Task } from '../managers/tasks'; -import ReaderSync from '../readers/sync'; -import { Entry, EntryItem, ReaderOptions } from '../types'; -import Provider from './provider'; -export default class ProviderSync extends Provider { - protected _reader: ReaderSync; - read(task: Task): EntryItem[]; - api(root: string, task: Task, options: ReaderOptions): Entry[]; -} diff --git a/node_modules/fast-glob/out/providers/sync.js 
b/node_modules/fast-glob/out/providers/sync.js deleted file mode 100644 index d70aa1b..0000000 --- a/node_modules/fast-glob/out/providers/sync.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -const provider_1 = require("./provider"); -class ProviderSync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new sync_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = this.api(root, task, options); - return entries.map(options.transform); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderSync; diff --git a/node_modules/fast-glob/out/providers/transformers/entry.d.ts b/node_modules/fast-glob/out/providers/transformers/entry.d.ts deleted file mode 100644 index e9b85fa..0000000 --- a/node_modules/fast-glob/out/providers/transformers/entry.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../../settings'; -import { EntryTransformerFunction } from '../../types'; -export default class EntryTransformer { - private readonly _settings; - constructor(_settings: Settings); - getTransformer(): EntryTransformerFunction; - private _transform; -} diff --git a/node_modules/fast-glob/out/providers/transformers/entry.js b/node_modules/fast-glob/out/providers/transformers/entry.js deleted file mode 100644 index d11903c..0000000 --- a/node_modules/fast-glob/out/providers/transformers/entry.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class EntryTransformer { - constructor(_settings) { - this._settings = _settings; - } - getTransformer() { - return (entry) => this._transform(entry); - } - _transform(entry) { - let filepath = entry.path; - if (this._settings.absolute) { - filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); - filepath = utils.path.unixify(filepath); - } - if (this._settings.markDirectories && entry.dirent.isDirectory()) { - filepath += '/'; - } - if (!this._settings.objectMode) { - return filepath; - } - return Object.assign(Object.assign({}, entry), { path: filepath }); - } -} -exports.default = EntryTransformer; diff --git a/node_modules/fast-glob/out/readers/async.d.ts b/node_modules/fast-glob/out/readers/async.d.ts deleted file mode 100644 index fbca428..0000000 --- a/node_modules/fast-glob/out/readers/async.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import * as fsWalk from '@nodelib/fs.walk'; -import { Entry, ReaderOptions, Pattern } from '../types'; -import Reader from './reader'; -import ReaderStream from './stream'; -export default class ReaderAsync extends Reader> { - protected _walkAsync: typeof fsWalk.walk; - protected _readerStream: ReaderStream; - dynamic(root: string, options: ReaderOptions): Promise; - static(patterns: Pattern[], options: ReaderOptions): Promise; -} diff --git a/node_modules/fast-glob/out/readers/async.js b/node_modules/fast-glob/out/readers/async.js deleted file mode 100644 index d024145..0000000 --- a/node_modules/fast-glob/out/readers/async.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -const stream_1 = 
require("./stream"); -class ReaderAsync extends reader_1.default { - constructor() { - super(...arguments); - this._walkAsync = fsWalk.walk; - this._readerStream = new stream_1.default(this._settings); - } - dynamic(root, options) { - return new Promise((resolve, reject) => { - this._walkAsync(root, options, (error, entries) => { - if (error === null) { - resolve(entries); - } - else { - reject(error); - } - }); - }); - } - async static(patterns, options) { - const entries = []; - const stream = this._readerStream.static(patterns, options); - // After #235, replace it with an asynchronous iterator. - return new Promise((resolve, reject) => { - stream.once('error', reject); - stream.on('data', (entry) => entries.push(entry)); - stream.once('end', () => resolve(entries)); - }); - } -} -exports.default = ReaderAsync; diff --git a/node_modules/fast-glob/out/readers/reader.d.ts b/node_modules/fast-glob/out/readers/reader.d.ts deleted file mode 100644 index 2af16b6..0000000 --- a/node_modules/fast-glob/out/readers/reader.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -/// -import * as fs from 'fs'; -import * as fsStat from '@nodelib/fs.stat'; -import Settings from '../settings'; -import { Entry, ErrnoException, Pattern, ReaderOptions } from '../types'; -export default abstract class Reader { - protected readonly _settings: Settings; - protected readonly _fsStatSettings: fsStat.Settings; - constructor(_settings: Settings); - abstract dynamic(root: string, options: ReaderOptions): T; - abstract static(patterns: Pattern[], options: ReaderOptions): T; - protected _getFullEntryPath(filepath: string): string; - protected _makeEntry(stats: fs.Stats, pattern: Pattern): Entry; - protected _isFatalError(error: ErrnoException): boolean; -} diff --git a/node_modules/fast-glob/out/readers/reader.js b/node_modules/fast-glob/out/readers/reader.js deleted file mode 100644 index 7b40255..0000000 --- a/node_modules/fast-glob/out/readers/reader.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const utils = require("../utils"); -class Reader { - constructor(_settings) { - this._settings = _settings; - this._fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this._settings.followSymbolicLinks, - fs: this._settings.fs, - throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks - }); - } - _getFullEntryPath(filepath) { - return path.resolve(this._settings.cwd, filepath); - } - _makeEntry(stats, pattern) { - const entry = { - name: pattern, - path: pattern, - dirent: utils.fs.createDirentFromStats(pattern, stats) - }; - if (this._settings.stats) { - entry.stats = stats; - } - return entry; - } - _isFatalError(error) { - return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; - } -} -exports.default = Reader; diff --git a/node_modules/fast-glob/out/readers/stream.d.ts b/node_modules/fast-glob/out/readers/stream.d.ts deleted file mode 100644 index 1c74cac..0000000 --- a/node_modules/fast-glob/out/readers/stream.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import { Readable } from 'stream'; -import * as fsStat from '@nodelib/fs.stat'; -import * as fsWalk from '@nodelib/fs.walk'; -import { Pattern, ReaderOptions } from '../types'; -import Reader from './reader'; -export default class ReaderStream extends Reader { - protected _walkStream: typeof fsWalk.walkStream; - protected _stat: typeof fsStat.stat; - dynamic(root: string, options: ReaderOptions): 
Readable; - static(patterns: Pattern[], options: ReaderOptions): Readable; - private _getEntry; - private _getStat; -} diff --git a/node_modules/fast-glob/out/readers/stream.js b/node_modules/fast-glob/out/readers/stream.js deleted file mode 100644 index 317c6d5..0000000 --- a/node_modules/fast-glob/out/readers/stream.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const fsStat = require("@nodelib/fs.stat"); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -class ReaderStream extends reader_1.default { - constructor() { - super(...arguments); - this._walkStream = fsWalk.walkStream; - this._stat = fsStat.stat; - } - dynamic(root, options) { - return this._walkStream(root, options); - } - static(patterns, options) { - const filepaths = patterns.map(this._getFullEntryPath, this); - const stream = new stream_1.PassThrough({ objectMode: true }); - stream._write = (index, _enc, done) => { - return this._getEntry(filepaths[index], patterns[index], options) - .then((entry) => { - if (entry !== null && options.entryFilter(entry)) { - stream.push(entry); - } - if (index === filepaths.length - 1) { - stream.end(); - } - done(); - }) - .catch(done); - }; - for (let i = 0; i < filepaths.length; i++) { - stream.write(i); - } - return stream; - } - _getEntry(filepath, pattern, options) { - return this._getStat(filepath) - .then((stats) => this._makeEntry(stats, pattern)) - .catch((error) => { - if (options.errorFilter(error)) { - return null; - } - throw error; - }); - } - _getStat(filepath) { - return new Promise((resolve, reject) => { - this._stat(filepath, this._fsStatSettings, (error, stats) => { - return error === null ? resolve(stats) : reject(error); - }); - }); - } -} -exports.default = ReaderStream; diff --git a/node_modules/fast-glob/out/readers/sync.d.ts b/node_modules/fast-glob/out/readers/sync.d.ts deleted file mode 100644 index c96ffee..0000000 --- a/node_modules/fast-glob/out/readers/sync.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fsWalk from '@nodelib/fs.walk'; -import { Entry, Pattern, ReaderOptions } from '../types'; -import Reader from './reader'; -export default class ReaderSync extends Reader { - protected _walkSync: typeof fsWalk.walkSync; - protected _statSync: typeof fsStat.statSync; - dynamic(root: string, options: ReaderOptions): Entry[]; - static(patterns: Pattern[], options: ReaderOptions): Entry[]; - private _getEntry; - private _getStat; -} diff --git a/node_modules/fast-glob/out/readers/sync.js b/node_modules/fast-glob/out/readers/sync.js deleted file mode 100644 index 4704d65..0000000 --- a/node_modules/fast-glob/out/readers/sync.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsStat = require("@nodelib/fs.stat"); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -class ReaderSync extends reader_1.default { - constructor() { - super(...arguments); - this._walkSync = fsWalk.walkSync; - this._statSync = fsStat.statSync; - } - dynamic(root, options) { - return this._walkSync(root, options); - } - static(patterns, options) { - const entries = []; - for (const pattern of patterns) { - const filepath = this._getFullEntryPath(pattern); - const entry = this._getEntry(filepath, pattern, options); - if (entry === null || !options.entryFilter(entry)) { - continue; - } - entries.push(entry); - } 
- return entries; - } - _getEntry(filepath, pattern, options) { - try { - const stats = this._getStat(filepath); - return this._makeEntry(stats, pattern); - } - catch (error) { - if (options.errorFilter(error)) { - return null; - } - throw error; - } - } - _getStat(filepath) { - return this._statSync(filepath, this._fsStatSettings); - } -} -exports.default = ReaderSync; diff --git a/node_modules/fast-glob/out/settings.d.ts b/node_modules/fast-glob/out/settings.d.ts deleted file mode 100644 index 76a74f8..0000000 --- a/node_modules/fast-glob/out/settings.d.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { FileSystemAdapter, Pattern } from './types'; -export declare const DEFAULT_FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export type Options = { - /** - * Return the absolute path for entries. - * - * @default false - */ - absolute?: boolean; - /** - * If set to `true`, then patterns without slashes will be matched against - * the basename of the path if it contains slashes. - * - * @default false - */ - baseNameMatch?: boolean; - /** - * Enables Bash-like brace expansion. - * - * @default true - */ - braceExpansion?: boolean; - /** - * Enables a case-sensitive mode for matching files. - * - * @default true - */ - caseSensitiveMatch?: boolean; - /** - * Specifies the maximum number of concurrent requests from a reader to read - * directories. - * - * @default os.cpus().length - */ - concurrency?: number; - /** - * The current working directory in which to search. - * - * @default process.cwd() - */ - cwd?: string; - /** - * Specifies the maximum depth of a read directory relative to the start - * directory. - * - * @default Infinity - */ - deep?: number; - /** - * Allow patterns to match entries that begin with a period (`.`). - * - * @default false - */ - dot?: boolean; - /** - * Enables Bash-like `extglob` functionality. - * - * @default true - */ - extglob?: boolean; - /** - * Indicates whether to traverse descendants of symbolic link directories. - * - * @default true - */ - followSymbolicLinks?: boolean; - /** - * Custom implementation of methods for working with the file system. - * - * @default fs.* - */ - fs?: Partial; - /** - * Enables recursively repeats a pattern containing `**`. - * If `false`, `**` behaves exactly like `*`. - * - * @default true - */ - globstar?: boolean; - /** - * An array of glob patterns to exclude matches. - * This is an alternative way to use negative patterns. - * - * @default [] - */ - ignore?: Pattern[]; - /** - * Mark the directory path with the final slash. - * - * @default false - */ - markDirectories?: boolean; - /** - * Returns objects (instead of strings) describing entries. - * - * @default false - */ - objectMode?: boolean; - /** - * Return only directories. - * - * @default false - */ - onlyDirectories?: boolean; - /** - * Return only files. - * - * @default true - */ - onlyFiles?: boolean; - /** - * Enables an object mode (`objectMode`) with an additional `stats` field. - * - * @default false - */ - stats?: boolean; - /** - * By default this package suppress only `ENOENT` errors. - * Set to `true` to suppress any error. - * - * @default false - */ - suppressErrors?: boolean; - /** - * Throw an error when symbolic link is broken if `true` or safely - * return `lstat` call if `false`. - * - * @default false - */ - throwErrorOnBrokenSymbolicLink?: boolean; - /** - * Ensures that the returned entries are unique. 
- * - * @default true - */ - unique?: boolean; -}; -export default class Settings { - private readonly _options; - readonly absolute: boolean; - readonly baseNameMatch: boolean; - readonly braceExpansion: boolean; - readonly caseSensitiveMatch: boolean; - readonly concurrency: number; - readonly cwd: string; - readonly deep: number; - readonly dot: boolean; - readonly extglob: boolean; - readonly followSymbolicLinks: boolean; - readonly fs: FileSystemAdapter; - readonly globstar: boolean; - readonly ignore: Pattern[]; - readonly markDirectories: boolean; - readonly objectMode: boolean; - readonly onlyDirectories: boolean; - readonly onlyFiles: boolean; - readonly stats: boolean; - readonly suppressErrors: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly unique: boolean; - constructor(_options?: Options); - private _getValue; - private _getFileSystemMethods; -} diff --git a/node_modules/fast-glob/out/settings.js b/node_modules/fast-glob/out/settings.js deleted file mode 100644 index 23f916c..0000000 --- a/node_modules/fast-glob/out/settings.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -const os = require("os"); -/** - * The `os.cpus` method can return zero. We expect the number of cores to be greater than zero. - * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 - */ -const CPU_COUNT = Math.max(os.cpus().length, 1); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - lstatSync: fs.lstatSync, - stat: fs.stat, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -class Settings { - constructor(_options = {}) { - this._options = _options; - this.absolute = this._getValue(this._options.absolute, false); - this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); - this.braceExpansion = this._getValue(this._options.braceExpansion, true); - this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); - this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); - this.cwd = this._getValue(this._options.cwd, process.cwd()); - this.deep = this._getValue(this._options.deep, Infinity); - this.dot = this._getValue(this._options.dot, false); - this.extglob = this._getValue(this._options.extglob, true); - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); - this.fs = this._getFileSystemMethods(this._options.fs); - this.globstar = this._getValue(this._options.globstar, true); - this.ignore = this._getValue(this._options.ignore, []); - this.markDirectories = this._getValue(this._options.markDirectories, false); - this.objectMode = this._getValue(this._options.objectMode, false); - this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); - this.onlyFiles = this._getValue(this._options.onlyFiles, true); - this.stats = this._getValue(this._options.stats, false); - this.suppressErrors = this._getValue(this._options.suppressErrors, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); - this.unique = this._getValue(this._options.unique, true); - if (this.onlyDirectories) { - this.onlyFiles = false; - } - if (this.stats) { - this.objectMode = true; - } - // Remove the cast to the array in the next major (#404). 
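The Settings constructor just shown applies two documented coercions: `onlyDirectories: true` switches `onlyFiles` off, and `stats: true` forces `objectMode`. A small sketch that leans on those defaults; the paths are illustrative:

const fg = require('fast-glob');

// stats: true implies objectMode, so each entry is an object carrying an fs.Stats instance.
const entries = fg.sync('src/**/*.js', { stats: true });
for (const entry of entries) {
  console.log(entry.path, entry.stats.size);
}

// onlyDirectories: true automatically disables onlyFiles.
const dirs = fg.sync('src/**', { onlyDirectories: true });
console.log(dirs);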
- this.ignore = [].concat(this.ignore); - } - _getValue(option, value) { - return option === undefined ? value : option; - } - _getFileSystemMethods(methods = {}) { - return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); - } -} -exports.default = Settings; diff --git a/node_modules/fast-glob/out/types/index.d.ts b/node_modules/fast-glob/out/types/index.d.ts deleted file mode 100644 index 6506caf..0000000 --- a/node_modules/fast-glob/out/types/index.d.ts +++ /dev/null @@ -1,31 +0,0 @@ -/// -import * as fsWalk from '@nodelib/fs.walk'; -export type ErrnoException = NodeJS.ErrnoException; -export type Entry = fsWalk.Entry; -export type EntryItem = string | Entry; -export type Pattern = string; -export type PatternRe = RegExp; -export type PatternsGroup = Record; -export type ReaderOptions = fsWalk.Options & { - transform(entry: Entry): EntryItem; - deepFilter: DeepFilterFunction; - entryFilter: EntryFilterFunction; - errorFilter: ErrorFilterFunction; - fs: FileSystemAdapter; - stats: boolean; -}; -export type ErrorFilterFunction = fsWalk.ErrorFilterFunction; -export type EntryFilterFunction = fsWalk.EntryFilterFunction; -export type DeepFilterFunction = fsWalk.DeepFilterFunction; -export type EntryTransformerFunction = (entry: Entry) => EntryItem; -export type MicromatchOptions = { - dot?: boolean; - matchBase?: boolean; - nobrace?: boolean; - nocase?: boolean; - noext?: boolean; - noglobstar?: boolean; - posix?: boolean; - strictSlashes?: boolean; -}; -export type FileSystemAdapter = fsWalk.FileSystemAdapter; diff --git a/node_modules/fast-glob/out/types/index.js b/node_modules/fast-glob/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/fast-glob/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/fast-glob/out/utils/array.d.ts b/node_modules/fast-glob/out/utils/array.d.ts deleted file mode 100644 index 98e7325..0000000 --- a/node_modules/fast-glob/out/utils/array.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function flatten(items: T[][]): T[]; -export declare function splitWhen(items: T[], predicate: (item: T) => boolean): T[][]; diff --git a/node_modules/fast-glob/out/utils/array.js b/node_modules/fast-glob/out/utils/array.js deleted file mode 100644 index 50c406e..0000000 --- a/node_modules/fast-glob/out/utils/array.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.splitWhen = exports.flatten = void 0; -function flatten(items) { - return items.reduce((collection, item) => [].concat(collection, item), []); -} -exports.flatten = flatten; -function splitWhen(items, predicate) { - const result = [[]]; - let groupIndex = 0; - for (const item of items) { - if (predicate(item)) { - groupIndex++; - result[groupIndex] = []; - } - else { - result[groupIndex].push(item); - } - } - return result; -} -exports.splitWhen = splitWhen; diff --git a/node_modules/fast-glob/out/utils/errno.d.ts b/node_modules/fast-glob/out/utils/errno.d.ts deleted file mode 100644 index 1c08d3b..0000000 --- a/node_modules/fast-glob/out/utils/errno.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { ErrnoException } from '../types'; -export declare function isEnoentCodeError(error: ErrnoException): boolean; diff --git a/node_modules/fast-glob/out/utils/errno.js b/node_modules/fast-glob/out/utils/errno.js deleted file mode 100644 index f0bd801..0000000 --- 
a/node_modules/fast-glob/out/utils/errno.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isEnoentCodeError = void 0; -function isEnoentCodeError(error) { - return error.code === 'ENOENT'; -} -exports.isEnoentCodeError = isEnoentCodeError; diff --git a/node_modules/fast-glob/out/utils/fs.d.ts b/node_modules/fast-glob/out/utils/fs.d.ts deleted file mode 100644 index 64c61ce..0000000 --- a/node_modules/fast-glob/out/utils/fs.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -import * as fs from 'fs'; -import { Dirent } from '@nodelib/fs.walk'; -export declare function createDirentFromStats(name: string, stats: fs.Stats): Dirent; diff --git a/node_modules/fast-glob/out/utils/fs.js b/node_modules/fast-glob/out/utils/fs.js deleted file mode 100644 index ace7c74..0000000 --- a/node_modules/fast-glob/out/utils/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/fast-glob/out/utils/index.d.ts b/node_modules/fast-glob/out/utils/index.d.ts deleted file mode 100644 index f634cad..0000000 --- a/node_modules/fast-glob/out/utils/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import * as array from './array'; -import * as errno from './errno'; -import * as fs from './fs'; -import * as path from './path'; -import * as pattern from './pattern'; -import * as stream from './stream'; -import * as string from './string'; -export { array, errno, fs, path, pattern, stream, string }; diff --git a/node_modules/fast-glob/out/utils/index.js b/node_modules/fast-glob/out/utils/index.js deleted file mode 100644 index 0f92c16..0000000 --- a/node_modules/fast-glob/out/utils/index.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; -const array = require("./array"); -exports.array = array; -const errno = require("./errno"); -exports.errno = errno; -const fs = require("./fs"); -exports.fs = fs; -const path = require("./path"); -exports.path = path; -const pattern = require("./pattern"); -exports.pattern = pattern; -const stream = require("./stream"); -exports.stream = stream; -const string = require("./string"); -exports.string = string; diff --git a/node_modules/fast-glob/out/utils/path.d.ts b/node_modules/fast-glob/out/utils/path.d.ts deleted file mode 100644 index 0b13f4b..0000000 --- a/node_modules/fast-glob/out/utils/path.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Pattern } from '../types'; -/** - * Designed to work only with simple paths: `dir\\file`. 
- */ -export declare function unixify(filepath: string): string; -export declare function makeAbsolute(cwd: string, filepath: string): string; -export declare function removeLeadingDotSegment(entry: string): string; -export declare const escape: typeof escapeWindowsPath; -export declare function escapeWindowsPath(pattern: Pattern): Pattern; -export declare function escapePosixPath(pattern: Pattern): Pattern; -export declare const convertPathToPattern: typeof convertWindowsPathToPattern; -export declare function convertWindowsPathToPattern(filepath: string): Pattern; -export declare function convertPosixPathToPattern(filepath: string): Pattern; diff --git a/node_modules/fast-glob/out/utils/path.js b/node_modules/fast-glob/out/utils/path.js deleted file mode 100644 index 7b53b39..0000000 --- a/node_modules/fast-glob/out/utils/path.js +++ /dev/null @@ -1,68 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; -const os = require("os"); -const path = require("path"); -const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; -const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ -/** - * All non-escaped special characters. - * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. - * Windows: (){}[], !+@ before (, ! at the beginning. - */ -const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; -const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; -/** - * The device path (\\.\ or \\?\). - * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths - */ -const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; -/** - * All backslashes except those escaping special characters. - * Windows: !()+@{} - * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions - */ -const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; -/** - * Designed to work only with simple paths: `dir\\file`. - */ -function unixify(filepath) { - return filepath.replace(/\\/g, '/'); -} -exports.unixify = unixify; -function makeAbsolute(cwd, filepath) { - return path.resolve(cwd, filepath); -} -exports.makeAbsolute = makeAbsolute; -function removeLeadingDotSegment(entry) { - // We do not use `startsWith` because this is 10x slower than current implementation for some cases. - // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with - if (entry.charAt(0) === '.') { - const secondCharactery = entry.charAt(1); - if (secondCharactery === '/' || secondCharactery === '\\') { - return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); - } - } - return entry; -} -exports.removeLeadingDotSegment = removeLeadingDotSegment; -exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; -function escapeWindowsPath(pattern) { - return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escapeWindowsPath = escapeWindowsPath; -function escapePosixPath(pattern) { - return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escapePosixPath = escapePosixPath; -exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? 
convertWindowsPathToPattern : convertPosixPathToPattern; -function convertWindowsPathToPattern(filepath) { - return escapeWindowsPath(filepath) - .replace(DOS_DEVICE_PATH_RE, '//$1') - .replace(WINDOWS_BACKSLASHES_RE, '/'); -} -exports.convertWindowsPathToPattern = convertWindowsPathToPattern; -function convertPosixPathToPattern(filepath) { - return escapePosixPath(filepath); -} -exports.convertPosixPathToPattern = convertPosixPathToPattern; diff --git a/node_modules/fast-glob/out/utils/pattern.d.ts b/node_modules/fast-glob/out/utils/pattern.d.ts deleted file mode 100644 index e3598a9..0000000 --- a/node_modules/fast-glob/out/utils/pattern.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { MicromatchOptions, Pattern, PatternRe } from '../types'; -type PatternTypeOptions = { - braceExpansion?: boolean; - caseSensitiveMatch?: boolean; - extglob?: boolean; -}; -export declare function isStaticPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; -export declare function isDynamicPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; -export declare function convertToPositivePattern(pattern: Pattern): Pattern; -export declare function convertToNegativePattern(pattern: Pattern): Pattern; -export declare function isNegativePattern(pattern: Pattern): boolean; -export declare function isPositivePattern(pattern: Pattern): boolean; -export declare function getNegativePatterns(patterns: Pattern[]): Pattern[]; -export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; -/** - * Returns patterns that can be applied inside the current directory. - * - * @example - * // ['./*', '*', 'a/*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -export declare function getPatternsInsideCurrentDirectory(patterns: Pattern[]): Pattern[]; -/** - * Returns patterns to be expanded relative to (outside) the current directory. - * - * @example - * // ['../*', './../*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -export declare function getPatternsOutsideCurrentDirectory(patterns: Pattern[]): Pattern[]; -export declare function isPatternRelatedToParentDirectory(pattern: Pattern): boolean; -export declare function getBaseDirectory(pattern: Pattern): string; -export declare function hasGlobStar(pattern: Pattern): boolean; -export declare function endsWithSlashGlobStar(pattern: Pattern): boolean; -export declare function isAffectDepthOfReadingPattern(pattern: Pattern): boolean; -export declare function expandPatternsWithBraceExpansion(patterns: Pattern[]): Pattern[]; -export declare function expandBraceExpansion(pattern: Pattern): Pattern[]; -export declare function getPatternParts(pattern: Pattern, options: MicromatchOptions): Pattern[]; -export declare function makeRe(pattern: Pattern, options: MicromatchOptions): PatternRe; -export declare function convertPatternsToRe(patterns: Pattern[], options: MicromatchOptions): PatternRe[]; -export declare function matchAny(entry: string, patternsRe: PatternRe[]): boolean; -/** - * This package only works with forward slashes as a path separator. - * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
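The path helpers removed above back the public `escapePath` and `convertPathToPattern` functions, with explicit `posix` and `win32` variants for when the platform of the input path is known in advance. A brief sketch, assuming a fast-glob version that exports these helpers (they appear in the deleted index.d.ts); the paths are illustrative:

const fg = require('fast-glob');

// Platform-aware: picks the POSIX or Windows escaping rules for the current OS.
const base = fg.convertPathToPattern('/opt/app (stable)/lib');
const libs = fg.sync(`${base}/**/*.so`);

// Explicit variants for paths whose origin is known ahead of time.
const winBase = fg.win32.convertPathToPattern('C:\\Program Files (x86)\\App');
const posixSafe = fg.posix.escapePath('fixtures/(special)/file.txt');

console.log(libs.length, winBase, posixSafe);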
- */ -export declare function removeDuplicateSlashes(pattern: string): string; -export declare function partitionAbsoluteAndRelative(patterns: Pattern[]): Pattern[][]; -export declare function isAbsolute(pattern: string): boolean; -export {}; diff --git a/node_modules/fast-glob/out/utils/pattern.js b/node_modules/fast-glob/out/utils/pattern.js deleted file mode 100644 index b2924e7..0000000 --- a/node_modules/fast-glob/out/utils/pattern.js +++ /dev/null @@ -1,206 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isAbsolute = exports.partitionAbsoluteAndRelative = exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; -const path = require("path"); -const globParent = require("glob-parent"); -const micromatch = require("micromatch"); -const GLOBSTAR = '**'; -const ESCAPE_SYMBOL = '\\'; -const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; -const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; -const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; -const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; -const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; -/** - * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. - * The latter is due to the presence of the device path at the beginning of the UNC path. - */ -const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; -function isStaticPattern(pattern, options = {}) { - return !isDynamicPattern(pattern, options); -} -exports.isStaticPattern = isStaticPattern; -function isDynamicPattern(pattern, options = {}) { - /** - * A special case with an empty string is necessary for matching patterns that start with a forward slash. - * An empty string cannot be a dynamic pattern. - * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. - */ - if (pattern === '') { - return false; - } - /** - * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check - * filepath directly (without read directory). 
- */ - if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { - return true; - } - if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { - return true; - } - return false; -} -exports.isDynamicPattern = isDynamicPattern; -function hasBraceExpansion(pattern) { - const openingBraceIndex = pattern.indexOf('{'); - if (openingBraceIndex === -1) { - return false; - } - const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); - if (closingBraceIndex === -1) { - return false; - } - const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); - return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); -} -function convertToPositivePattern(pattern) { - return isNegativePattern(pattern) ? pattern.slice(1) : pattern; -} -exports.convertToPositivePattern = convertToPositivePattern; -function convertToNegativePattern(pattern) { - return '!' + pattern; -} -exports.convertToNegativePattern = convertToNegativePattern; -function isNegativePattern(pattern) { - return pattern.startsWith('!') && pattern[1] !== '('; -} -exports.isNegativePattern = isNegativePattern; -function isPositivePattern(pattern) { - return !isNegativePattern(pattern); -} -exports.isPositivePattern = isPositivePattern; -function getNegativePatterns(patterns) { - return patterns.filter(isNegativePattern); -} -exports.getNegativePatterns = getNegativePatterns; -function getPositivePatterns(patterns) { - return patterns.filter(isPositivePattern); -} -exports.getPositivePatterns = getPositivePatterns; -/** - * Returns patterns that can be applied inside the current directory. - * - * @example - * // ['./*', '*', 'a/*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -function getPatternsInsideCurrentDirectory(patterns) { - return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); -} -exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; -/** - * Returns patterns to be expanded relative to (outside) the current directory. 
- * - * @example - * // ['../*', './../*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -function getPatternsOutsideCurrentDirectory(patterns) { - return patterns.filter(isPatternRelatedToParentDirectory); -} -exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; -function isPatternRelatedToParentDirectory(pattern) { - return pattern.startsWith('..') || pattern.startsWith('./..'); -} -exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; -function getBaseDirectory(pattern) { - return globParent(pattern, { flipBackslashes: false }); -} -exports.getBaseDirectory = getBaseDirectory; -function hasGlobStar(pattern) { - return pattern.includes(GLOBSTAR); -} -exports.hasGlobStar = hasGlobStar; -function endsWithSlashGlobStar(pattern) { - return pattern.endsWith('/' + GLOBSTAR); -} -exports.endsWithSlashGlobStar = endsWithSlashGlobStar; -function isAffectDepthOfReadingPattern(pattern) { - const basename = path.basename(pattern); - return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); -} -exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; -function expandPatternsWithBraceExpansion(patterns) { - return patterns.reduce((collection, pattern) => { - return collection.concat(expandBraceExpansion(pattern)); - }, []); -} -exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; -function expandBraceExpansion(pattern) { - const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); - /** - * Sort the patterns by length so that the same depth patterns are processed side by side. - * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` - */ - patterns.sort((a, b) => a.length - b.length); - /** - * Micromatch can return an empty string in the case of patterns like `{a,}`. - */ - return patterns.filter((pattern) => pattern !== ''); -} -exports.expandBraceExpansion = expandBraceExpansion; -function getPatternParts(pattern, options) { - let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); - /** - * The scan method returns an empty array in some cases. - * See micromatch/picomatch#58 for more details. - */ - if (parts.length === 0) { - parts = [pattern]; - } - /** - * The scan method does not return an empty part for the pattern with a forward slash. - * This is another part of micromatch/picomatch#58. - */ - if (parts[0].startsWith('/')) { - parts[0] = parts[0].slice(1); - parts.unshift(''); - } - return parts; -} -exports.getPatternParts = getPatternParts; -function makeRe(pattern, options) { - return micromatch.makeRe(pattern, options); -} -exports.makeRe = makeRe; -function convertPatternsToRe(patterns, options) { - return patterns.map((pattern) => makeRe(pattern, options)); -} -exports.convertPatternsToRe = convertPatternsToRe; -function matchAny(entry, patternsRe) { - return patternsRe.some((patternRe) => patternRe.test(entry)); -} -exports.matchAny = matchAny; -/** - * This package only works with forward slashes as a path separator. - * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
- */ -function removeDuplicateSlashes(pattern) { - return pattern.replace(DOUBLE_SLASH_RE, '/'); -} -exports.removeDuplicateSlashes = removeDuplicateSlashes; -function partitionAbsoluteAndRelative(patterns) { - const absolute = []; - const relative = []; - for (const pattern of patterns) { - if (isAbsolute(pattern)) { - absolute.push(pattern); - } - else { - relative.push(pattern); - } - } - return [absolute, relative]; -} -exports.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; -function isAbsolute(pattern) { - return path.isAbsolute(pattern); -} -exports.isAbsolute = isAbsolute; diff --git a/node_modules/fast-glob/out/utils/stream.d.ts b/node_modules/fast-glob/out/utils/stream.d.ts deleted file mode 100644 index 4daf913..0000000 --- a/node_modules/fast-glob/out/utils/stream.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -/// -import { Readable } from 'stream'; -export declare function merge(streams: Readable[]): NodeJS.ReadableStream; diff --git a/node_modules/fast-glob/out/utils/stream.js b/node_modules/fast-glob/out/utils/stream.js deleted file mode 100644 index b32028c..0000000 --- a/node_modules/fast-glob/out/utils/stream.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.merge = void 0; -const merge2 = require("merge2"); -function merge(streams) { - const mergedStream = merge2(streams); - streams.forEach((stream) => { - stream.once('error', (error) => mergedStream.emit('error', error)); - }); - mergedStream.once('close', () => propagateCloseEventToSources(streams)); - mergedStream.once('end', () => propagateCloseEventToSources(streams)); - return mergedStream; -} -exports.merge = merge; -function propagateCloseEventToSources(streams) { - streams.forEach((stream) => stream.emit('close')); -} diff --git a/node_modules/fast-glob/out/utils/string.d.ts b/node_modules/fast-glob/out/utils/string.d.ts deleted file mode 100644 index c884735..0000000 --- a/node_modules/fast-glob/out/utils/string.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function isString(input: unknown): input is string; -export declare function isEmpty(input: string): boolean; diff --git a/node_modules/fast-glob/out/utils/string.js b/node_modules/fast-glob/out/utils/string.js deleted file mode 100644 index 76e7ea5..0000000 --- a/node_modules/fast-glob/out/utils/string.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isEmpty = exports.isString = void 0; -function isString(input) { - return typeof input === 'string'; -} -exports.isString = isString; -function isEmpty(input) { - return input === ''; -} -exports.isEmpty = isEmpty; diff --git a/node_modules/fast-glob/package.json b/node_modules/fast-glob/package.json deleted file mode 100644 index e910de9..0000000 --- a/node_modules/fast-glob/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "fast-glob", - "version": "3.3.3", - "description": "It's a very fast and efficient glob library for Node.js", - "license": "MIT", - "repository": "mrmlnc/fast-glob", - "author": { - "name": "Denis Malinochkin", - "url": "https://mrmlnc.com" - }, - "engines": { - "node": ">=8.6.0" - }, - "main": "out/index.js", - "typings": "out/index.d.ts", - "files": [ - "out", - "!out/{benchmark,tests}", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "keywords": [ - "glob", - "patterns", - "fast", - "implementation" - ], - "devDependencies": { - "@nodelib/fs.macchiato": "^1.0.1", - "@types/glob-parent": "^5.1.0", - "@types/merge2": "^1.1.4", - 
"@types/micromatch": "^4.0.0", - "@types/mocha": "^5.2.7", - "@types/node": "^14.18.53", - "@types/picomatch": "^2.3.0", - "@types/sinon": "^7.5.0", - "bencho": "^0.1.1", - "eslint": "^6.5.1", - "eslint-config-mrmlnc": "^1.1.0", - "execa": "^7.1.1", - "fast-glob": "^3.0.4", - "fdir": "6.0.1", - "glob": "^10.0.0", - "hereby": "^1.8.1", - "mocha": "^6.2.1", - "rimraf": "^5.0.0", - "sinon": "^7.5.0", - "snap-shot-it": "^7.9.10", - "typescript": "^4.9.5" - }, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "scripts": { - "clean": "rimraf out", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "test:e2e": "mocha \"out/**/*.e2e.js\" -s 0", - "test:e2e:sync": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(sync\\)\"", - "test:e2e:async": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(async\\)\"", - "test:e2e:stream": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(stream\\)\"", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile -- -- --sourceMap --watch", - "bench:async": "npm run bench:product:async && npm run bench:regression:async", - "bench:stream": "npm run bench:product:stream && npm run bench:regression:stream", - "bench:sync": "npm run bench:product:sync && npm run bench:regression:sync", - "bench:product": "npm run bench:product:async && npm run bench:product:sync && npm run bench:product:stream", - "bench:product:async": "hereby bench:product:async", - "bench:product:sync": "hereby bench:product:sync", - "bench:product:stream": "hereby bench:product:stream", - "bench:regression": "npm run bench:regression:async && npm run bench:regression:sync && npm run bench:regression:stream", - "bench:regression:async": "hereby bench:regression:async", - "bench:regression:sync": "hereby bench:regression:sync", - "bench:regression:stream": "hereby bench:regression:stream" - } -} diff --git a/node_modules/fastq/.github/dependabot.yml b/node_modules/fastq/.github/dependabot.yml deleted file mode 100644 index 7e7cbe1..0000000 --- a/node_modules/fastq/.github/dependabot.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: 2 -updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - open-pull-requests-limit: 10 - ignore: - - dependency-name: standard - versions: - - 16.0.3 diff --git a/node_modules/fastq/.github/workflows/ci.yml b/node_modules/fastq/.github/workflows/ci.yml deleted file mode 100644 index 09dc7a3..0000000 --- a/node_modules/fastq/.github/workflows/ci.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: ci - -on: [push, pull_request] - -jobs: - legacy: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install --production && npm install tape - - - name: Run tests - run: | - npm run legacy - - test: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: [18.x, 20.x, 22.x] - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install - - - name: Run tests - run: | - npm run test - - 
types: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: 16 - - - name: Install - run: | - npm install - - - name: Run types tests - run: | - npm run typescript diff --git a/node_modules/fastq/LICENSE b/node_modules/fastq/LICENSE deleted file mode 100644 index 27c7bb4..0000000 --- a/node_modules/fastq/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2015-2020, Matteo Collina - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fastq/README.md b/node_modules/fastq/README.md deleted file mode 100644 index 1644111..0000000 --- a/node_modules/fastq/README.md +++ /dev/null @@ -1,312 +0,0 @@ -# fastq - -![ci][ci-url] -[![npm version][npm-badge]][npm-url] - -Fast, in memory work queue. - -Benchmarks (1 million tasks): - -* setImmediate: 812ms -* fastq: 854ms -* async.queue: 1298ms -* neoAsync.queue: 1249ms - -Obtained on node 12.16.1, on a dedicated server. - -If you need zero-overhead series function call, check out -[fastseries](http://npm.im/fastseries). For zero-overhead parallel -function call, check out [fastparallel](http://npm.im/fastparallel). 
- -[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) - - * Installation - * Usage - * API - * Licence & copyright - -## Install - -`npm i fastq --save` - -## Usage (callback API) - -```js -'use strict' - -const queue = require('fastq')(worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log('the result is', result) -}) - -function worker (arg, cb) { - cb(null, arg * 2) -} -``` - -## Usage (promise API) - -```js -const queue = require('fastq').promise(worker, 1) - -async function worker (arg) { - return arg * 2 -} - -async function run () { - const result = await queue.push(42) - console.log('the result is', result) -} - -run() -``` - -### Setting "this" - -```js -'use strict' - -const that = { hello: 'world' } -const queue = require('fastq')(that, worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log(this) - console.log('the result is', result) -}) - -function worker (arg, cb) { - console.log(this) - cb(null, arg * 2) -} -``` - -### Using with TypeScript (callback API) - -```ts -'use strict' - -import * as fastq from "fastq"; -import type { queue, done } from "fastq"; - -type Task = { - id: number -} - -const q: queue = fastq(worker, 1) - -q.push({ id: 42}) - -function worker (arg: Task, cb: done) { - console.log(arg.id) - cb(null) -} -``` - -### Using with TypeScript (promise API) - -```ts -'use strict' - -import * as fastq from "fastq"; -import type { queueAsPromised } from "fastq"; - -type Task = { - id: number -} - -const q: queueAsPromised = fastq.promise(asyncWorker, 1) - -q.push({ id: 42}).catch((err) => console.error(err)) - -async function asyncWorker (arg: Task): Promise { - // No need for a try-catch block, fastq handles errors automatically - console.log(arg.id) -} -``` - -## API - -* fastqueue() -* queue#push() -* queue#unshift() -* queue#pause() -* queue#resume() -* queue#idle() -* queue#length() -* queue#getQueue() -* queue#kill() -* queue#killAndDrain() -* queue#error() -* queue#concurrency -* queue#drain -* queue#empty -* queue#saturated -* fastqueue.promise() - -------------------------------------------------------- - -### fastqueue([that], worker, concurrency) - -Creates a new queue. - -Arguments: - -* `that`, optional context of the `worker` function. -* `worker`, worker function, it would be called with `that` as `this`, - if that is specified. -* `concurrency`, number of concurrent tasks that could be executed in - parallel. - -------------------------------------------------------- - -### queue.push(task, done) - -Add a task at the end of the queue. `done(err, result)` will be called -when the task was processed. - -------------------------------------------------------- - -### queue.unshift(task, done) - -Add a task at the beginning of the queue. `done(err, result)` will be called -when the task was processed. - -------------------------------------------------------- - -### queue.pause() - -Pause the processing of tasks. Currently worked tasks are not -stopped. - -------------------------------------------------------- - -### queue.resume() - -Resume the processing of tasks. - -------------------------------------------------------- - -### queue.idle() - -Returns `false` if there are tasks being processed or waiting to be processed. -`true` otherwise. - -------------------------------------------------------- - -### queue.length() - -Returns the number of tasks waiting to be processed (in the queue). 
- -------------------------------------------------------- - -### queue.getQueue() - -Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks - -------------------------------------------------------- - -### queue.kill() - -Removes all tasks waiting to be processed, and reset `drain` to an empty -function. - -------------------------------------------------------- - -### queue.killAndDrain() - -Same than `kill` but the `drain` function will be called before reset to empty. - -------------------------------------------------------- - -### queue.error(handler) - -Set a global error handler. `handler(err, task)` will be called -each time a task is completed, `err` will be not null if the task has thrown an error. - -------------------------------------------------------- - -### queue.concurrency - -Property that returns the number of concurrent tasks that could be executed in -parallel. It can be altered at runtime. - -------------------------------------------------------- - -### queue.paused - -Property (Read-Only) that returns `true` when the queue is in a paused state. - -------------------------------------------------------- - -### queue.drain - -Function that will be called when the last -item from the queue has been processed by a worker. -It can be altered at runtime. - -------------------------------------------------------- - -### queue.empty - -Function that will be called when the last -item from the queue has been assigned to a worker. -It can be altered at runtime. - -------------------------------------------------------- - -### queue.saturated - -Function that will be called when the queue hits the concurrency -limit. -It can be altered at runtime. - -------------------------------------------------------- - -### fastqueue.promise([that], worker(arg), concurrency) - -Creates a new queue with `Promise` apis. It also offers all the methods -and properties of the object returned by [`fastqueue`](#fastqueue) with the modified -[`push`](#pushPromise) and [`unshift`](#unshiftPromise) methods. - -Node v10+ is required to use the promisified version. - -Arguments: -* `that`, optional context of the `worker` function. -* `worker`, worker function, it would be called with `that` as `this`, - if that is specified. It MUST return a `Promise`. -* `concurrency`, number of concurrent tasks that could be executed in - parallel. - - -#### queue.push(task) => Promise - -Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) -when the task is completed successfully (unsuccessfully). - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. - - -#### queue.unshift(task) => Promise - -Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) -when the task is completed successfully (unsuccessfully). - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. - - -#### queue.drained() => Promise - -Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. 
- -## License - -ISC - -[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg -[npm-badge]: https://badge.fury.io/js/fastq.svg -[npm-url]: https://badge.fury.io/js/fastq diff --git a/node_modules/fastq/SECURITY.md b/node_modules/fastq/SECURITY.md deleted file mode 100644 index dd9f1d5..0000000 --- a/node_modules/fastq/SECURITY.md +++ /dev/null @@ -1,15 +0,0 @@ -# Security Policy - -## Supported Versions - -Use this section to tell people about which versions of your project are -currently being supported with security updates. - -| Version | Supported | -| ------- | ------------------ | -| 1.x | :white_check_mark: | -| < 1.0 | :x: | - -## Reporting a Vulnerability - -Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). diff --git a/node_modules/fastq/bench.js b/node_modules/fastq/bench.js deleted file mode 100644 index 4eaa829..0000000 --- a/node_modules/fastq/bench.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const max = 1000000 -const fastqueue = require('./')(worker, 1) -const { promisify } = require('util') -const immediate = promisify(setImmediate) -const qPromise = require('./').promise(immediate, 1) -const async = require('async') -const neo = require('neo-async') -const asyncqueue = async.queue(worker, 1) -const neoqueue = neo.queue(worker, 1) - -function bench (func, done) { - const key = max + '*' + func.name - let count = -1 - - console.time(key) - end() - - function end () { - if (++count < max) { - func(end) - } else { - console.timeEnd(key) - if (done) { - done() - } - } - } -} - -function benchFastQ (done) { - fastqueue.push(42, done) -} - -function benchAsyncQueue (done) { - asyncqueue.push(42, done) -} - -function benchNeoQueue (done) { - neoqueue.push(42, done) -} - -function worker (arg, cb) { - setImmediate(cb) -} - -function benchSetImmediate (cb) { - worker(42, cb) -} - -function benchFastQPromise (done) { - qPromise.push(42).then(function () { done() }, done) -} - -function runBench (done) { - async.eachSeries([ - benchSetImmediate, - benchFastQ, - benchNeoQueue, - benchAsyncQueue, - benchFastQPromise - ], bench, done) -} - -runBench(runBench) diff --git a/node_modules/fastq/example.js b/node_modules/fastq/example.js deleted file mode 100644 index 665fdc8..0000000 --- a/node_modules/fastq/example.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var queue = require('./')(worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log('the result is', result) -}) - -function worker (arg, cb) { - cb(null, 42 * 2) -} diff --git a/node_modules/fastq/example.mjs b/node_modules/fastq/example.mjs deleted file mode 100644 index 81be789..0000000 --- a/node_modules/fastq/example.mjs +++ /dev/null @@ -1,11 +0,0 @@ -import { promise as queueAsPromised } from './queue.js' - -/* eslint-disable */ - -const queue = queueAsPromised(worker, 1) - -console.log('the result is', await queue.push(42)) - -async function worker (arg) { - return 42 * 2 -} diff --git a/node_modules/fastq/index.d.ts b/node_modules/fastq/index.d.ts deleted file mode 100644 index 817cdb5..0000000 --- a/node_modules/fastq/index.d.ts +++ /dev/null @@ -1,57 +0,0 @@ -declare function fastq(context: C, worker: fastq.worker, concurrency: number): fastq.queue -declare function fastq(worker: fastq.worker, concurrency: number): fastq.queue - -declare namespace fastq { - type worker = (this: C, task: T, cb: fastq.done) => void - type asyncWorker = (this: C, task: T) => 
Promise - type done = (err: Error | null, result?: R) => void - type errorHandler = (err: Error, task: T) => void - - interface queue { - /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */ - push(task: T, done?: done): void - /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */ - unshift(task: T, done?: done): void - /** Pause the processing of tasks. Currently worked tasks are not stopped. */ - pause(): any - /** Resume the processing of tasks. */ - resume(): any - running(): number - /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */ - idle(): boolean - /** Returns the number of tasks waiting to be processed (in the queue). */ - length(): number - /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */ - getQueue(): T[] - /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */ - kill(): any - /** Same than `kill` but the `drain` function will be called before reset to empty. */ - killAndDrain(): any - /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */ - error(handler: errorHandler): void - /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */ - concurrency: number - /** Property (Read-Only) that returns `true` when the queue is in a paused state. */ - readonly paused: boolean - /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */ - drain(): any - /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */ - empty: () => void - /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */ - saturated: () => void - } - - interface queueAsPromised extends queue { - /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ - push(task: T): Promise - /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ - unshift(task: T): Promise - /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. 
*/ - drained(): Promise - } - - function promise(context: C, worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised - function promise(worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised -} - -export = fastq diff --git a/node_modules/fastq/package.json b/node_modules/fastq/package.json deleted file mode 100644 index 989151f..0000000 --- a/node_modules/fastq/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "fastq", - "version": "1.19.1", - "description": "Fast, in memory work queue", - "main": "queue.js", - "scripts": { - "lint": "standard --verbose | snazzy", - "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js", - "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js", - "test:report": "npm run lint && npm run unit:report", - "test": "npm run lint && npm run unit", - "typescript": "tsc --project ./test/tsconfig.json", - "legacy": "tape test/test.js" - }, - "pre-commit": [ - "test", - "typescript" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/fastq.git" - }, - "keywords": [ - "fast", - "queue", - "async", - "worker" - ], - "author": "Matteo Collina ", - "license": "ISC", - "bugs": { - "url": "https://github.com/mcollina/fastq/issues" - }, - "homepage": "https://github.com/mcollina/fastq#readme", - "devDependencies": { - "async": "^3.1.0", - "neo-async": "^2.6.1", - "nyc": "^17.0.0", - "pre-commit": "^1.2.2", - "snazzy": "^9.0.0", - "standard": "^16.0.0", - "tape": "^5.0.0", - "typescript": "^5.0.4" - }, - "dependencies": { - "reusify": "^1.0.4" - }, - "standard": { - "ignore": [ - "example.mjs" - ] - } -} diff --git a/node_modules/fastq/queue.js b/node_modules/fastq/queue.js deleted file mode 100644 index 7ea8a31..0000000 --- a/node_modules/fastq/queue.js +++ /dev/null @@ -1,311 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var reusify = require('reusify') - -function fastqueue (context, worker, _concurrency) { - if (typeof context === 'function') { - _concurrency = worker - worker = context - context = null - } - - if (!(_concurrency >= 1)) { - throw new Error('fastqueue concurrency must be equal to or greater than 1') - } - - var cache = reusify(Task) - var queueHead = null - var queueTail = null - var _running = 0 - var errorHandler = null - - var self = { - push: push, - drain: noop, - saturated: noop, - pause: pause, - paused: false, - - get concurrency () { - return _concurrency - }, - set concurrency (value) { - if (!(value >= 1)) { - throw new Error('fastqueue concurrency must be equal to or greater than 1') - } - _concurrency = value - - if (self.paused) return - for (; queueHead && _running < _concurrency;) { - _running++ - release() - } - }, - - running: running, - resume: resume, - idle: idle, - length: length, - getQueue: getQueue, - unshift: unshift, - empty: noop, - kill: kill, - killAndDrain: killAndDrain, - error: error - } - - return self - - function running () { - return _running - } - - function pause () { - self.paused = true - } - - function length () { - var current = queueHead - var counter = 0 - - while (current) { - current = current.next - counter++ - } - - return counter - } - - function getQueue () { - var current = queueHead - var tasks = [] - - while (current) { - tasks.push(current.value) - current = current.next - } - - return tasks - } - - function resume () { - if (!self.paused) return - self.paused = false - if (queueHead === null) { - _running++ 
- release() - return - } - for (; queueHead && _running < _concurrency;) { - _running++ - release() - } - } - - function idle () { - return _running === 0 && self.length() === 0 - } - - function push (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running >= _concurrency || self.paused) { - if (queueTail) { - queueTail.next = current - queueTail = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function unshift (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running >= _concurrency || self.paused) { - if (queueHead) { - current.next = queueHead - queueHead = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function release (holder) { - if (holder) { - cache.release(holder) - } - var next = queueHead - if (next && _running <= _concurrency) { - if (!self.paused) { - if (queueTail === queueHead) { - queueTail = null - } - queueHead = next.next - next.next = null - worker.call(context, next.value, next.worked) - if (queueTail === null) { - self.empty() - } - } else { - _running-- - } - } else if (--_running === 0) { - self.drain() - } - } - - function kill () { - queueHead = null - queueTail = null - self.drain = noop - } - - function killAndDrain () { - queueHead = null - queueTail = null - self.drain() - self.drain = noop - } - - function error (handler) { - errorHandler = handler - } -} - -function noop () {} - -function Task () { - this.value = null - this.callback = noop - this.next = null - this.release = noop - this.context = null - this.errorHandler = null - - var self = this - - this.worked = function worked (err, result) { - var callback = self.callback - var errorHandler = self.errorHandler - var val = self.value - self.value = null - self.callback = noop - if (self.errorHandler) { - errorHandler(err, val) - } - callback.call(self.context, err, result) - self.release(self) - } -} - -function queueAsPromised (context, worker, _concurrency) { - if (typeof context === 'function') { - _concurrency = worker - worker = context - context = null - } - - function asyncWrapper (arg, cb) { - worker.call(this, arg) - .then(function (res) { - cb(null, res) - }, cb) - } - - var queue = fastqueue(context, asyncWrapper, _concurrency) - - var pushCb = queue.push - var unshiftCb = queue.unshift - - queue.push = push - queue.unshift = unshift - queue.drained = drained - - return queue - - function push (value) { - var p = new Promise(function (resolve, reject) { - pushCb(value, function (err, result) { - if (err) { - reject(err) - return - } - resolve(result) - }) - }) - - // Let's fork the promise chain to - // make the error bubble up to the user but - // not lead to a unhandledRejection - p.catch(noop) - - return p - } - - function unshift (value) { - var p = new Promise(function (resolve, reject) { - unshiftCb(value, function (err, result) { - if (err) { - reject(err) - return - } - resolve(result) - }) - }) - - // Let's fork the promise chain to - // make the error bubble up to the user but - // not lead to a unhandledRejection - 
p.catch(noop) - - return p - } - - function drained () { - var p = new Promise(function (resolve) { - process.nextTick(function () { - if (queue.idle()) { - resolve() - } else { - var previousDrain = queue.drain - queue.drain = function () { - if (typeof previousDrain === 'function') previousDrain() - resolve() - queue.drain = previousDrain - } - } - }) - }) - - return p - } -} - -module.exports = fastqueue -module.exports.promise = queueAsPromised diff --git a/node_modules/fastq/test/example.ts b/node_modules/fastq/test/example.ts deleted file mode 100644 index a47d441..0000000 --- a/node_modules/fastq/test/example.ts +++ /dev/null @@ -1,83 +0,0 @@ -import * as fastq from '../' -import { promise as queueAsPromised } from '../' - -// Basic example - -const queue = fastq(worker, 1) - -queue.push('world', (err, result) => { - if (err) throw err - console.log('the result is', result) -}) - -queue.push('push without cb') - -queue.concurrency - -queue.drain() - -queue.empty = () => undefined - -console.log('the queue tasks are', queue.getQueue()) - -queue.idle() - -queue.kill() - -queue.killAndDrain() - -queue.length - -queue.pause() - -queue.resume() - -queue.running() - -queue.saturated = () => undefined - -queue.unshift('world', (err, result) => { - if (err) throw err - console.log('the result is', result) -}) - -queue.unshift('unshift without cb') - -function worker(task: any, cb: fastq.done) { - cb(null, 'hello ' + task) -} - -// Generics example - -interface GenericsContext { - base: number; -} - -const genericsQueue = fastq({ base: 6 }, genericsWorker, 1) - -genericsQueue.push(7, (err, done) => { - if (err) throw err - console.log('the result is', done) -}) - -genericsQueue.unshift(7, (err, done) => { - if (err) throw err - console.log('the result is', done) -}) - -function genericsWorker(this: GenericsContext, task: number, cb: fastq.done) { - cb(null, 'the meaning of life is ' + (this.base * task)) -} - -const queue2 = queueAsPromised(asyncWorker, 1) - -async function asyncWorker(task: any) { - return 'hello ' + task -} - -async function run () { - await queue.push(42) - await queue.unshift(42) -} - -run() diff --git a/node_modules/fastq/test/promise.js b/node_modules/fastq/test/promise.js deleted file mode 100644 index 45349a4..0000000 --- a/node_modules/fastq/test/promise.js +++ /dev/null @@ -1,291 +0,0 @@ -'use strict' - -const test = require('tape') -const buildQueue = require('../').promise -const { promisify } = require('util') -const sleep = promisify(setTimeout) -const immediate = promisify(setImmediate) - -test('concurrency', function (t) { - t.plan(2) - t.throws(buildQueue.bind(null, worker, 0)) - t.doesNotThrow(buildQueue.bind(null, worker, 1)) - - async function worker (arg) { - return true - } -}) - -test('worker execution', async function (t) { - const queue = buildQueue(worker, 1) - - const result = await queue.push(42) - - t.equal(result, true, 'result matches') - - async function worker (arg) { - t.equal(arg, 42) - return true - } -}) - -test('limit', async function (t) { - const queue = buildQueue(worker, 1) - - const [res1, res2] = await Promise.all([queue.push(10), queue.push(0)]) - t.equal(res1, 10, 'the result matches') - t.equal(res2, 0, 'the result matches') - - async function worker (arg) { - await sleep(arg) - return arg - } -}) - -test('multiple executions', async function (t) { - const queue = buildQueue(worker, 1) - const toExec = [1, 2, 3, 4, 5] - const expected = ['a', 'b', 'c', 'd', 'e'] - let count = 0 - - await Promise.all(toExec.map(async function 
(task, i) { - const result = await queue.push(task) - t.equal(result, expected[i], 'the result matches') - })) - - async function worker (arg) { - t.equal(arg, toExec[count], 'arg matches') - return expected[count++] - } -}) - -test('drained', async function (t) { - const queue = buildQueue(worker, 2) - - const toExec = new Array(10).fill(10) - let count = 0 - - async function worker (arg) { - await sleep(arg) - count++ - } - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() - - t.equal(count, toExec.length) - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() - - t.equal(count, toExec.length * 2) -}) - -test('drained with exception should not throw', async function (t) { - const queue = buildQueue(worker, 2) - - const toExec = new Array(10).fill(10) - - async function worker () { - throw new Error('foo') - } - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() -}) - -test('drained with drain function', async function (t) { - let drainCalled = false - const queue = buildQueue(worker, 2) - - queue.drain = function () { - drainCalled = true - } - - const toExec = new Array(10).fill(10) - let count = 0 - - async function worker (arg) { - await sleep(arg) - count++ - } - - toExec.forEach(function () { - queue.push() - }) - - await queue.drained() - - t.equal(count, toExec.length) - t.equal(drainCalled, true) -}) - -test('drained while idle should resolve', async function (t) { - const queue = buildQueue(worker, 2) - - async function worker (arg) { - await sleep(arg) - } - - await queue.drained() -}) - -test('drained while idle should not call the drain function', async function (t) { - let drainCalled = false - const queue = buildQueue(worker, 2) - - queue.drain = function () { - drainCalled = true - } - - async function worker (arg) { - await sleep(arg) - } - - await queue.drained() - - t.equal(drainCalled, false) -}) - -test('set this', async function (t) { - t.plan(1) - const that = {} - const queue = buildQueue(that, worker, 1) - - await queue.push(42) - - async function worker (arg) { - t.equal(this, that, 'this matches') - } -}) - -test('unshift', async function (t) { - const queue = buildQueue(worker, 1) - const expected = [1, 2, 3, 4] - - await Promise.all([ - queue.push(1), - queue.push(4), - queue.unshift(3), - queue.unshift(2) - ]) - - t.is(expected.length, 0) - - async function worker (arg) { - t.equal(expected.shift(), arg, 'tasks come in order') - } -}) - -test('push with worker throwing error', async function (t) { - t.plan(5) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - try { - await q.push(42) - } catch (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - } -}) - -test('unshift with worker throwing error', async function (t) { - t.plan(2) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - try { - await q.unshift(42) - } catch (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - } -}) - -test('no unhandledRejection (push)', async function (t) { - function 
handleRejection () { - t.fail('unhandledRejection') - } - process.once('unhandledRejection', handleRejection) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - - q.push(42) - - await immediate() - process.removeListener('unhandledRejection', handleRejection) -}) - -test('no unhandledRejection (unshift)', async function (t) { - function handleRejection () { - t.fail('unhandledRejection') - } - process.once('unhandledRejection', handleRejection) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - - q.unshift(42) - - await immediate() - process.removeListener('unhandledRejection', handleRejection) -}) - -test('drained should resolve after async tasks complete', async function (t) { - const logs = [] - - async function processTask () { - await new Promise(resolve => setTimeout(resolve, 0)) - logs.push('processed') - } - - const queue = buildQueue(processTask, 1) - queue.drain = () => logs.push('called drain') - - queue.drained().then(() => logs.push('drained promise resolved')) - - await Promise.all([ - queue.push(), - queue.push(), - queue.push() - ]) - - t.deepEqual(logs, [ - 'processed', - 'processed', - 'processed', - 'called drain', - 'drained promise resolved' - ], 'events happened in correct order') -}) - -test('drained should handle undefined drain function', async function (t) { - const queue = buildQueue(worker, 1) - - async function worker (arg) { - await sleep(10) - return arg - } - - queue.drain = undefined - queue.push(1) - await queue.drained() - - t.pass('drained resolved successfully with undefined drain') -}) diff --git a/node_modules/fastq/test/test.js b/node_modules/fastq/test/test.js deleted file mode 100644 index 79f0f6c..0000000 --- a/node_modules/fastq/test/test.js +++ /dev/null @@ -1,653 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var test = require('tape') -var buildQueue = require('../') - -test('concurrency', function (t) { - t.plan(6) - t.throws(buildQueue.bind(null, worker, 0)) - t.throws(buildQueue.bind(null, worker, NaN)) - t.doesNotThrow(buildQueue.bind(null, worker, 1)) - - var queue = buildQueue(worker, 1) - t.throws(function () { - queue.concurrency = 0 - }) - t.throws(function () { - queue.concurrency = NaN - }) - t.doesNotThrow(function () { - queue.concurrency = 2 - }) - - function worker (arg, cb) { - cb(null, true) - } -}) - -test('worker execution', function (t) { - t.plan(3) - - var queue = buildQueue(worker, 1) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - function worker (arg, cb) { - t.equal(arg, 42) - cb(null, true) - } -}) - -test('limit', function (t) { - t.plan(4) - - var expected = [10, 0] - var queue = buildQueue(worker, 1) - - queue.push(10, result) - queue.push(0, result) - - function result (err, arg) { - t.error(err, 'no error') - t.equal(arg, expected.shift(), 'the result matches') - } - - function worker (arg, cb) { - setTimeout(cb, arg, null, arg) - } -}) - -test('multiple executions', function (t) { - t.plan(15) - - var queue = buildQueue(worker, 1) - var toExec = [1, 2, 3, 4, 5] - var count = 0 - - toExec.forEach(function (task) { - queue.push(task, done) - }) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, toExec[count - 1], 'the result matches') - } - - function worker (arg, cb) { - t.equal(arg, toExec[count], 'arg matches') - count++ - setImmediate(cb, null, arg) - } -}) - -test('multiple executions, one after another', function 
(t) { - t.plan(15) - - var queue = buildQueue(worker, 1) - var toExec = [1, 2, 3, 4, 5] - var count = 0 - - queue.push(toExec[0], done) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, toExec[count - 1], 'the result matches') - if (count < toExec.length) { - queue.push(toExec[count], done) - } - } - - function worker (arg, cb) { - t.equal(arg, toExec[count], 'arg matches') - count++ - setImmediate(cb, null, arg) - } -}) - -test('set this', function (t) { - t.plan(3) - - var that = {} - var queue = buildQueue(that, worker, 1) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(this, that, 'this matches') - }) - - function worker (arg, cb) { - t.equal(this, that, 'this matches') - cb(null, true) - } -}) - -test('drain', function (t) { - t.plan(4) - - var queue = buildQueue(worker, 1) - var worked = false - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - queue.drain = function () { - t.equal(true, worked, 'drained') - } - - function worker (arg, cb) { - t.equal(arg, 42) - worked = true - setImmediate(cb, null, true) - } -}) - -test('pause && resume', function (t) { - t.plan(13) - - var queue = buildQueue(worker, 1) - var worked = false - var expected = [42, 24] - - t.notOk(queue.paused, 'it should not be paused') - - queue.pause() - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - queue.push(24, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - t.notOk(worked, 'it should be paused') - t.ok(queue.paused, 'it should be paused') - - queue.resume() - queue.pause() - queue.resume() - queue.resume() // second resume is a no-op - - function worker (arg, cb) { - t.notOk(queue.paused, 'it should not be paused') - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - t.equal(arg, expected.shift()) - worked = true - process.nextTick(function () { cb(null, true) }) - } -}) - -test('pause in flight && resume', function (t) { - t.plan(16) - - var queue = buildQueue(worker, 1) - var expected = [42, 24, 12] - - t.notOk(queue.paused, 'it should not be paused') - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.ok(queue.paused, 'it should be paused') - process.nextTick(function () { - queue.resume() - queue.pause() - queue.resume() - }) - }) - - queue.push(24, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.paused, 'it should not be paused') - }) - - queue.push(12, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.paused, 'it should not be paused') - }) - - queue.pause() - - function worker (arg, cb) { - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - t.equal(arg, expected.shift()) - process.nextTick(function () { cb(null, true) }) - } -}) - -test('altering concurrency', function (t) { - t.plan(24) - - var queue = buildQueue(worker, 1) - - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - - queue.pause() - - queue.concurrency = 3 // concurrency changes are ignored while paused - queue.concurrency = 2 - - queue.resume() - - t.equal(queue.running(), 2, '2 jobs running') - - queue.concurrency = 3 - - t.equal(queue.running(), 3, '3 jobs running') - - queue.concurrency = 1 - - 
t.equal(queue.running(), 3, '3 jobs running') // running jobs can't be killed - - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - - function workDone (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - } - - function worker (arg, cb) { - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('idle()', function (t) { - t.plan(12) - - var queue = buildQueue(worker, 1) - - t.ok(queue.idle(), 'queue is idle') - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.idle(), 'queue is not idle') - }) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - // it will go idle after executing this function - setImmediate(function () { - t.ok(queue.idle(), 'queue is now idle') - }) - }) - - t.notOk(queue.idle(), 'queue is not idle') - - function worker (arg, cb) { - t.notOk(queue.idle(), 'queue is not idle') - t.equal(arg, 42) - setImmediate(cb, null, true) - } -}) - -test('saturated', function (t) { - t.plan(9) - - var queue = buildQueue(worker, 1) - var preworked = 0 - var worked = 0 - - queue.saturated = function () { - t.pass('saturated') - t.equal(preworked, 1, 'started 1 task') - t.equal(worked, 0, 'worked zero task') - } - - queue.push(42, done) - queue.push(42, done) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - } - - function worker (arg, cb) { - t.equal(arg, 42) - preworked++ - setImmediate(function () { - worked++ - cb(null, true) - }) - } -}) - -test('length', function (t) { - t.plan(7) - - var queue = buildQueue(worker, 1) - - t.equal(queue.length(), 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.length(), 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.length(), 1, 'one task waiting') - queue.push(42, done) - t.equal(queue.length(), 2, 'two tasks waiting') - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('getQueue', function (t) { - t.plan(10) - - var queue = buildQueue(worker, 1) - - t.equal(queue.getQueue().length, 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.getQueue().length, 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.getQueue().length, 1, 'one task waiting') - t.equal(queue.getQueue()[0], 42, 'should be equal') - queue.push(43, done) - t.equal(queue.getQueue().length, 2, 'two tasks waiting') - t.equal(queue.getQueue()[0], 42, 'should be equal') - t.equal(queue.getQueue()[1], 43, 'should be equal') - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('unshift', function (t) { - t.plan(8) - - var queue = buildQueue(worker, 1) - var expected = [1, 2, 3, 4] - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('unshift && empty', function (t) { - t.plan(2) - - var queue = buildQueue(worker, 1) - var completed = false - - queue.pause() - - queue.empty = function () { - 
t.notOk(completed, 'the task has not completed yet') - } - - queue.unshift(1, done) - - queue.resume() - - function done (err, result) { - completed = true - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('push && empty', function (t) { - t.plan(2) - - var queue = buildQueue(worker, 1) - var completed = false - - queue.pause() - - queue.empty = function () { - t.notOk(completed, 'the task has not completed yet') - } - - queue.push(1, done) - - queue.resume() - - function done (err, result) { - completed = true - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('kill', function (t) { - t.plan(5) - - var queue = buildQueue(worker, 1) - var expected = [1] - - var predrain = queue.drain - - queue.drain = function drain () { - t.fail('drain should never be called') - } - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - queue.kill() - - function done (err, result) { - t.error(err, 'no error') - setImmediate(function () { - t.equal(queue.length(), 0, 'no queued tasks') - t.equal(queue.running(), 0, 'no running tasks') - t.equal(queue.drain, predrain, 'drain is back to default') - }) - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('killAndDrain', function (t) { - t.plan(6) - - var queue = buildQueue(worker, 1) - var expected = [1] - - var predrain = queue.drain - - queue.drain = function drain () { - t.pass('drain has been called') - } - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - queue.killAndDrain() - - function done (err, result) { - t.error(err, 'no error') - setImmediate(function () { - t.equal(queue.length(), 0, 'no queued tasks') - t.equal(queue.running(), 0, 'no running tasks') - t.equal(queue.drain, predrain, 'drain is back to default') - }) - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('pause && idle', function (t) { - t.plan(11) - - var queue = buildQueue(worker, 1) - var worked = false - - t.notOk(queue.paused, 'it should not be paused') - t.ok(queue.idle(), 'should be idle') - - queue.pause() - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - t.notOk(worked, 'it should be paused') - t.ok(queue.paused, 'it should be paused') - t.notOk(queue.idle(), 'should not be idle') - - queue.resume() - - t.notOk(queue.paused, 'it should not be paused') - t.notOk(queue.idle(), 'it should not be idle') - - function worker (arg, cb) { - t.equal(arg, 42) - worked = true - process.nextTick(cb.bind(null, null, true)) - process.nextTick(function () { - t.ok(queue.idle(), 'is should be idle') - }) - } -}) - -test('push without cb', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - - queue.push(42) - - function worker (arg, cb) { - t.equal(arg, 42) - cb() - } -}) - -test('unshift without cb', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - - queue.unshift(42) - - function worker (arg, cb) { - t.equal(arg, 42) - cb() - } -}) - -test('push with worker throwing error', function (t) { - t.plan(5) - var q = buildQueue(function (task, cb) { - cb(new Error('test error'), null) - }, 1) - q.error(function (err, task) { - t.ok(err 
instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - q.push(42, function (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - }) -}) - -test('unshift with worker throwing error', function (t) { - t.plan(5) - var q = buildQueue(function (task, cb) { - cb(new Error('test error'), null) - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - q.unshift(42, function (err) { - t.ok(err instanceof Error, 'unshift callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - }) -}) - -test('pause/resume should trigger drain event', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - queue.pause() - queue.drain = function () { - t.pass('drain should be called') - } - - function worker (arg, cb) { - cb(null, true) - } - - queue.resume() -}) - -test('paused flag', function (t) { - t.plan(2) - - var queue = buildQueue(function (arg, cb) { - cb(null) - }, 1) - t.equal(queue.paused, false) - queue.pause() - t.equal(queue.paused, true) -}) diff --git a/node_modules/fastq/test/tsconfig.json b/node_modules/fastq/test/tsconfig.json deleted file mode 100644 index 66e16e9..0000000 --- a/node_modules/fastq/test/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "noEmit": true, - "strict": true - }, - "files": [ - "./example.ts" - ] -} diff --git a/node_modules/fill-range/LICENSE b/node_modules/fill-range/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/fill-range/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/fill-range/README.md b/node_modules/fill-range/README.md deleted file mode 100644 index 8d756fe..0000000 --- a/node_modules/fill-range/README.md +++ /dev/null @@ -1,237 +0,0 @@ -# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) - -> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save fill-range -``` - -## Usage - -Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. - -```js -const fill = require('fill-range'); -// fill(from, to[, step, options]); - -console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] -console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 -``` - -**Params** - -* `from`: **{String|Number}** the number or letter to start with -* `to`: **{String|Number}** the number or letter to end with -* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. -* `options`: **{Object|Function}**: See all available [options](#options) - -## Examples - -By default, an array of values is returned. - -**Alphabetical ranges** - -```js -console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] -console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] -``` - -**Numerical ranges** - -Numbers can be defined as actual numbers or strings. - -```js -console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] -console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] -``` - -**Negative ranges** - -Numbers can be defined as actual numbers or strings. - -```js -console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] -console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] -``` - -**Steps (increments)** - -```js -// numerical ranges with increments -console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] -console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] -console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] - -// alphabetical ranges with increments -console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] -console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] -console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] -``` - -## Options - -### options.step - -**Type**: `number` (formatted as a string or number) - -**Default**: `undefined` - -**Description**: The increment to use for the range. Can be used with letters or numbers. 
- -**Example(s)** - -```js -// numbers -console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] -console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] -console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] - -// letters -console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] -console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] -console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] -``` - -### options.strictRanges - -**Type**: `boolean` - -**Default**: `false` - -**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. - -**Example(s)** - -The following are all invalid: - -```js -fill('1.1', '2'); // decimals not supported in ranges -fill('a', '2'); // incompatible range values -fill(1, 10, 'foo'); // invalid "step" argument -``` - -### options.stringify - -**Type**: `boolean` - -**Default**: `undefined` - -**Description**: Cast all returned values to strings. By default, integers are returned as numbers. - -**Example(s)** - -```js -console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] -console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] -``` - -### options.toRegex - -**Type**: `boolean` - -**Default**: `undefined` - -**Description**: Create a regex-compatible source string, instead of expanding values to an array. - -**Example(s)** - -```js -// alphabetical range -console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' -// alphabetical with step -console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' -// numerical range -console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' -// numerical range with zero padding -console.log(fill('000001', '100000', { toRegex: true })); -//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' -``` - -### options.transform - -**Type**: `function` - -**Default**: `undefined` - -**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. - -**Example(s)** - -```js -// add zero padding -console.log(fill(1, 5, value => String(value).padStart(4, '0'))); -//=> ['0001', '0002', '0003', '0004', '0005'] -``` - -## About - -
-### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
-### Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
-### Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 116 | [jonschlinkert](https://github.com/jonschlinkert) | -| 4 | [paulmillr](https://github.com/paulmillr) | -| 2 | [realityking](https://github.com/realityking) | -| 2 | [bluelovers](https://github.com/bluelovers) | -| 1 | [edorivai](https://github.com/edorivai) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! - - - - - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/fill-range/index.js b/node_modules/fill-range/index.js deleted file mode 100644 index ddb212e..0000000 --- a/node_modules/fill-range/index.js +++ /dev/null @@ -1,248 +0,0 @@ -/*! - * fill-range - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Licensed under the MIT License. - */ - -'use strict'; - -const util = require('util'); -const toRegexRange = require('to-regex-range'); - -const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); - -const transform = toNumber => { - return value => toNumber === true ? Number(value) : String(value); -}; - -const isValidValue = value => { - return typeof value === 'number' || (typeof value === 'string' && value !== ''); -}; - -const isNumber = num => Number.isInteger(+num); - -const zeros = input => { - let value = `${input}`; - let index = -1; - if (value[0] === '-') value = value.slice(1); - if (value === '0') return false; - while (value[++index] === '0'); - return index > 0; -}; - -const stringify = (start, end, options) => { - if (typeof start === 'string' || typeof end === 'string') { - return true; - } - return options.stringify === true; -}; - -const pad = (input, maxLength, toNumber) => { - if (maxLength > 0) { - let dash = input[0] === '-' ? '-' : ''; - if (dash) input = input.slice(1); - input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); - } - if (toNumber === false) { - return String(input); - } - return input; -}; - -const toMaxLen = (input, maxLength) => { - let negative = input[0] === '-' ? '-' : ''; - if (negative) { - input = input.slice(1); - maxLength--; - } - while (input.length < maxLength) input = '0' + input; - return negative ? ('-' + input) : input; -}; - -const toSequence = (parts, options, maxLen) => { - parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - - let prefix = options.capture ? 
'' : '?:'; - let positives = ''; - let negatives = ''; - let result; - - if (parts.positives.length) { - positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|'); - } - - if (parts.negatives.length) { - negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`; - } - - if (positives && negatives) { - result = `${positives}|${negatives}`; - } else { - result = positives || negatives; - } - - if (options.wrap) { - return `(${prefix}${result})`; - } - - return result; -}; - -const toRange = (a, b, isNumbers, options) => { - if (isNumbers) { - return toRegexRange(a, b, { wrap: false, ...options }); - } - - let start = String.fromCharCode(a); - if (a === b) return start; - - let stop = String.fromCharCode(b); - return `[${start}-${stop}]`; -}; - -const toRegex = (start, end, options) => { - if (Array.isArray(start)) { - let wrap = options.wrap === true; - let prefix = options.capture ? '' : '?:'; - return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); - } - return toRegexRange(start, end, options); -}; - -const rangeError = (...args) => { - return new RangeError('Invalid range arguments: ' + util.inspect(...args)); -}; - -const invalidRange = (start, end, options) => { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; -}; - -const invalidStep = (step, options) => { - if (options.strictRanges === true) { - throw new TypeError(`Expected step "${step}" to be a number`); - } - return []; -}; - -const fillNumbers = (start, end, step = 1, options = {}) => { - let a = Number(start); - let b = Number(end); - - if (!Number.isInteger(a) || !Number.isInteger(b)) { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; - } - - // fix negative zero - if (a === 0) a = 0; - if (b === 0) b = 0; - - let descending = a > b; - let startString = String(start); - let endString = String(end); - let stepString = String(step); - step = Math.max(Math.abs(step), 1); - - let padded = zeros(startString) || zeros(endString) || zeros(stepString); - let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; - let toNumber = padded === false && stringify(start, end, options) === false; - let format = options.transform || transform(toNumber); - - if (options.toRegex && step === 1) { - return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); - } - - let parts = { negatives: [], positives: [] }; - let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - if (options.toRegex === true && step > 1) { - push(a); - } else { - range.push(pad(format(a, index), maxLen, toNumber)); - } - a = descending ? a - step : a + step; - index++; - } - - if (options.toRegex === true) { - return step > 1 - ? 
toSequence(parts, options, maxLen) - : toRegex(range, null, { wrap: false, ...options }); - } - - return range; -}; - -const fillLetters = (start, end, step = 1, options = {}) => { - if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { - return invalidRange(start, end, options); - } - - let format = options.transform || (val => String.fromCharCode(val)); - let a = `${start}`.charCodeAt(0); - let b = `${end}`.charCodeAt(0); - - let descending = a > b; - let min = Math.min(a, b); - let max = Math.max(a, b); - - if (options.toRegex && step === 1) { - return toRange(min, max, false, options); - } - - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - range.push(format(a, index)); - a = descending ? a - step : a + step; - index++; - } - - if (options.toRegex === true) { - return toRegex(range, null, { wrap: false, options }); - } - - return range; -}; - -const fill = (start, end, step, options = {}) => { - if (end == null && isValidValue(start)) { - return [start]; - } - - if (!isValidValue(start) || !isValidValue(end)) { - return invalidRange(start, end, options); - } - - if (typeof step === 'function') { - return fill(start, end, 1, { transform: step }); - } - - if (isObject(step)) { - return fill(start, end, 0, step); - } - - let opts = { ...options }; - if (opts.capture === true) opts.wrap = true; - step = step || opts.step || 1; - - if (!isNumber(step)) { - if (step != null && !isObject(step)) return invalidStep(step, opts); - return fill(start, end, 1, step); - } - - if (isNumber(start) && isNumber(end)) { - return fillNumbers(start, end, step, opts); - } - - return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); -}; - -module.exports = fill; diff --git a/node_modules/fill-range/package.json b/node_modules/fill-range/package.json deleted file mode 100644 index 582357f..0000000 --- a/node_modules/fill-range/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "fill-range", - "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", - "version": "7.1.1", - "homepage": "https://github.com/jonschlinkert/fill-range", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Edo Rivai (edo.rivai.nl)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Paul Miller (paulmillr.com)", - "Rouven Weßling (www.rouvenwessling.de)", - "(https://github.com/wtgtybhertgeghgtwtg)" - ], - "repository": "jonschlinkert/fill-range", - "bugs": { - "url": "https://github.com/jonschlinkert/fill-range/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8" - }, - "scripts": { - "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", - "mocha": "mocha --reporter dot", - "test": "npm run lint && npm run mocha", - "test:ci": "npm run test:cover", - "test:cover": "nyc npm run mocha" - }, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "devDependencies": { - "gulp-format-md": "^2.0.0", - "mocha": "^6.1.1", - "nyc": "^15.1.0" - }, - "keywords": [ - "alpha", - "alphabetical", - "array", - "bash", - "brace", - "expand", - "expansion", - "fill", - "glob", - "match", - "matches", - "matching", - "number", - "numerical", - "range", - "ranges", - "regex", - "sh" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - 
"gulp-format-md" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/get-stream/buffer-stream.js b/node_modules/get-stream/buffer-stream.js deleted file mode 100644 index 2dd7574..0000000 --- a/node_modules/get-stream/buffer-stream.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -const {PassThrough: PassThroughStream} = require('stream'); - -module.exports = options => { - options = {...options}; - - const {array} = options; - let {encoding} = options; - const isBuffer = encoding === 'buffer'; - let objectMode = false; - - if (array) { - objectMode = !(encoding || isBuffer); - } else { - encoding = encoding || 'utf8'; - } - - if (isBuffer) { - encoding = null; - } - - const stream = new PassThroughStream({objectMode}); - - if (encoding) { - stream.setEncoding(encoding); - } - - let length = 0; - const chunks = []; - - stream.on('data', chunk => { - chunks.push(chunk); - - if (objectMode) { - length = chunks.length; - } else { - length += chunk.length; - } - }); - - stream.getBufferedValue = () => { - if (array) { - return chunks; - } - - return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); - }; - - stream.getBufferedLength = () => length; - - return stream; -}; diff --git a/node_modules/get-stream/index.d.ts b/node_modules/get-stream/index.d.ts deleted file mode 100644 index 9485b2b..0000000 --- a/node_modules/get-stream/index.d.ts +++ /dev/null @@ -1,105 +0,0 @@ -/// -import {Stream} from 'stream'; - -declare class MaxBufferErrorClass extends Error { - readonly name: 'MaxBufferError'; - constructor(); -} - -declare namespace getStream { - interface Options { - /** - Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `MaxBufferError` error. - - @default Infinity - */ - readonly maxBuffer?: number; - } - - interface OptionsWithEncoding extends Options { - /** - [Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. - - @default 'utf8' - */ - readonly encoding?: EncodingType; - } - - type MaxBufferError = MaxBufferErrorClass; -} - -declare const getStream: { - /** - Get the `stream` as a string. - - @returns A promise that resolves when the end event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. - - @example - ``` - import * as fs from 'fs'; - import getStream = require('get-stream'); - - (async () => { - const stream = fs.createReadStream('unicorn.txt'); - - console.log(await getStream(stream)); - // ,,))))))));, - // __)))))))))))))), - // \|/ -\(((((''''((((((((. - // -*-==//////(('' . `)))))), - // /|\ ))| o ;-. '((((( ,(, - // ( `| / ) ;))))' ,_))^;(~ - // | | | ,))((((_ _____------~~~-. %,;(;(>';'~ - // o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ - // ; ''''```` `: `:::|\,__,%% );`'; ~ - // | _ ) / `:|`----' `-' - // ______/\/~ | / / - // /~;;.____/;;' / ___--,-( `;;;/ - // / // _;______;'------~~~~~ /;;/\ / - // // | | / ; \;;,\ - // (<_ | ; /',/-----' _> - // \_| ||_ //~;~~~~~~~~~ - // `\_| (,~~ - // \~\ - // ~~ - })(); - ``` - */ - (stream: Stream, options?: getStream.OptionsWithEncoding): Promise; - - /** - Get the `stream` as a buffer. - - It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. - */ - buffer( - stream: Stream, - options?: getStream.Options - ): Promise; - - /** - Get the `stream` as an array of values. - - It honors both the `maxBuffer` and `encoding` options. 
The behavior changes slightly based on the encoding chosen: - - - When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). - - When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. - - When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. - */ - array( - stream: Stream, - options?: getStream.Options - ): Promise; - array( - stream: Stream, - options: getStream.OptionsWithEncoding<'buffer'> - ): Promise; - array( - stream: Stream, - options: getStream.OptionsWithEncoding - ): Promise; - - MaxBufferError: typeof MaxBufferErrorClass; -}; - -export = getStream; diff --git a/node_modules/get-stream/index.js b/node_modules/get-stream/index.js deleted file mode 100644 index 1c5d028..0000000 --- a/node_modules/get-stream/index.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; -const {constants: BufferConstants} = require('buffer'); -const stream = require('stream'); -const {promisify} = require('util'); -const bufferStream = require('./buffer-stream'); - -const streamPipelinePromisified = promisify(stream.pipeline); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -async function getStream(inputStream, options) { - if (!inputStream) { - throw new Error('Expected a stream'); - } - - options = { - maxBuffer: Infinity, - ...options - }; - - const {maxBuffer} = options; - const stream = bufferStream(options); - - await new Promise((resolve, reject) => { - const rejectPromise = error => { - // Don't retrieve an oversized buffer. - if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { - error.bufferedData = stream.getBufferedValue(); - } - - reject(error); - }; - - (async () => { - try { - await streamPipelinePromisified(inputStream, stream); - resolve(); - } catch (error) { - rejectPromise(error); - } - })(); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }); - - return stream.getBufferedValue(); -} - -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); -module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); -module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/get-stream/license b/node_modules/get-stream/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/get-stream/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/get-stream/package.json b/node_modules/get-stream/package.json deleted file mode 100644 index bd47a75..0000000 --- a/node_modules/get-stream/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "get-stream", - "version": "6.0.1", - "description": "Get a stream as a string, buffer, or array", - "license": "MIT", - "repository": "sindresorhus/get-stream", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=10" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts", - "buffer-stream.js" - ], - "keywords": [ - "get", - "stream", - "promise", - "concat", - "string", - "text", - "buffer", - "read", - "data", - "consume", - "readable", - "readablestream", - "array", - "object" - ], - "devDependencies": { - "@types/node": "^14.0.27", - "ava": "^2.4.0", - "into-stream": "^5.0.0", - "tsd": "^0.13.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/get-stream/readme.md b/node_modules/get-stream/readme.md deleted file mode 100644 index 70b01fd..0000000 --- a/node_modules/get-stream/readme.md +++ /dev/null @@ -1,124 +0,0 @@ -# get-stream - -> Get a stream as a string, buffer, or array - -## Install - -``` -$ npm install get-stream -``` - -## Usage - -```js -const fs = require('fs'); -const getStream = require('get-stream'); - -(async () => { - const stream = fs.createReadStream('unicorn.txt'); - - console.log(await getStream(stream)); - /* - ,,))))))));, - __)))))))))))))), - \|/ -\(((((''''((((((((. - -*-==//////(('' . `)))))), - /|\ ))| o ;-. '((((( ,(, - ( `| / ) ;))))' ,_))^;(~ - | | | ,))((((_ _____------~~~-. %,;(;(>';'~ - o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ - ; ''''```` `: `:::|\,__,%% );`'; ~ - | _ ) / `:|`----' `-' - ______/\/~ | / / - /~;;.____/;;' / ___--,-( `;;;/ - / // _;______;'------~~~~~ /;;/\ / - // | | / ; \;;,\ - (<_ | ; /',/-----' _> - \_| ||_ //~;~~~~~~~~~ - `\_| (,~~ - \~\ - ~~ - */ -})(); -``` - -## API - -The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. - -### getStream(stream, options?) - -Get the `stream` as a string. - -#### options - -Type: `object` - -##### encoding - -Type: `string`\ -Default: `'utf8'` - -[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. - -##### maxBuffer - -Type: `number`\ -Default: `Infinity` - -Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. - -### getStream.buffer(stream, options?) - -Get the `stream` as a buffer. - -It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. - -### getStream.array(stream, options?) - -Get the `stream` as an array of values. - -It honors both the `maxBuffer` and `encoding` options. 
The behavior changes slightly based on the encoding chosen: - -- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). - -- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. - -- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. - -## Errors - -If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. - -```js -(async () => { - try { - await getStream(streamThatErrorsAtTheEnd('unicorn')); - } catch (error) { - console.log(error.bufferedData); - //=> 'unicorn' - } -})() -``` - -## FAQ - -### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? - -This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. - -## Related - -- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer - ---- - -
-Get professional support for this package with a Tidelift subscription. - -Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. -
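Supplementary note (not part of any file in this patch): the get-stream readme above documents `maxBuffer`, `MaxBufferError`, and the `bufferedData` property attached on rejection. A minimal, hedged sketch of how those pieces fit together follows; the file name `unicorn.txt` and the 1 MiB limit are illustrative assumptions, not values from the package.

```js
// Illustrative sketch only: cap how many characters get-stream will buffer.
// 'unicorn.txt' and the 1 MiB limit are assumptions made for this example.
const fs = require('fs');
const getStream = require('get-stream');

(async () => {
	try {
		const contents = await getStream(fs.createReadStream('unicorn.txt'), {
			maxBuffer: 1024 * 1024 // reject with MaxBufferError past ~1 MiB of characters
		});
		console.log(contents.length);
	} catch (error) {
		if (error instanceof getStream.MaxBufferError) {
			// Data buffered before the limit was exceeded is still attached.
			console.error('Stream too large; buffered', error.bufferedData.length, 'characters');
		} else {
			throw error;
		}
	}
})();
```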
diff --git a/node_modules/glob-parent/CHANGELOG.md b/node_modules/glob-parent/CHANGELOG.md deleted file mode 100644 index fb9de96..0000000 --- a/node_modules/glob-parent/CHANGELOG.md +++ /dev/null @@ -1,110 +0,0 @@ -### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) - - -### Bug Fixes - -* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) - -### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) - - -### Bug Fixes - -* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) - -## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) - - -### Features - -* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) - -## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* Drop support for node <6 & bump dependencies - -### Miscellaneous Chores - -* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) - -## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* question marks are valid path characters on Windows so avoid flagging as a glob when alone -* Update is-glob dependency - -### Features - -* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) -* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) -* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) - -## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) - - -### Features - -* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) -* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) -* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) -* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) -* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) - - -### Bug Fixes - -* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) - -### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) - - -### Features - -* use path-dirname ponyfill ([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) - - -### Bug Fixes - -* unescape glob-escaped dirnames 
on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) - -## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* update is-glob dependency - -### Features - -* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) - -## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) - - -### Features - -* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) - -## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) - -## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) - - -### Reverts - -* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) - -## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) - - -### Features - -* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) - -## 1.0.0 (2021-01-27) - diff --git a/node_modules/glob-parent/LICENSE b/node_modules/glob-parent/LICENSE deleted file mode 100644 index 63222d7..0000000 --- a/node_modules/glob-parent/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2015, 2019 Elan Shanker - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/glob-parent/README.md b/node_modules/glob-parent/README.md deleted file mode 100644 index 36a2793..0000000 --- a/node_modules/glob-parent/README.md +++ /dev/null @@ -1,137 +0,0 @@ -


- -# glob-parent - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] - -Extract the non-magic parent path from a glob string. - -## Usage - -```js -var globParent = require('glob-parent'); - -globParent('path/to/*.js'); // 'path/to' -globParent('/root/path/to/*.js'); // '/root/path/to' -globParent('/*.js'); // '/' -globParent('*.js'); // '.' -globParent('**/*.js'); // '.' -globParent('path/{to,from}'); // 'path' -globParent('path/!(to|from)'); // 'path' -globParent('path/?(to|from)'); // 'path' -globParent('path/+(to|from)'); // 'path' -globParent('path/*(to|from)'); // 'path' -globParent('path/@(to|from)'); // 'path' -globParent('path/**/*'); // 'path' - -// if provided a non-glob path, returns the nearest dir -globParent('path/foo/bar.js'); // 'path/foo' -globParent('path/foo/'); // 'path/foo' -globParent('path/foo'); // 'path' (see issue #3 for details) -``` - -## API - -### `globParent(maybeGlobString, [options])` - -Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. - -#### options - -```js -{ - // Disables the automatic conversion of slashes for Windows - flipBackslashes: true -} -``` - -## Escaping - -The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: - -- `?` (question mark) unless used as a path segment alone -- `*` (asterisk) -- `|` (pipe) -- `(` (opening parenthesis) -- `)` (closing parenthesis) -- `{` (opening curly brace) -- `}` (closing curly brace) -- `[` (opening bracket) -- `]` (closing bracket) - -**Example** - -```js -globParent('foo/[bar]/') // 'foo' -globParent('foo/\\[bar]/') // 'foo/[bar]' -``` - -## Limitations - -### Braces & Brackets -This library attempts a quick and imperfect method of determining which path -parts have glob magic without fully parsing/lexing the pattern. There are some -advanced use cases that can trip it up, such as nested braces where the outer -pair is escaped and the inner one contains a path separator. If you find -yourself in the unlikely circumstance of being affected by this or need to -ensure higher-fidelity glob handling in your library, it is recommended that you -pre-process your input with [expand-braces] and/or [expand-brackets]. - -### Windows -Backslashes are not valid path separators for globs. If a path with backslashes -is provided anyway, for simple cases, glob-parent will replace the path -separator for you and return the non-glob parent path (now with -forward-slashes, which are still valid as Windows path separators). - -This cannot be used in conjunction with escape characters. - -```js -// BAD -globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' - -// GOOD -globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' -``` - -If you are using escape characters for a pattern without path parts (i.e. -relative to `cwd`), prefix with `./` to avoid confusing glob-parent. - -```js -// BAD -globParent('foo \\[bar]') // 'foo ' -globParent('foo \\[bar]*') // 'foo ' - -// GOOD -globParent('./foo \\[bar]') // 'foo [bar]' -globParent('./foo \\[bar]*') // '.' 
-``` - -## License - -ISC - -[expand-braces]: https://github.com/jonschlinkert/expand-braces -[expand-brackets]: https://github.com/jonschlinkert/expand-brackets - -[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg -[npm-url]: https://www.npmjs.com/package/glob-parent -[npm-image]: https://img.shields.io/npm/v/glob-parent.svg - -[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master -[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master - -[travis-url]: https://travis-ci.org/gulpjs/glob-parent -[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci - -[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent -[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor - -[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent -[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg - -[gitter-url]: https://gitter.im/gulpjs/gulp -[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/glob-parent/index.js b/node_modules/glob-parent/index.js deleted file mode 100644 index 09e257e..0000000 --- a/node_modules/glob-parent/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -var isGlob = require('is-glob'); -var pathPosixDirname = require('path').posix.dirname; -var isWin32 = require('os').platform() === 'win32'; - -var slash = '/'; -var backslash = /\\/g; -var enclosure = /[\{\[].*[\}\]]$/; -var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; -var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; - -/** - * @param {string} str - * @param {Object} opts - * @param {boolean} [opts.flipBackslashes=true] - * @returns {string} - */ -module.exports = function globParent(str, opts) { - var options = Object.assign({ flipBackslashes: true }, opts); - - // flip windows path separators - if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { - str = str.replace(backslash, slash); - } - - // special case for strings ending in enclosure containing path separator - if (enclosure.test(str)) { - str += slash; - } - - // preserves full path in case of trailing path separator - str += 'a'; - - // remove path parts that are globby - do { - str = pathPosixDirname(str); - } while (isGlob(str) || globby.test(str)); - - // remove escape chars and return result - return str.replace(escaped, '$1'); -}; diff --git a/node_modules/glob-parent/package.json b/node_modules/glob-parent/package.json deleted file mode 100644 index 125c971..0000000 --- a/node_modules/glob-parent/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "glob-parent", - "version": "5.1.2", - "description": "Extract the non-magic parent path from a glob string.", - "author": "Gulp Team (https://gulpjs.com/)", - "contributors": [ - "Elan Shanker (https://github.com/es128)", - "Blaine Bublitz " - ], - "repository": "gulpjs/glob-parent", - "license": "ISC", - "engines": { - "node": ">= 6" - }, - "main": "index.js", - "files": [ - "LICENSE", - "index.js" - ], - "scripts": { - "lint": "eslint .", - "pretest": "npm run lint", - "test": "nyc mocha --async-only", - "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", - "coveralls": "nyc report --reporter=text-lcov | coveralls" - }, - "dependencies": { - "is-glob": "^4.0.1" - }, - "devDependencies": { - "coveralls": "^3.0.11", - "eslint": "^2.13.1", - "eslint-config-gulp": "^3.0.1", - "expect": "^1.20.2", - "mocha": "^6.0.2", - "nyc": 
"^13.3.0" - }, - "keywords": [ - "glob", - "parent", - "strip", - "path", - "dirname", - "directory", - "base", - "wildcard" - ] -} diff --git a/node_modules/human-signals/CHANGELOG.md b/node_modules/human-signals/CHANGELOG.md deleted file mode 100644 index 70d0392..0000000 --- a/node_modules/human-signals/CHANGELOG.md +++ /dev/null @@ -1,11 +0,0 @@ -# 2.1.0 - -## TypeScript types - -- Add [TypeScript definitions](src/main.d.ts) - -# 2.0.0 - -## Breaking changes - -- Minimal supported Node.js version is now `10.17.0` diff --git a/node_modules/human-signals/LICENSE b/node_modules/human-signals/LICENSE deleted file mode 100644 index 9af9492..0000000 --- a/node_modules/human-signals/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2019 ehmicky - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/human-signals/README.md b/node_modules/human-signals/README.md deleted file mode 100644 index 2af37c3..0000000 --- a/node_modules/human-signals/README.md +++ /dev/null @@ -1,165 +0,0 @@ -[![Codecov](https://img.shields.io/codecov/c/github/ehmicky/human-signals.svg?label=tested&logo=codecov)](https://codecov.io/gh/ehmicky/human-signals) -[![Travis](https://img.shields.io/badge/cross-platform-4cc61e.svg?logo=travis)](https://travis-ci.org/ehmicky/human-signals) -[![Node](https://img.shields.io/node/v/human-signals.svg?logo=node.js)](https://www.npmjs.com/package/human-signals) -[![Gitter](https://img.shields.io/gitter/room/ehmicky/human-signals.svg?logo=gitter)](https://gitter.im/ehmicky/human-signals) -[![Twitter](https://img.shields.io/badge/%E2%80%8B-twitter-4cc61e.svg?logo=twitter)](https://twitter.com/intent/follow?screen_name=ehmicky) -[![Medium](https://img.shields.io/badge/%E2%80%8B-medium-4cc61e.svg?logo=medium)](https://medium.com/@ehmicky) - -Human-friendly process signals. - -This is a map of known process signals with some information about each signal. - -Unlike -[`os.constants.signals`](https://nodejs.org/api/os.html#os_signal_constants) -this includes: - -- human-friendly [descriptions](#description) -- [default actions](#action), including whether they [can be prevented](#forced) -- whether the signal is [supported](#supported) by the current OS - -# Example - -```js -const { signalsByName, signalsByNumber } = require('human-signals') - -console.log(signalsByName.SIGINT) -// { -// name: 'SIGINT', -// number: 2, -// description: 'User interruption with CTRL-C', -// supported: true, -// action: 'terminate', -// forced: false, -// standard: 'ansi' -// } - -console.log(signalsByNumber[8]) -// { -// name: 'SIGFPE', -// number: 8, -// description: 'Floating point arithmetic error', -// supported: true, -// action: 'core', -// forced: false, -// standard: 'ansi' -// } -``` - -# Install - -```bash -npm install human-signals -``` - -# Usage - -## signalsByName - -_Type_: `object` - -Object whose keys are signal [names](#name) and values are -[signal objects](#signal). - -## signalsByNumber - -_Type_: `object` - -Object whose keys are signal [numbers](#number) and values are -[signal objects](#signal). - -## signal - -_Type_: `object` - -Signal object with the following properties. 
- -### name - -_Type_: `string` - -Standard name of the signal, for example `'SIGINT'`. - -### number - -_Type_: `number` - -Code number of the signal, for example `2`. While most `number` are -cross-platform, some are different between different OS. - -### description - -_Type_: `string` - -Human-friendly description for the signal, for example -`'User interruption with CTRL-C'`. - -### supported - -_Type_: `boolean` - -Whether the current OS can handle this signal in Node.js using -[`process.on(name, handler)`](https://nodejs.org/api/process.html#process_signal_events). - -The list of supported signals -[is OS-specific](https://github.com/ehmicky/cross-platform-node-guide/blob/master/docs/6_networking_ipc/signals.md#cross-platform-signals). - -### action - -_Type_: `string`\ -_Enum_: `'terminate'`, `'core'`, `'ignore'`, `'pause'`, `'unpause'` - -What is the default action for this signal when it is not handled. - -### forced - -_Type_: `boolean` - -Whether the signal's default action cannot be prevented. This is `true` for -`SIGTERM`, `SIGKILL` and `SIGSTOP`. - -### standard - -_Type_: `string`\ -_Enum_: `'ansi'`, `'posix'`, `'bsd'`, `'systemv'`, `'other'` - -Which standard defined that signal. - -# Support - -If you found a bug or would like a new feature, _don't hesitate_ to -[submit an issue on GitHub](../../issues). - -For other questions, feel free to -[chat with us on Gitter](https://gitter.im/ehmicky/human-signals). - -Everyone is welcome regardless of personal background. We enforce a -[Code of conduct](CODE_OF_CONDUCT.md) in order to promote a positive and -inclusive environment. - -# Contributing - -This project was made with ❤️. The simplest way to give back is by starring and -sharing it online. - -If the documentation is unclear or has a typo, please click on the page's `Edit` -button (pencil icon) and suggest a correction. - -If you would like to help us fix a bug or add a new feature, please check our -[guidelines](CONTRIBUTING.md). Pull requests are welcome! - -Thanks go to our wonderful contributors: - - - - - - - - - -

- [contributors table: ehmicky (💻 🎨 🤔 📖), electrovir (💻)]
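As a quick illustration of the `signalsByName` / `signalsByNumber` API documented in the README above (a minimal sketch, not part of the patched package; it assumes `human-signals@2` is installed and the `describeExit` helper is invented for the example):

```js
const { signalsByName, signalsByNumber } = require('human-signals')

// Turn a child process's (code, signal) exit pair into a readable message.
// `describeExit` is a hypothetical helper, shown only to exercise the API above.
const describeExit = (code, signal) => {
  if (signal === null) {
    return `exited with code ${code}`
  }
  const { description = 'unknown signal', forced = false } = signalsByName[signal] || {}
  return `killed by ${signal}: ${description}${forced ? ' (cannot be trapped)' : ''}`
}

console.log(describeExit(null, 'SIGKILL'))
// => 'killed by SIGKILL: Forced termination (cannot be trapped)'
console.log(signalsByNumber[15].name)
// => 'SIGTERM' (number-to-name lookup follows os.constants.signals where available)
```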
- - - - - diff --git a/node_modules/human-signals/build/src/core.js b/node_modules/human-signals/build/src/core.js deleted file mode 100644 index 98e8fce..0000000 --- a/node_modules/human-signals/build/src/core.js +++ /dev/null @@ -1,273 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGNALS=void 0; - -const SIGNALS=[ -{ -name:"SIGHUP", -number:1, -action:"terminate", -description:"Terminal closed", -standard:"posix"}, - -{ -name:"SIGINT", -number:2, -action:"terminate", -description:"User interruption with CTRL-C", -standard:"ansi"}, - -{ -name:"SIGQUIT", -number:3, -action:"core", -description:"User interruption with CTRL-\\", -standard:"posix"}, - -{ -name:"SIGILL", -number:4, -action:"core", -description:"Invalid machine instruction", -standard:"ansi"}, - -{ -name:"SIGTRAP", -number:5, -action:"core", -description:"Debugger breakpoint", -standard:"posix"}, - -{ -name:"SIGABRT", -number:6, -action:"core", -description:"Aborted", -standard:"ansi"}, - -{ -name:"SIGIOT", -number:6, -action:"core", -description:"Aborted", -standard:"bsd"}, - -{ -name:"SIGBUS", -number:7, -action:"core", -description: -"Bus error due to misaligned, non-existing address or paging error", -standard:"bsd"}, - -{ -name:"SIGEMT", -number:7, -action:"terminate", -description:"Command should be emulated but is not implemented", -standard:"other"}, - -{ -name:"SIGFPE", -number:8, -action:"core", -description:"Floating point arithmetic error", -standard:"ansi"}, - -{ -name:"SIGKILL", -number:9, -action:"terminate", -description:"Forced termination", -standard:"posix", -forced:true}, - -{ -name:"SIGUSR1", -number:10, -action:"terminate", -description:"Application-specific signal", -standard:"posix"}, - -{ -name:"SIGSEGV", -number:11, -action:"core", -description:"Segmentation fault", -standard:"ansi"}, - -{ -name:"SIGUSR2", -number:12, -action:"terminate", -description:"Application-specific signal", -standard:"posix"}, - -{ -name:"SIGPIPE", -number:13, -action:"terminate", -description:"Broken pipe or socket", -standard:"posix"}, - -{ -name:"SIGALRM", -number:14, -action:"terminate", -description:"Timeout or timer", -standard:"posix"}, - -{ -name:"SIGTERM", -number:15, -action:"terminate", -description:"Termination", -standard:"ansi"}, - -{ -name:"SIGSTKFLT", -number:16, -action:"terminate", -description:"Stack is empty or overflowed", -standard:"other"}, - -{ -name:"SIGCHLD", -number:17, -action:"ignore", -description:"Child process terminated, paused or unpaused", -standard:"posix"}, - -{ -name:"SIGCLD", -number:17, -action:"ignore", -description:"Child process terminated, paused or unpaused", -standard:"other"}, - -{ -name:"SIGCONT", -number:18, -action:"unpause", -description:"Unpaused", -standard:"posix", -forced:true}, - -{ -name:"SIGSTOP", -number:19, -action:"pause", -description:"Paused", -standard:"posix", -forced:true}, - -{ -name:"SIGTSTP", -number:20, -action:"pause", -description:"Paused using CTRL-Z or \"suspend\"", -standard:"posix"}, - -{ -name:"SIGTTIN", -number:21, -action:"pause", -description:"Background process cannot read terminal input", -standard:"posix"}, - -{ -name:"SIGBREAK", -number:21, -action:"terminate", -description:"User interruption with CTRL-BREAK", -standard:"other"}, - -{ -name:"SIGTTOU", -number:22, -action:"pause", -description:"Background process cannot write to terminal output", -standard:"posix"}, - -{ -name:"SIGURG", -number:23, -action:"ignore", -description:"Socket received out-of-band data", -standard:"bsd"}, - -{ -name:"SIGXCPU", 
-number:24, -action:"core", -description:"Process timed out", -standard:"bsd"}, - -{ -name:"SIGXFSZ", -number:25, -action:"core", -description:"File too big", -standard:"bsd"}, - -{ -name:"SIGVTALRM", -number:26, -action:"terminate", -description:"Timeout or timer", -standard:"bsd"}, - -{ -name:"SIGPROF", -number:27, -action:"terminate", -description:"Timeout or timer", -standard:"bsd"}, - -{ -name:"SIGWINCH", -number:28, -action:"ignore", -description:"Terminal window size changed", -standard:"bsd"}, - -{ -name:"SIGIO", -number:29, -action:"terminate", -description:"I/O is available", -standard:"other"}, - -{ -name:"SIGPOLL", -number:29, -action:"terminate", -description:"Watched event", -standard:"other"}, - -{ -name:"SIGINFO", -number:29, -action:"ignore", -description:"Request for process information", -standard:"other"}, - -{ -name:"SIGPWR", -number:30, -action:"terminate", -description:"Device running out of power", -standard:"systemv"}, - -{ -name:"SIGSYS", -number:31, -action:"core", -description:"Invalid system call", -standard:"other"}, - -{ -name:"SIGUNUSED", -number:31, -action:"terminate", -description:"Invalid system call", -standard:"other"}];exports.SIGNALS=SIGNALS; -//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/core.js.map b/node_modules/human-signals/build/src/core.js.map deleted file mode 100644 index cbfce26..0000000 --- a/node_modules/human-signals/build/src/core.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../src/core.js"],"names":["SIGNALS","name","number","action","description","standard","forced"],"mappings":";;AAEO,KAAMA,CAAAA,OAAO,CAAG;AACrB;AACEC,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,iBAJf;AAKEC,QAAQ,CAAE,OALZ,CADqB;;AAQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,+BAJf;AAKEC,QAAQ,CAAE,MALZ,CARqB;;AAerB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,gCAJf;AAKEC,QAAQ,CAAE,OALZ,CAfqB;;AAsBrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,MALZ,CAtBqB;;AA6BrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CA7BqB;;AAoCrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,MALZ,CApCqB;;AA2CrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,KALZ,CA3CqB;;AAkDrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW;AACT,mEALJ;AAMEC,QAAQ,CAAE,KANZ,CAlDqB;;AA0DrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mDAJf;AAKEC,QAAQ,CAAE,OALZ,CA1DqB;;AAiErB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,MALZ,CAjEqB;;AAwErB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAxEqB;;AAgFrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CAhFqB;;AAuFrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,MALZ,CAvFqB;;AA8FrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CA9FqB;;AAqGrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,uBAJf;AAKEC,QAAQ,CAAE,OALZ,CArGqB;;AA4GrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC
,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CA5GqB;;AAmHrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,aAJf;AAKEC,QAAQ,CAAE,MALZ,CAnHqB;;AA0HrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,OALZ,CA1HqB;;AAiIrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAjIqB;;AAwIrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAxIqB;;AA+IrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,SAHV;AAIEC,WAAW,CAAE,UAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CA/IqB;;AAuJrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,QAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAvJqB;;AA+JrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oCAJf;AAKEC,QAAQ,CAAE,OALZ,CA/JqB;;AAsKrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,+CAJf;AAKEC,QAAQ,CAAE,OALZ,CAtKqB;;AA6KrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mCAJf;AAKEC,QAAQ,CAAE,OALZ,CA7KqB;;AAoLrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oDAJf;AAKEC,QAAQ,CAAE,OALZ,CApLqB;;AA2LrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,kCAJf;AAKEC,QAAQ,CAAE,KALZ,CA3LqB;;AAkMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,mBAJf;AAKEC,QAAQ,CAAE,KALZ,CAlMqB;;AAyMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,cAJf;AAKEC,QAAQ,CAAE,KALZ,CAzMqB;;AAgNrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAhNqB;;AAuNrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAvNqB;;AA8NrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,KALZ,CA9NqB;;AAqOrB;AACEJ,IAAI,CAAE,OADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CArOqB;;AA4OrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,eAJf;AAKEC,QAAQ,CAAE,OALZ,CA5OqB;;AAmPrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,OALZ,CAnPqB;;AA0PrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,SALZ,CA1PqB;;AAiQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAjQqB;;AAwQrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAxQqB,CAAhB,C","sourcesContent":["/* eslint-disable max-lines */\n// List of known process signals with information about them\nexport const SIGNALS = [\n {\n name: 'SIGHUP',\n number: 1,\n action: 'terminate',\n description: 'Terminal closed',\n standard: 'posix',\n },\n {\n name: 'SIGINT',\n number: 2,\n action: 'terminate',\n description: 'User interruption with CTRL-C',\n standard: 'ansi',\n },\n {\n name: 'SIGQUIT',\n number: 3,\n action: 'core',\n description: 'User interruption with CTRL-\\\\',\n standard: 'posix',\n },\n {\n name: 'SIGILL',\n number: 4,\n action: 'core',\n description: 'Invalid machine instruction',\n standard: 'ansi',\n },\n {\n name: 'SIGTRAP',\n number: 5,\n action: 'core',\n description: 'Debugger breakpoint',\n standard: 'posix',\n },\n {\n name: 
'SIGABRT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'ansi',\n },\n {\n name: 'SIGIOT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'bsd',\n },\n {\n name: 'SIGBUS',\n number: 7,\n action: 'core',\n description:\n 'Bus error due to misaligned, non-existing address or paging error',\n standard: 'bsd',\n },\n {\n name: 'SIGEMT',\n number: 7,\n action: 'terminate',\n description: 'Command should be emulated but is not implemented',\n standard: 'other',\n },\n {\n name: 'SIGFPE',\n number: 8,\n action: 'core',\n description: 'Floating point arithmetic error',\n standard: 'ansi',\n },\n {\n name: 'SIGKILL',\n number: 9,\n action: 'terminate',\n description: 'Forced termination',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGUSR1',\n number: 10,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGSEGV',\n number: 11,\n action: 'core',\n description: 'Segmentation fault',\n standard: 'ansi',\n },\n {\n name: 'SIGUSR2',\n number: 12,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGPIPE',\n number: 13,\n action: 'terminate',\n description: 'Broken pipe or socket',\n standard: 'posix',\n },\n {\n name: 'SIGALRM',\n number: 14,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'posix',\n },\n {\n name: 'SIGTERM',\n number: 15,\n action: 'terminate',\n description: 'Termination',\n standard: 'ansi',\n },\n {\n name: 'SIGSTKFLT',\n number: 16,\n action: 'terminate',\n description: 'Stack is empty or overflowed',\n standard: 'other',\n },\n {\n name: 'SIGCHLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'posix',\n },\n {\n name: 'SIGCLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'other',\n },\n {\n name: 'SIGCONT',\n number: 18,\n action: 'unpause',\n description: 'Unpaused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGSTOP',\n number: 19,\n action: 'pause',\n description: 'Paused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGTSTP',\n number: 20,\n action: 'pause',\n description: 'Paused using CTRL-Z or \"suspend\"',\n standard: 'posix',\n },\n {\n name: 'SIGTTIN',\n number: 21,\n action: 'pause',\n description: 'Background process cannot read terminal input',\n standard: 'posix',\n },\n {\n name: 'SIGBREAK',\n number: 21,\n action: 'terminate',\n description: 'User interruption with CTRL-BREAK',\n standard: 'other',\n },\n {\n name: 'SIGTTOU',\n number: 22,\n action: 'pause',\n description: 'Background process cannot write to terminal output',\n standard: 'posix',\n },\n {\n name: 'SIGURG',\n number: 23,\n action: 'ignore',\n description: 'Socket received out-of-band data',\n standard: 'bsd',\n },\n {\n name: 'SIGXCPU',\n number: 24,\n action: 'core',\n description: 'Process timed out',\n standard: 'bsd',\n },\n {\n name: 'SIGXFSZ',\n number: 25,\n action: 'core',\n description: 'File too big',\n standard: 'bsd',\n },\n {\n name: 'SIGVTALRM',\n number: 26,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGPROF',\n number: 27,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGWINCH',\n number: 28,\n action: 'ignore',\n description: 'Terminal window size changed',\n standard: 'bsd',\n },\n {\n name: 'SIGIO',\n number: 29,\n action: 'terminate',\n 
description: 'I/O is available',\n standard: 'other',\n },\n {\n name: 'SIGPOLL',\n number: 29,\n action: 'terminate',\n description: 'Watched event',\n standard: 'other',\n },\n {\n name: 'SIGINFO',\n number: 29,\n action: 'ignore',\n description: 'Request for process information',\n standard: 'other',\n },\n {\n name: 'SIGPWR',\n number: 30,\n action: 'terminate',\n description: 'Device running out of power',\n standard: 'systemv',\n },\n {\n name: 'SIGSYS',\n number: 31,\n action: 'core',\n description: 'Invalid system call',\n standard: 'other',\n },\n {\n name: 'SIGUNUSED',\n number: 31,\n action: 'terminate',\n description: 'Invalid system call',\n standard: 'other',\n },\n]\n/* eslint-enable max-lines */\n"],"file":"src/core.js"} \ No newline at end of file diff --git a/node_modules/human-signals/build/src/main.d.ts b/node_modules/human-signals/build/src/main.d.ts deleted file mode 100644 index 2dc5ea7..0000000 --- a/node_modules/human-signals/build/src/main.d.ts +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Object whose keys are signal names and values are signal objects. - */ -export declare const signalsByName: { [signalName: string]: Signal } -/** - * Object whose keys are signal numbers and values are signal objects. - */ -export declare const signalsByNumber: { [signalNumber: string]: Signal } - -export declare type SignalAction = - | 'terminate' - | 'core' - | 'ignore' - | 'pause' - | 'unpause' -export declare type SignalStandard = - | 'ansi' - | 'posix' - | 'bsd' - | 'systemv' - | 'other' - -export declare type Signal = { - /** - * Standard name of the signal, for example 'SIGINT'. - */ - name: string - /** - * Code number of the signal, for example 2. While most number are cross-platform, some are different between different OS. - */ - number: number - /** - * Human-friendly description for the signal, for example 'User interruption with CTRL-C'. - */ - description: string - /** - * Whether the current OS can handle this signal in Node.js using process.on(name, handler). The list of supported signals is OS-specific. - */ - supported: boolean - /** - * What is the default action for this signal when it is not handled. - */ - action: SignalAction - /** - * Whether the signal's default action cannot be prevented. This is true for SIGTERM, SIGKILL and SIGSTOP. - */ - forced: boolean - /** - * Which standard defined that signal. 
- */ - standard: SignalStandard -} diff --git a/node_modules/human-signals/build/src/main.js b/node_modules/human-signals/build/src/main.js deleted file mode 100644 index 88f5fd2..0000000 --- a/node_modules/human-signals/build/src/main.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.signalsByNumber=exports.signalsByName=void 0;var _os=require("os"); - -var _signals=require("./signals.js"); -var _realtime=require("./realtime.js"); - - - -const getSignalsByName=function(){ -const signals=(0,_signals.getSignals)(); -return signals.reduce(getSignalByName,{}); -}; - -const getSignalByName=function( -signalByNameMemo, -{name,number,description,supported,action,forced,standard}) -{ -return{ -...signalByNameMemo, -[name]:{name,number,description,supported,action,forced,standard}}; - -}; - -const signalsByName=getSignalsByName();exports.signalsByName=signalsByName; - - - - -const getSignalsByNumber=function(){ -const signals=(0,_signals.getSignals)(); -const length=_realtime.SIGRTMAX+1; -const signalsA=Array.from({length},(value,number)=> -getSignalByNumber(number,signals)); - -return Object.assign({},...signalsA); -}; - -const getSignalByNumber=function(number,signals){ -const signal=findSignalByNumber(number,signals); - -if(signal===undefined){ -return{}; -} - -const{name,description,supported,action,forced,standard}=signal; -return{ -[number]:{ -name, -number, -description, -supported, -action, -forced, -standard}}; - - -}; - - - -const findSignalByNumber=function(number,signals){ -const signal=signals.find(({name})=>_os.constants.signals[name]===number); - -if(signal!==undefined){ -return signal; -} - -return signals.find(signalA=>signalA.number===number); -}; - -const signalsByNumber=getSignalsByNumber();exports.signalsByNumber=signalsByNumber; -//# sourceMappingURL=main.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/main.js.map b/node_modules/human-signals/build/src/main.js.map deleted file mode 100644 index 3fdcede..0000000 --- a/node_modules/human-signals/build/src/main.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"sources":["../../src/main.js"],"names":["getSignalsByName","signals","reduce","getSignalByName","signalByNameMemo","name","number","description","supported","action","forced","standard","signalsByName","getSignalsByNumber","length","SIGRTMAX","signalsA","Array","from","value","getSignalByNumber","Object","assign","signal","findSignalByNumber","undefined","find","constants","signalA","signalsByNumber"],"mappings":"2HAAA;;AAEA;AACA;;;;AAIA,KAAMA,CAAAA,gBAAgB,CAAG,UAAW;AAClC,KAAMC,CAAAA,OAAO,CAAG,yBAAhB;AACA,MAAOA,CAAAA,OAAO,CAACC,MAAR,CAAeC,eAAf,CAAgC,EAAhC,CAAP;AACD,CAHD;;AAKA,KAAMA,CAAAA,eAAe,CAAG;AACtBC,gBADsB;AAEtB,CAAEC,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFsB;AAGtB;AACA,MAAO;AACL,GAAGP,gBADE;AAEL,CAACC,IAAD,EAAQ,CAAEA,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFH,CAAP;;AAID,CARD;;AAUO,KAAMC,CAAAA,aAAa,CAAGZ,gBAAgB,EAAtC,C;;;;;AAKP,KAAMa,CAAAA,kBAAkB,CAAG,UAAW;AACpC,KAAMZ,CAAAA,OAAO,CAAG,yBAAhB;AACA,KAAMa,CAAAA,MAAM,CAAGC,mBAAW,CAA1B;AACA,KAAMC,CAAAA,QAAQ,CAAGC,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuB,CAACK,KAAD,CAAQb,MAAR;AACtCc,iBAAiB,CAACd,MAAD,CAASL,OAAT,CADF,CAAjB;;AAGA,MAAOoB,CAAAA,MAAM,CAACC,MAAP,CAAc,EAAd,CAAkB,GAAGN,QAArB,CAAP;AACD,CAPD;;AASA,KAAMI,CAAAA,iBAAiB,CAAG,SAASd,MAAT,CAAiBL,OAAjB,CAA0B;AAClD,KAAMsB,CAAAA,MAAM,CAAGC,kBAAkB,CAAClB,MAAD,CAASL,OAAT,CAAjC;;AAEA,GAAIsB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAO,EAAP;AACD;;AAED,KAAM,CAAEpB,IAAF,CAAQE,WAAR,CAAqBC,SAArB,CAAgCC,MAAhC,CAAwCC,MAAxC,CAAgDC,QAAhD,EAA6DY,MAAnE;AACA,MAAO;AACL,CAACjB,MAAD,EAAU;AACRD,IADQ;AAERC,MAFQ;AAGRC,WAHQ;AAIRC,SAJQ;AAKRC,MALQ;AAMRC,MANQ;AAORC,QAPQ,CADL,CAAP;;;AAWD,CAnBD;;;;AAuBA,KAAMa,CAAAA,kBAAkB,CAAG,SAASlB,MAAT,CAAiBL,OAAjB,CAA0B;AACnD,KAAMsB,CAAAA,MAAM,CAAGtB,OAAO,CAACyB,IAAR,CAAa,CAAC,CAAErB,IAAF,CAAD,GAAcsB,cAAU1B,OAAV,CAAkBI,IAAlB,IAA4BC,MAAvD,CAAf;;AAEA,GAAIiB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAOF,CAAAA,MAAP;AACD;;AAED,MAAOtB,CAAAA,OAAO,CAACyB,IAAR,CAAaE,OAAO,EAAIA,OAAO,CAACtB,MAAR,GAAmBA,MAA3C,CAAP;AACD,CARD;;AAUO,KAAMuB,CAAAA,eAAe,CAAGhB,kBAAkB,EAA1C,C","sourcesContent":["import { constants } from 'os'\n\nimport { getSignals } from './signals.js'\nimport { SIGRTMAX } from './realtime.js'\n\n// Retrieve `signalsByName`, an object mapping signal name to signal properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByName = function() {\n const signals = getSignals()\n return signals.reduce(getSignalByName, {})\n}\n\nconst getSignalByName = function(\n signalByNameMemo,\n { name, number, description, supported, action, forced, standard },\n) {\n return {\n ...signalByNameMemo,\n [name]: { name, number, description, supported, action, forced, standard },\n }\n}\n\nexport const signalsByName = getSignalsByName()\n\n// Retrieve `signalsByNumber`, an object mapping signal number to signal\n// properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByNumber = function() {\n const signals = getSignals()\n const length = SIGRTMAX + 1\n const signalsA = Array.from({ length }, (value, number) =>\n getSignalByNumber(number, signals),\n )\n return Object.assign({}, ...signalsA)\n}\n\nconst getSignalByNumber = function(number, signals) {\n const signal = findSignalByNumber(number, signals)\n\n if (signal === undefined) {\n return {}\n }\n\n const { name, description, supported, action, forced, standard } = signal\n return {\n [number]: {\n name,\n number,\n description,\n supported,\n action,\n forced,\n standard,\n },\n }\n}\n\n// Several signals might end up 
sharing the same number because of OS-specific\n// numbers, in which case those prevail.\nconst findSignalByNumber = function(number, signals) {\n const signal = signals.find(({ name }) => constants.signals[name] === number)\n\n if (signal !== undefined) {\n return signal\n }\n\n return signals.find(signalA => signalA.number === number)\n}\n\nexport const signalsByNumber = getSignalsByNumber()\n"],"file":"src/main.js"} \ No newline at end of file diff --git a/node_modules/human-signals/build/src/realtime.js b/node_modules/human-signals/build/src/realtime.js deleted file mode 100644 index f665516..0000000 --- a/node_modules/human-signals/build/src/realtime.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGRTMAX=exports.getRealtimeSignals=void 0; -const getRealtimeSignals=function(){ -const length=SIGRTMAX-SIGRTMIN+1; -return Array.from({length},getRealtimeSignal); -};exports.getRealtimeSignals=getRealtimeSignals; - -const getRealtimeSignal=function(value,index){ -return{ -name:`SIGRT${index+1}`, -number:SIGRTMIN+index, -action:"terminate", -description:"Application-specific signal (realtime)", -standard:"posix"}; - -}; - -const SIGRTMIN=34; -const SIGRTMAX=64;exports.SIGRTMAX=SIGRTMAX; -//# sourceMappingURL=realtime.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/realtime.js.map b/node_modules/human-signals/build/src/realtime.js.map deleted file mode 100644 index 808bbd1..0000000 --- a/node_modules/human-signals/build/src/realtime.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../src/realtime.js"],"names":["getRealtimeSignals","length","SIGRTMAX","SIGRTMIN","Array","from","getRealtimeSignal","value","index","name","number","action","description","standard"],"mappings":";AACO,KAAMA,CAAAA,kBAAkB,CAAG,UAAW;AAC3C,KAAMC,CAAAA,MAAM,CAAGC,QAAQ,CAAGC,QAAX,CAAsB,CAArC;AACA,MAAOC,CAAAA,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuBK,iBAAvB,CAAP;AACD,CAHM,C;;AAKP,KAAMA,CAAAA,iBAAiB,CAAG,SAASC,KAAT,CAAgBC,KAAhB,CAAuB;AAC/C,MAAO;AACLC,IAAI,CAAG,QAAOD,KAAK,CAAG,CAAE,EADnB;AAELE,MAAM,CAAEP,QAAQ,CAAGK,KAFd;AAGLG,MAAM,CAAE,WAHH;AAILC,WAAW,CAAE,wCAJR;AAKLC,QAAQ,CAAE,OALL,CAAP;;AAOD,CARD;;AAUA,KAAMV,CAAAA,QAAQ,CAAG,EAAjB;AACO,KAAMD,CAAAA,QAAQ,CAAG,EAAjB,C","sourcesContent":["// List of realtime signals with information about them\nexport const getRealtimeSignals = function() {\n const length = SIGRTMAX - SIGRTMIN + 1\n return Array.from({ length }, getRealtimeSignal)\n}\n\nconst getRealtimeSignal = function(value, index) {\n return {\n name: `SIGRT${index + 1}`,\n number: SIGRTMIN + index,\n action: 'terminate',\n description: 'Application-specific signal (realtime)',\n standard: 'posix',\n }\n}\n\nconst SIGRTMIN = 34\nexport const SIGRTMAX = 64\n"],"file":"src/realtime.js"} \ No newline at end of file diff --git a/node_modules/human-signals/build/src/signals.js b/node_modules/human-signals/build/src/signals.js deleted file mode 100644 index ab3b387..0000000 --- a/node_modules/human-signals/build/src/signals.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.getSignals=void 0;var _os=require("os"); - -var _core=require("./core.js"); -var _realtime=require("./realtime.js"); - - - -const getSignals=function(){ -const realtimeSignals=(0,_realtime.getRealtimeSignals)(); -const signals=[..._core.SIGNALS,...realtimeSignals].map(normalizeSignal); -return signals; -};exports.getSignals=getSignals; - - - - - - - -const 
normalizeSignal=function({ -name, -number:defaultNumber, -description, -action, -forced=false, -standard}) -{ -const{ -signals:{[name]:constantSignal}}= -_os.constants; -const supported=constantSignal!==undefined; -const number=supported?constantSignal:defaultNumber; -return{name,number,description,supported,action,forced,standard}; -}; -//# sourceMappingURL=signals.js.map \ No newline at end of file diff --git a/node_modules/human-signals/build/src/signals.js.map b/node_modules/human-signals/build/src/signals.js.map deleted file mode 100644 index 2a6b919..0000000 --- a/node_modules/human-signals/build/src/signals.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../src/signals.js"],"names":["getSignals","realtimeSignals","signals","SIGNALS","map","normalizeSignal","name","number","defaultNumber","description","action","forced","standard","constantSignal","constants","supported","undefined"],"mappings":"gGAAA;;AAEA;AACA;;;;AAIO,KAAMA,CAAAA,UAAU,CAAG,UAAW;AACnC,KAAMC,CAAAA,eAAe,CAAG,kCAAxB;AACA,KAAMC,CAAAA,OAAO,CAAG,CAAC,GAAGC,aAAJ,CAAa,GAAGF,eAAhB,EAAiCG,GAAjC,CAAqCC,eAArC,CAAhB;AACA,MAAOH,CAAAA,OAAP;AACD,CAJM,C;;;;;;;;AAYP,KAAMG,CAAAA,eAAe,CAAG,SAAS;AAC/BC,IAD+B;AAE/BC,MAAM,CAAEC,aAFuB;AAG/BC,WAH+B;AAI/BC,MAJ+B;AAK/BC,MAAM,CAAG,KALsB;AAM/BC,QAN+B,CAAT;AAOrB;AACD,KAAM;AACJV,OAAO,CAAE,CAAE,CAACI,IAAD,EAAQO,cAAV,CADL;AAEFC,aAFJ;AAGA,KAAMC,CAAAA,SAAS,CAAGF,cAAc,GAAKG,SAArC;AACA,KAAMT,CAAAA,MAAM,CAAGQ,SAAS,CAAGF,cAAH,CAAoBL,aAA5C;AACA,MAAO,CAAEF,IAAF,CAAQC,MAAR,CAAgBE,WAAhB,CAA6BM,SAA7B,CAAwCL,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAAP;AACD,CAdD","sourcesContent":["import { constants } from 'os'\n\nimport { SIGNALS } from './core.js'\nimport { getRealtimeSignals } from './realtime.js'\n\n// Retrieve list of know signals (including realtime) with information about\n// them\nexport const getSignals = function() {\n const realtimeSignals = getRealtimeSignals()\n const signals = [...SIGNALS, ...realtimeSignals].map(normalizeSignal)\n return signals\n}\n\n// Normalize signal:\n// - `number`: signal numbers are OS-specific. This is taken into account by\n// `os.constants.signals`. However we provide a default `number` since some\n// signals are not defined for some OS.\n// - `forced`: set default to `false`\n// - `supported`: set value\nconst normalizeSignal = function({\n name,\n number: defaultNumber,\n description,\n action,\n forced = false,\n standard,\n}) {\n const {\n signals: { [name]: constantSignal },\n } = constants\n const supported = constantSignal !== undefined\n const number = supported ? 
constantSignal : defaultNumber\n return { name, number, description, supported, action, forced, standard }\n}\n"],"file":"src/signals.js"} \ No newline at end of file diff --git a/node_modules/human-signals/package.json b/node_modules/human-signals/package.json deleted file mode 100644 index fd1d027..0000000 --- a/node_modules/human-signals/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "human-signals", - "version": "2.1.0", - "main": "build/src/main.js", - "files": [ - "build/src", - "!~" - ], - "scripts": { - "test": "gulp test" - }, - "husky": { - "hooks": { - "pre-push": "gulp check --full" - } - }, - "description": "Human-friendly process signals", - "keywords": [ - "signal", - "signals", - "handlers", - "error-handling", - "errors", - "interrupts", - "sigterm", - "sigint", - "irq", - "process", - "exit", - "exit-code", - "status", - "operating-system", - "es6", - "javascript", - "linux", - "macos", - "windows", - "nodejs" - ], - "license": "Apache-2.0", - "homepage": "https://git.io/JeluP", - "repository": "ehmicky/human-signals", - "bugs": { - "url": "https://github.com/ehmicky/human-signals/issues" - }, - "author": "ehmicky (https://github.com/ehmicky)", - "directories": { - "lib": "src", - "test": "test" - }, - "types": "build/src/main.d.ts", - "dependencies": {}, - "devDependencies": { - "@ehmicky/dev-tasks": "^0.31.9", - "ajv": "^6.12.0", - "ava": "^3.5.0", - "gulp": "^4.0.2", - "husky": "^4.2.3", - "test-each": "^2.0.0" - }, - "engines": { - "node": ">=10.17.0" - } -} diff --git a/node_modules/is-extglob/LICENSE b/node_modules/is-extglob/LICENSE deleted file mode 100644 index 842218c..0000000 --- a/node_modules/is-extglob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2016, Jon Schlinkert - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/is-extglob/README.md b/node_modules/is-extglob/README.md deleted file mode 100644 index 0416af5..0000000 --- a/node_modules/is-extglob/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) - -> Returns true if a string has an extglob. 
- -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-extglob -``` - -## Usage - -```js -var isExtglob = require('is-extglob'); -``` - -**True** - -```js -isExtglob('?(abc)'); -isExtglob('@(abc)'); -isExtglob('!(abc)'); -isExtglob('*(abc)'); -isExtglob('+(abc)'); -``` - -**False** - -Escaped extglobs: - -```js -isExtglob('\\?(abc)'); -isExtglob('\\@(abc)'); -isExtglob('\\!(abc)'); -isExtglob('\\*(abc)'); -isExtglob('\\+(abc)'); -``` - -Everything else... - -```js -isExtglob('foo.js'); -isExtglob('!foo.js'); -isExtglob('*.js'); -isExtglob('**/abc.js'); -isExtglob('abc/*.js'); -isExtglob('abc/(aaa|bbb).js'); -isExtglob('abc/[a-z].js'); -isExtglob('abc/{a,b}.js'); -isExtglob('abc/?.js'); -isExtglob('abc.js'); -isExtglob('abc/def/ghi.js'); -``` - -## History - -**v2.0** - -Adds support for escaping. Escaped exglobs no longer return true. - -## About - -### Related projects - -* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") -* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") -* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") - -### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -### Building docs - -_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. Any changes to the readme must be made in [.verb.md](.verb.md).)_ - -To generate the readme and API documentation with [verb](https://github.com/verbose/verb): - -```sh -$ npm install -g verb verb-generate-readme && verb -``` - -### Running tests - -Install dev dependencies: - -```sh -$ npm install -d && npm test -``` - -### Author - -**Jon Schlinkert** - -* [github/jonschlinkert](https://github.com/jonschlinkert) -* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) - -### License - -Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/is-extglob/index.js b/node_modules/is-extglob/index.js deleted file mode 100644 index c1d986f..0000000 --- a/node_modules/is-extglob/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/*! - * is-extglob - * - * Copyright (c) 2014-2016, Jon Schlinkert. - * Licensed under the MIT License. 
- */ - -module.exports = function isExtglob(str) { - if (typeof str !== 'string' || str === '') { - return false; - } - - var match; - while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { - if (match[2]) return true; - str = str.slice(match.index + match[0].length); - } - - return false; -}; diff --git a/node_modules/is-extglob/package.json b/node_modules/is-extglob/package.json deleted file mode 100644 index 7a90836..0000000 --- a/node_modules/is-extglob/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "is-extglob", - "description": "Returns true if a string has an extglob.", - "version": "2.1.1", - "homepage": "https://github.com/jonschlinkert/is-extglob", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "repository": "jonschlinkert/is-extglob", - "bugs": { - "url": "https://github.com/jonschlinkert/is-extglob/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "gulp-format-md": "^0.1.10", - "mocha": "^3.0.2" - }, - "keywords": [ - "bash", - "braces", - "check", - "exec", - "expression", - "extglob", - "glob", - "globbing", - "globstar", - "is", - "match", - "matches", - "pattern", - "regex", - "regular", - "string", - "test" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "has-glob", - "is-glob", - "micromatch" - ] - }, - "reflinks": [ - "verb", - "verb-generate-readme" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/is-glob/LICENSE b/node_modules/is-glob/LICENSE deleted file mode 100644 index 3f2eca1..0000000 --- a/node_modules/is-glob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
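To make the escape handling in `is-extglob/index.js` above concrete: escaped extglob markers are skipped, so only unescaped patterns report `true`. A minimal sketch (illustrative only, not taken from the patched package; it assumes `is-extglob` is installed and the `keepLiterals` helper is invented for the example):

```js
const isExtglob = require('is-extglob')

// Keep only the patterns that do NOT need extglob expansion.
// Escaped extglobs count as literals, matching the check above.
const keepLiterals = patterns => patterns.filter(pattern => !isExtglob(pattern))

console.log(isExtglob('+(a|b)'))   // => true
console.log(isExtglob('\\+(a|b)')) // => false (escaped)
console.log(keepLiterals(['+(a|b)', '\\+(a|b)', 'plain.txt']))
// => ['\\+(a|b)', 'plain.txt']
```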
diff --git a/node_modules/is-glob/README.md b/node_modules/is-glob/README.md deleted file mode 100644 index 740724b..0000000 --- a/node_modules/is-glob/README.md +++ /dev/null @@ -1,206 +0,0 @@ -# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) - -> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-glob -``` - -You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). - -## Usage - -```js -var isGlob = require('is-glob'); -``` - -### Default behavior - -**True** - -Patterns that have glob characters or regex patterns will return `true`: - -```js -isGlob('!foo.js'); -isGlob('*.js'); -isGlob('**/abc.js'); -isGlob('abc/*.js'); -isGlob('abc/(aaa|bbb).js'); -isGlob('abc/[a-z].js'); -isGlob('abc/{a,b}.js'); -//=> true -``` - -Extglobs - -```js -isGlob('abc/@(a).js'); -isGlob('abc/!(a).js'); -isGlob('abc/+(a).js'); -isGlob('abc/*(a).js'); -isGlob('abc/?(a).js'); -//=> true -``` - -**False** - -Escaped globs or extglobs return `false`: - -```js -isGlob('abc/\\@(a).js'); -isGlob('abc/\\!(a).js'); -isGlob('abc/\\+(a).js'); -isGlob('abc/\\*(a).js'); -isGlob('abc/\\?(a).js'); -isGlob('\\!foo.js'); -isGlob('\\*.js'); -isGlob('\\*\\*/abc.js'); -isGlob('abc/\\*.js'); -isGlob('abc/\\(aaa|bbb).js'); -isGlob('abc/\\[a-z].js'); -isGlob('abc/\\{a,b}.js'); -//=> false -``` - -Patterns that do not have glob patterns return `false`: - -```js -isGlob('abc.js'); -isGlob('abc/def/ghi.js'); -isGlob('foo.js'); -isGlob('abc/@.js'); -isGlob('abc/+.js'); -isGlob('abc/?.js'); -isGlob(); -isGlob(null); -//=> false -``` - -Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): - -```js -isGlob(['**/*.js']); -isGlob(['foo.js']); -//=> false -``` - -### Option strict - -When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that -some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. 
- -**True** - -Patterns that have glob characters or regex patterns will return `true`: - -```js -isGlob('!foo.js', {strict: false}); -isGlob('*.js', {strict: false}); -isGlob('**/abc.js', {strict: false}); -isGlob('abc/*.js', {strict: false}); -isGlob('abc/(aaa|bbb).js', {strict: false}); -isGlob('abc/[a-z].js', {strict: false}); -isGlob('abc/{a,b}.js', {strict: false}); -//=> true -``` - -Extglobs - -```js -isGlob('abc/@(a).js', {strict: false}); -isGlob('abc/!(a).js', {strict: false}); -isGlob('abc/+(a).js', {strict: false}); -isGlob('abc/*(a).js', {strict: false}); -isGlob('abc/?(a).js', {strict: false}); -//=> true -``` - -**False** - -Escaped globs or extglobs return `false`: - -```js -isGlob('\\!foo.js', {strict: false}); -isGlob('\\*.js', {strict: false}); -isGlob('\\*\\*/abc.js', {strict: false}); -isGlob('abc/\\*.js', {strict: false}); -isGlob('abc/\\(aaa|bbb).js', {strict: false}); -isGlob('abc/\\[a-z].js', {strict: false}); -isGlob('abc/\\{a,b}.js', {strict: false}); -//=> false -``` - -## About - -
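The check above exists so that callers only reach for a real glob engine when a pattern actually contains glob syntax, which is the package description's stated purpose. A minimal sketch of that pattern (illustrative only, not taken from the patched packages; it assumes `is-glob` and `fast-glob` are installed and the `listFiles` helper is invented for the example):

```js
const fs = require('fs')
const fg = require('fast-glob')
const isGlob = require('is-glob')

// Expand real glob patterns; check literal paths with a single fs call.
// `listFiles` is a hypothetical helper used only to illustrate the check.
const listFiles = pattern => {
  if (isGlob(pattern)) {
    return fg.sync(pattern) // real glob: let fast-glob expand it
  }
  return fs.existsSync(pattern) ? [pattern] : [] // literal path: no glob engine needed
}

console.log(listFiles('src/**/*.js'))  // expanded by fast-glob
console.log(listFiles('package.json')) // checked with a single fs call
```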
-### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-### Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-### Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") -* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") -* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") -* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 47 | [jonschlinkert](https://github.com/jonschlinkert) | -| 5 | [doowb](https://github.com/doowb) | -| 1 | [phated](https://github.com/phated) | -| 1 | [danhper](https://github.com/danhper) | -| 1 | [paulmillr](https://github.com/paulmillr) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/is-glob/index.js b/node_modules/is-glob/index.js deleted file mode 100644 index 620f563..0000000 --- a/node_modules/is-glob/index.js +++ /dev/null @@ -1,150 +0,0 @@ -/*! - * is-glob - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -var isExtglob = require('is-extglob'); -var chars = { '{': '}', '(': ')', '[': ']'}; -var strictCheck = function(str) { - if (str[0] === '!') { - return true; - } - var index = 0; - var pipeIndex = -2; - var closeSquareIndex = -2; - var closeCurlyIndex = -2; - var closeParenIndex = -2; - var backSlashIndex = -2; - while (index < str.length) { - if (str[index] === '*') { - return true; - } - - if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { - return true; - } - - if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { - if (closeSquareIndex < index) { - closeSquareIndex = str.indexOf(']', index); - } - if (closeSquareIndex > index) { - if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { - return true; - } - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { - return true; - } - } - } - - if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { - closeCurlyIndex = str.indexOf('}', index); - if (closeCurlyIndex > index) { - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { - return true; - } - } - } - - if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { - closeParenIndex = str.indexOf(')', index); - if (closeParenIndex > index) { - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { - return true; - } - } - } - - if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { - if (pipeIndex < index) { - pipeIndex = str.indexOf('|', index); - } - if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { - closeParenIndex = str.indexOf(')', pipeIndex); - if (closeParenIndex > pipeIndex) { - backSlashIndex = str.indexOf('\\', pipeIndex); - if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { - return true; - } - } - } - } - - if (str[index] === '\\') { - var open = str[index + 1]; - index += 2; - var close = chars[open]; - - if (close) { - var n = str.indexOf(close, index); - if (n !== -1) { - index = n + 1; - } - } - - if (str[index] === '!') { - return true; - } - } else { - index++; - } - } - return false; -}; - -var relaxedCheck = function(str) { - if (str[0] === '!') { - return true; - } - var index = 0; - while (index < str.length) { - if (/[*?{}()[\]]/.test(str[index])) { - return true; - } - - if (str[index] === '\\') { - var open = str[index + 1]; - index += 2; - var close = chars[open]; - - if (close) { - var n = str.indexOf(close, index); - if (n !== -1) { - index = n + 1; - } - } - - if (str[index] === '!') { - return true; - } - } else { - index++; - } - } - return false; -}; - -module.exports = function isGlob(str, options) { - if (typeof str !== 'string' || str === '') { - return false; - } - - if (isExtglob(str)) { - return true; - } - - var check = strictCheck; - - // optionally relax check - if (options && options.strict === false) { - check = relaxedCheck; - } - - return check(str); -}; diff --git a/node_modules/is-glob/package.json b/node_modules/is-glob/package.json deleted file mode 100644 index 858af03..0000000 --- a/node_modules/is-glob/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "is-glob", - "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", - "version": "4.0.3", - "homepage": "https://github.com/micromatch/is-glob", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Brian Woodward (https://twitter.com/doowb)", - "Daniel Perez (https://tuvistavie.com)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "micromatch/is-glob", - "bugs": { - "url": "https://github.com/micromatch/is-glob/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha && node benchmark.js" - }, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "devDependencies": { - "gulp-format-md": "^0.1.10", - "mocha": "^3.0.2" - }, - "keywords": [ - "bash", - "braces", - "check", - "exec", - "expression", - "extglob", - "glob", - "globbing", - "globstar", - "is", - "match", - "matches", - "pattern", - "regex", - "regular", - "string", - "test" - ], - "verb": { - "layout": "default", - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "assemble", - "base", - "update", - "verb" - ] - }, - "reflinks": [ - "assemble", - "bach", - "base", - "composer", - "gulp", - "has-glob", - "is-valid-glob", - "micromatch", - "npm", - "scaffold", - "verb", - "vinyl" - ] - } -} diff --git a/node_modules/is-number/LICENSE b/node_modules/is-number/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/is-number/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/is-number/README.md b/node_modules/is-number/README.md deleted file mode 100644 index eb8149e..0000000 --- a/node_modules/is-number/README.md +++ /dev/null @@ -1,187 +0,0 @@ -# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) - -> Returns true if the value is a finite number. 
- -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-number -``` - -## Why is this needed? - -In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: - -```js -console.log(+[]); //=> 0 -console.log(+''); //=> 0 -console.log(+' '); //=> 0 -console.log(typeof NaN); //=> 'number' -``` - -This library offers a performant way to smooth out edge cases like these. - -## Usage - -```js -const isNumber = require('is-number'); -``` - -See the [tests](./test.js) for more examples. - -### true - -```js -isNumber(5e3); // true -isNumber(0xff); // true -isNumber(-1.1); // true -isNumber(0); // true -isNumber(1); // true -isNumber(1.1); // true -isNumber(10); // true -isNumber(10.10); // true -isNumber(100); // true -isNumber('-1.1'); // true -isNumber('0'); // true -isNumber('012'); // true -isNumber('0xff'); // true -isNumber('1'); // true -isNumber('1.1'); // true -isNumber('10'); // true -isNumber('10.10'); // true -isNumber('100'); // true -isNumber('5e3'); // true -isNumber(parseInt('012')); // true -isNumber(parseFloat('012')); // true -``` - -### False - -Everything else is false, as you would expect: - -```js -isNumber(Infinity); // false -isNumber(NaN); // false -isNumber(null); // false -isNumber(undefined); // false -isNumber(''); // false -isNumber(' '); // false -isNumber('foo'); // false -isNumber([1]); // false -isNumber([]); // false -isNumber(function () {}); // false -isNumber({}); // false -``` - -## Release history - -### 7.0.0 - -* Refactor. Now uses `.isFinite` if it exists. -* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. - -### 6.0.0 - -* Optimizations, thanks to @benaadams. - -### 5.0.0 - -**Breaking changes** - -* removed support for `instanceof Number` and `instanceof String` - -## Benchmarks - -As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. - -``` -# all -v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) -v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) -parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) -fastest is 'v7.0' - -# string -v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) -v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) -parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) -fastest is 'parseFloat,v7.0' - -# number -v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) -v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) -parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) -fastest is 'v6.0' -``` - -## About - -
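Tying the edge cases above to a typical use: validating untrusted string input before casting it, instead of relying on `+value`, which silently turns `''` and `' '` into `0`. A minimal sketch (illustrative only, not taken from the patched package; it assumes `is-number` is installed and the `parsePort` helper is invented for the example):

```js
const isNumber = require('is-number')

// Reject '', ' ', 'abc', NaN, Infinity, etc. before converting.
// `parsePort` is a hypothetical helper used only for the example.
const parsePort = (value, fallback = 3000) =>
  isNumber(value) ? Number(value) : fallback

console.log(parsePort('8080')) // => 8080
console.log(parsePort(''))     // => 3000 (not 0)
console.log(parsePort('abc'))  // => 3000
```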
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") -* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") -* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") -* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 49 | [jonschlinkert](https://github.com/jonschlinkert) | -| 5 | [charlike-old](https://github.com/charlike-old) | -| 1 | [benaadams](https://github.com/benaadams) | -| 1 | [realityking](https://github.com/realityking) | - -### Author - -**Jon Schlinkert** - -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) - -### License - -Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/is-number/index.js b/node_modules/is-number/index.js deleted file mode 100644 index 27f19b7..0000000 --- a/node_modules/is-number/index.js +++ /dev/null @@ -1,18 +0,0 @@ -/*! - * is-number - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Released under the MIT License. - */ - -'use strict'; - -module.exports = function(num) { - if (typeof num === 'number') { - return num - num === 0; - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); - } - return false; -}; diff --git a/node_modules/is-number/package.json b/node_modules/is-number/package.json deleted file mode 100644 index 3715072..0000000 --- a/node_modules/is-number/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "is-number", - "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", - "version": "7.0.0", - "homepage": "https://github.com/jonschlinkert/is-number", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Olsten Larck (https://i.am.charlike.online)", - "Rouven Weßling (www.rouvenwessling.de)" - ], - "repository": "jonschlinkert/is-number", - "bugs": { - "url": "https://github.com/jonschlinkert/is-number/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.12.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "ansi": "^0.3.1", - "benchmark": "^2.1.4", - "gulp-format-md": "^1.0.0", - "mocha": "^3.5.3" - }, - "keywords": [ - "cast", - "check", - "coerce", - "coercion", - "finite", - "integer", - "is", - "isnan", - "is-nan", - "is-num", - "is-number", - "isnumber", - "isfinite", - "istype", - "kind", - "math", - "nan", - "num", - "number", - "numeric", - "parseFloat", - "parseInt", - "test", - "type", - "typeof", - "value" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "related": { - "list": [ - "is-plain-object", - "is-primitive", - "isobject", - "kind-of" - ] - }, - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/is-stream/index.d.ts b/node_modules/is-stream/index.d.ts deleted file mode 100644 index eee2e83..0000000 --- a/node_modules/is-stream/index.d.ts +++ /dev/null @@ -1,79 +0,0 @@ -import * as stream from 'stream'; - -declare const isStream: { - /** - @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). - - @example - ``` - import * as fs from 'fs'; - import isStream = require('is-stream'); - - isStream(fs.createReadStream('unicorn.png')); - //=> true - - isStream({}); - //=> false - ``` - */ - (stream: unknown): stream is stream.Stream; - - /** - @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). - - @example - ``` - import * as fs from 'fs'; - import isStream = require('is-stream'); - - isStream.writable(fs.createWriteStrem('unicorn.txt')); - //=> true - ``` - */ - writable(stream: unknown): stream is stream.Writable; - - /** - @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). - - @example - ``` - import * as fs from 'fs'; - import isStream = require('is-stream'); - - isStream.readable(fs.createReadStream('unicorn.png')); - //=> true - ``` - */ - readable(stream: unknown): stream is stream.Readable; - - /** - @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). - - @example - ``` - import {Duplex} from 'stream'; - import isStream = require('is-stream'); - - isStream.duplex(new Duplex()); - //=> true - ``` - */ - duplex(stream: unknown): stream is stream.Duplex; - - /** - @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). 
- - @example - ``` - import * as fs from 'fs'; - import Stringify = require('streaming-json-stringify'); - import isStream = require('is-stream'); - - isStream.transform(Stringify()); - //=> true - ``` - */ - transform(input: unknown): input is stream.Transform; -}; - -export = isStream; diff --git a/node_modules/is-stream/index.js b/node_modules/is-stream/index.js deleted file mode 100644 index 2e43434..0000000 --- a/node_modules/is-stream/index.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -const isStream = stream => - stream !== null && - typeof stream === 'object' && - typeof stream.pipe === 'function'; - -isStream.writable = stream => - isStream(stream) && - stream.writable !== false && - typeof stream._write === 'function' && - typeof stream._writableState === 'object'; - -isStream.readable = stream => - isStream(stream) && - stream.readable !== false && - typeof stream._read === 'function' && - typeof stream._readableState === 'object'; - -isStream.duplex = stream => - isStream.writable(stream) && - isStream.readable(stream); - -isStream.transform = stream => - isStream.duplex(stream) && - typeof stream._transform === 'function'; - -module.exports = isStream; diff --git a/node_modules/is-stream/license b/node_modules/is-stream/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/is-stream/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/is-stream/package.json b/node_modules/is-stream/package.json deleted file mode 100644 index c3b5673..0000000 --- a/node_modules/is-stream/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "is-stream", - "version": "2.0.1", - "description": "Check if something is a Node.js stream", - "license": "MIT", - "repository": "sindresorhus/is-stream", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "stream", - "type", - "streams", - "writable", - "readable", - "duplex", - "transform", - "check", - "detect", - "is" - ], - "devDependencies": { - "@types/node": "^11.13.6", - "ava": "^1.4.1", - "tempy": "^0.3.0", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/is-stream/readme.md b/node_modules/is-stream/readme.md deleted file mode 100644 index 19308e7..0000000 --- a/node_modules/is-stream/readme.md +++ /dev/null @@ -1,60 +0,0 @@ -# is-stream - -> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) - -## Install - -``` -$ npm install is-stream -``` - -## Usage - -```js -const fs = require('fs'); -const isStream = require('is-stream'); - -isStream(fs.createReadStream('unicorn.png')); -//=> true - -isStream({}); -//=> false -``` - -## API - -### isStream(stream) - -Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). - -#### isStream.writable(stream) - -Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). - -#### isStream.readable(stream) - -Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). - -#### isStream.duplex(stream) - -Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). - -#### isStream.transform(stream) - -Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). - -## Related - -- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream - ---- - -
- - Get professional support for this package with a Tidelift subscription - -
- - Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. -
-
diff --git a/node_modules/isexe/.npmignore b/node_modules/isexe/.npmignore deleted file mode 100644 index c1cb757..0000000 --- a/node_modules/isexe/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -.nyc_output/ -coverage/ diff --git a/node_modules/isexe/LICENSE b/node_modules/isexe/LICENSE deleted file mode 100644 index 19129e3..0000000 --- a/node_modules/isexe/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/isexe/README.md b/node_modules/isexe/README.md deleted file mode 100644 index 35769e8..0000000 --- a/node_modules/isexe/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# isexe - -Minimal module to check if a file is executable, and a normal file. - -Uses `fs.stat` and tests against the `PATHEXT` environment variable on -Windows. - -## USAGE - -```javascript -var isexe = require('isexe') -isexe('some-file-name', function (err, isExe) { - if (err) { - console.error('probably file does not exist or something', err) - } else if (isExe) { - console.error('this thing can be run') - } else { - console.error('cannot be run') - } -}) - -// same thing but synchronous, throws errors -var isExe = isexe.sync('some-file-name') - -// treat errors as just "not executable" -isexe('maybe-missing-file', { ignoreErrors: true }, callback) -var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) -``` - -## API - -### `isexe(path, [options], [callback])` - -Check if the path is executable. If no callback provided, and a -global `Promise` object is available, then a Promise will be returned. - -Will raise whatever errors may be raised by `fs.stat`, unless -`options.ignoreErrors` is set to true. - -### `isexe.sync(path, [options])` - -Same as `isexe` but returns the value and throws any errors raised. - -### Options - -* `ignoreErrors` Treat all errors as "no, this is not executable", but - don't raise them. -* `uid` Number to use as the user id -* `gid` Number to use as the group id -* `pathExt` List of path extensions to use instead of `PATHEXT` - environment variable on Windows. 
diff --git a/node_modules/isexe/index.js b/node_modules/isexe/index.js deleted file mode 100644 index 553fb32..0000000 --- a/node_modules/isexe/index.js +++ /dev/null @@ -1,57 +0,0 @@ -var fs = require('fs') -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = require('./windows.js') -} else { - core = require('./mode.js') -} - -module.exports = isexe -isexe.sync = sync - -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } - - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } - - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) - } - - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false - } - } - cb(er, is) - }) -} - -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } - } -} diff --git a/node_modules/isexe/mode.js b/node_modules/isexe/mode.js deleted file mode 100644 index 1995ea4..0000000 --- a/node_modules/isexe/mode.js +++ /dev/null @@ -1,41 +0,0 @@ -module.exports = isexe -isexe.sync = sync - -var fs = require('fs') - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} - -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} - -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid - - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() - - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g - - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 - - return ret -} diff --git a/node_modules/isexe/package.json b/node_modules/isexe/package.json deleted file mode 100644 index e452689..0000000 --- a/node_modules/isexe/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "isexe", - "version": "2.0.0", - "description": "Minimal module to check if a file is executable.", - "main": "index.js", - "directories": { - "test": "test" - }, - "devDependencies": { - "mkdirp": "^0.5.1", - "rimraf": "^2.5.0", - "tap": "^10.3.0" - }, - "scripts": { - "test": "tap test/*.js --100", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --all; git push origin --tags" - }, - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/isexe.git" - }, - "keywords": [], - "bugs": { - "url": "https://github.com/isaacs/isexe/issues" - }, - "homepage": "https://github.com/isaacs/isexe#readme" -} diff --git a/node_modules/isexe/test/basic.js b/node_modules/isexe/test/basic.js deleted file mode 100644 index d926df6..0000000 --- a/node_modules/isexe/test/basic.js +++ /dev/null @@ -1,221 +0,0 @@ -var t = require('tap') -var fs = require('fs') -var path = require('path') -var fixture = path.resolve(__dirname, 'fixtures') -var meow = fixture + '/meow.cat' -var mine = fixture + '/mine.cat' -var ours = fixture + '/ours.cat' -var fail = fixture + '/fail.false' -var noent = fixture + '/enoent.exe' -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') - -var isWindows = process.platform === 'win32' -var hasAccess = typeof fs.access === 'function' -var winSkip = isWindows && 'windows' -var accessSkip = !hasAccess && 'no fs.access function' -var hasPromise = typeof Promise === 'function' -var promiseSkip = !hasPromise && 'no global Promise' - -function reset () { - delete require.cache[require.resolve('../')] - return require('../') -} - -t.test('setup fixtures', function (t) { - rimraf.sync(fixture) - mkdirp.sync(fixture) - fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n') - fs.chmodSync(meow, parseInt('0755', 8)) - fs.writeFileSync(fail, '#!/usr/bin/env false\n') - fs.chmodSync(fail, parseInt('0644', 8)) - fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n') - fs.chmodSync(mine, parseInt('0744', 8)) - fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n') - fs.chmodSync(ours, parseInt('0754', 8)) - t.end() -}) - -t.test('promise', { skip: promiseSkip }, function (t) { - var isexe = reset() - t.test('meow async', function (t) { - isexe(meow).then(function (is) { - t.ok(is) - t.end() - }) - }) - t.test('fail async', function (t) { - isexe(fail).then(function (is) { - t.notOk(is) - t.end() - }) - }) - t.test('noent async', function (t) { - isexe(noent).catch(function (er) { - t.ok(er) - t.end() - }) - }) - t.test('noent ignore async', function (t) { - isexe(noent, { ignoreErrors: true }).then(function (is) { - t.notOk(is) - t.end() - }) - }) - t.end() -}) - -t.test('no promise', function (t) { - global.Promise = null - var isexe = reset() - t.throws('try to meow a promise', function () { - isexe(meow) - }) - t.end() -}) - -t.test('access', { skip: accessSkip || winSkip }, function (t) { - runTest(t) -}) - -t.test('mode', { skip: winSkip }, function (t) { - delete fs.access - delete fs.accessSync - var isexe = reset() - t.ok(isexe.sync(ours, { uid: 0, gid: 0 })) - t.ok(isexe.sync(mine, { uid: 0, gid: 0 })) - runTest(t) -}) - -t.test('windows', function (t) { - global.TESTING_WINDOWS = true - var pathExt = '.EXE;.CAT;.CMD;.COM' - t.test('pathExt option', function (t) { - runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' }) - }) - t.test('pathExt env', function (t) { - process.env.PATHEXT = pathExt - runTest(t) - }) - t.test('no pathExt', function (t) { - // with a pathExt of '', any filename is fine. - // so the "fail" one would still pass. - runTest(t, { pathExt: '', skipFail: true }) - }) - t.test('pathext with empty entry', function (t) { - // with a pathExt of '', any filename is fine. - // so the "fail" one would still pass. 
- runTest(t, { pathExt: ';' + pathExt, skipFail: true }) - }) - t.end() -}) - -t.test('cleanup', function (t) { - rimraf.sync(fixture) - t.end() -}) - -function runTest (t, options) { - var isexe = reset() - - var optionsIgnore = Object.create(options || {}) - optionsIgnore.ignoreErrors = true - - if (!options || !options.skipFail) { - t.notOk(isexe.sync(fail, options)) - } - t.notOk(isexe.sync(noent, optionsIgnore)) - if (!options) { - t.ok(isexe.sync(meow)) - } else { - t.ok(isexe.sync(meow, options)) - } - - t.ok(isexe.sync(mine, options)) - t.ok(isexe.sync(ours, options)) - t.throws(function () { - isexe.sync(noent, options) - }) - - t.test('meow async', function (t) { - if (!options) { - isexe(meow, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - } else { - isexe(meow, options, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - } - }) - - t.test('mine async', function (t) { - isexe(mine, options, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - }) - - t.test('ours async', function (t) { - isexe(ours, options, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - }) - - if (!options || !options.skipFail) { - t.test('fail async', function (t) { - isexe(fail, options, function (er, is) { - if (er) { - throw er - } - t.notOk(is) - t.end() - }) - }) - } - - t.test('noent async', function (t) { - isexe(noent, options, function (er, is) { - t.ok(er) - t.notOk(is) - t.end() - }) - }) - - t.test('noent ignore async', function (t) { - isexe(noent, optionsIgnore, function (er, is) { - if (er) { - throw er - } - t.notOk(is) - t.end() - }) - }) - - t.test('directory is not executable', function (t) { - isexe(__dirname, options, function (er, is) { - if (er) { - throw er - } - t.notOk(is) - t.end() - }) - }) - - t.end() -} diff --git a/node_modules/isexe/windows.js b/node_modules/isexe/windows.js deleted file mode 100644 index 3499673..0000000 --- a/node_modules/isexe/windows.js +++ /dev/null @@ -1,42 +0,0 @@ -module.exports = isexe -isexe.sync = sync - -var fs = require('fs') - -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? - options.pathExt : process.env.PATHEXT - - if (!pathext) { - return true - } - - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true - } - } - return false -} - -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) -} - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? 
false : checkStat(stat, path, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} diff --git a/node_modules/merge-stream/LICENSE b/node_modules/merge-stream/LICENSE deleted file mode 100644 index 94a4c0a..0000000 --- a/node_modules/merge-stream/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Stephen Sugden (stephensugden.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/merge-stream/README.md b/node_modules/merge-stream/README.md deleted file mode 100644 index 0d54841..0000000 --- a/node_modules/merge-stream/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# merge-stream - -Merge (interleave) a bunch of streams. - -[![build status](https://secure.travis-ci.org/grncdr/merge-stream.svg?branch=master)](http://travis-ci.org/grncdr/merge-stream) - -## Synopsis - -```javascript -var stream1 = new Stream(); -var stream2 = new Stream(); - -var merged = mergeStream(stream1, stream2); - -var stream3 = new Stream(); -merged.add(stream3); -merged.isEmpty(); -//=> false -``` - -## Description - -This is adapted from [event-stream](https://github.com/dominictarr/event-stream) separated into a new module, using Streams3. - -## API - -### `mergeStream` - -Type: `function` - -Merges an arbitrary number of streams. Returns a merged stream. - -#### `merged.add` - -A method to dynamically add more sources to the stream. The argument supplied to `add` can be either a source or an array of sources. - -#### `merged.isEmpty` - -A method that tells you if the merged stream is empty. - -When a stream is "empty" (aka. no sources were added), it could not be returned to a gulp task. - -So, we could do something like this: - -```js -stream = require('merge-stream')(); -// Something like a loop to add some streams to the merge stream -// stream.add(streamA); -// stream.add(streamB); -return stream.isEmpty() ? 
null : stream; -``` - -## Gulp example - -An example use case for **merge-stream** is to combine parts of a task in a project's **gulpfile.js** like this: - -```js -const gulp = require('gulp'); -const htmlValidator = require('gulp-w3c-html-validator'); -const jsHint = require('gulp-jshint'); -const mergeStream = require('merge-stream'); - -function lint() { - return mergeStream( - gulp.src('src/*.html') - .pipe(htmlValidator()) - .pipe(htmlValidator.reporter()), - gulp.src('src/*.js') - .pipe(jsHint()) - .pipe(jsHint.reporter()) - ); -} -gulp.task('lint', lint); -``` - -## License - -MIT diff --git a/node_modules/merge-stream/index.js b/node_modules/merge-stream/index.js deleted file mode 100644 index b1a9e1a..0000000 --- a/node_modules/merge-stream/index.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -const { PassThrough } = require('stream'); - -module.exports = function (/*streams...*/) { - var sources = [] - var output = new PassThrough({objectMode: true}) - - output.setMaxListeners(0) - - output.add = add - output.isEmpty = isEmpty - - output.on('unpipe', remove) - - Array.prototype.slice.call(arguments).forEach(add) - - return output - - function add (source) { - if (Array.isArray(source)) { - source.forEach(add) - return this - } - - sources.push(source); - source.once('end', remove.bind(null, source)) - source.once('error', output.emit.bind(output, 'error')) - source.pipe(output, {end: false}) - return this - } - - function isEmpty () { - return sources.length == 0; - } - - function remove (source) { - sources = sources.filter(function (it) { return it !== source }) - if (!sources.length && output.readable) { output.end() } - } -} diff --git a/node_modules/merge-stream/package.json b/node_modules/merge-stream/package.json deleted file mode 100644 index 1a4c54c..0000000 --- a/node_modules/merge-stream/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "merge-stream", - "version": "2.0.0", - "description": "Create a stream that emits events from multiple other streams", - "files": [ - "index.js" - ], - "scripts": { - "test": "istanbul cover test.js && istanbul check-cover --statements 100 --branches 100" - }, - "repository": "grncdr/merge-stream", - "author": "Stephen Sugden ", - "license": "MIT", - "dependencies": {}, - "devDependencies": { - "from2": "^2.0.3", - "istanbul": "^0.4.5" - } -} diff --git a/node_modules/merge2/LICENSE b/node_modules/merge2/LICENSE deleted file mode 100644 index 31dd9c7..0000000 --- a/node_modules/merge2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2020 Teambition - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/merge2/README.md b/node_modules/merge2/README.md deleted file mode 100644 index 27f8eb9..0000000 --- a/node_modules/merge2/README.md +++ /dev/null @@ -1,144 +0,0 @@ -# merge2 - -Merge multiple streams into one stream in sequence or parallel. - -[![NPM version][npm-image]][npm-url] -[![Build Status][travis-image]][travis-url] -[![Downloads][downloads-image]][downloads-url] - -## Install - -Install with [npm](https://npmjs.org/package/merge2) - -```sh -npm install merge2 -``` - -## Usage - -```js -const gulp = require('gulp') -const merge2 = require('merge2') -const concat = require('gulp-concat') -const minifyHtml = require('gulp-minify-html') -const ngtemplate = require('gulp-ngtemplate') - -gulp.task('app-js', function () { - return merge2( - gulp.src('static/src/tpl/*.html') - .pipe(minifyHtml({empty: true})) - .pipe(ngtemplate({ - module: 'genTemplates', - standalone: true - }) - ), gulp.src([ - 'static/src/js/app.js', - 'static/src/js/locale_zh-cn.js', - 'static/src/js/router.js', - 'static/src/js/tools.js', - 'static/src/js/services.js', - 'static/src/js/filters.js', - 'static/src/js/directives.js', - 'static/src/js/controllers.js' - ]) - ) - .pipe(concat('app.js')) - .pipe(gulp.dest('static/dist/js/')) -}) -``` - -```js -const stream = merge2([stream1, stream2], stream3, {end: false}) -//... -stream.add(stream4, stream5) -//.. -stream.end() -``` - -```js -// equal to merge2([stream1, stream2], stream3) -const stream = merge2() -stream.add([stream1, stream2]) -stream.add(stream3) -``` - -```js -// merge order: -// 1. merge `stream1`; -// 2. merge `stream2` and `stream3` in parallel after `stream1` merged; -// 3. merge 'stream4' after `stream2` and `stream3` merged; -const stream = merge2(stream1, [stream2, stream3], stream4) - -// merge order: -// 1. merge `stream5` and `stream6` in parallel after `stream4` merged; -// 2. merge 'stream7' after `stream5` and `stream6` merged; -stream.add([stream5, stream6], stream7) -``` - -```js -// nest merge -// equal to merge2(stream1, stream2, stream6, stream3, [stream4, stream5]); -const streamA = merge2(stream1, stream2) -const streamB = merge2(stream3, [stream4, stream5]) -const stream = merge2(streamA, streamB) -streamA.add(stream6) -``` - -## API - -```js -const merge2 = require('merge2') -``` - -### merge2() - -### merge2(options) - -### merge2(stream1, stream2, ..., streamN) - -### merge2(stream1, stream2, ..., streamN, options) - -### merge2(stream1, [stream2, stream3, ...], streamN, options) - -return a duplex stream (mergedStream). streams in array will be merged in parallel. - -### mergedStream.add(stream) - -### mergedStream.add(stream1, [stream2, stream3, ...], ...) - -return the mergedStream. - -### mergedStream.on('queueDrain', function() {}) - -It will emit 'queueDrain' when all streams merged. If you set `end === false` in options, this event give you a notice that should add more streams to merge or end the mergedStream. - -#### stream - -*option* -Type: `Readable` or `Duplex` or `Transform` stream. - -#### options - -*option* -Type: `Object`. - -* **end** - `Boolean` - if `end === false` then mergedStream will not be auto ended, you should end by yourself. 
**Default:** `undefined` - -* **pipeError** - `Boolean` - if `pipeError === true` then mergedStream will emit `error` event from source streams. **Default:** `undefined` - -* **objectMode** - `Boolean` . **Default:** `true` - -`objectMode` and other options(`highWaterMark`, `defaultEncoding` ...) is same as Node.js `Stream`. - -## License - -MIT © [Teambition](https://www.teambition.com) - -[npm-url]: https://npmjs.org/package/merge2 -[npm-image]: http://img.shields.io/npm/v/merge2.svg - -[travis-url]: https://travis-ci.org/teambition/merge2 -[travis-image]: http://img.shields.io/travis/teambition/merge2.svg - -[downloads-url]: https://npmjs.org/package/merge2 -[downloads-image]: http://img.shields.io/npm/dm/merge2.svg?style=flat-square diff --git a/node_modules/merge2/index.js b/node_modules/merge2/index.js deleted file mode 100644 index 78a61ed..0000000 --- a/node_modules/merge2/index.js +++ /dev/null @@ -1,144 +0,0 @@ -'use strict' -/* - * merge2 - * https://github.com/teambition/merge2 - * - * Copyright (c) 2014-2020 Teambition - * Licensed under the MIT license. - */ -const Stream = require('stream') -const PassThrough = Stream.PassThrough -const slice = Array.prototype.slice - -module.exports = merge2 - -function merge2 () { - const streamsQueue = [] - const args = slice.call(arguments) - let merging = false - let options = args[args.length - 1] - - if (options && !Array.isArray(options) && options.pipe == null) { - args.pop() - } else { - options = {} - } - - const doEnd = options.end !== false - const doPipeError = options.pipeError === true - if (options.objectMode == null) { - options.objectMode = true - } - if (options.highWaterMark == null) { - options.highWaterMark = 64 * 1024 - } - const mergedStream = PassThrough(options) - - function addStream () { - for (let i = 0, len = arguments.length; i < len; i++) { - streamsQueue.push(pauseStreams(arguments[i], options)) - } - mergeStream() - return this - } - - function mergeStream () { - if (merging) { - return - } - merging = true - - let streams = streamsQueue.shift() - if (!streams) { - process.nextTick(endStream) - return - } - if (!Array.isArray(streams)) { - streams = [streams] - } - - let pipesCount = streams.length + 1 - - function next () { - if (--pipesCount > 0) { - return - } - merging = false - mergeStream() - } - - function pipe (stream) { - function onend () { - stream.removeListener('merge2UnpipeEnd', onend) - stream.removeListener('end', onend) - if (doPipeError) { - stream.removeListener('error', onerror) - } - next() - } - function onerror (err) { - mergedStream.emit('error', err) - } - // skip ended stream - if (stream._readableState.endEmitted) { - return next() - } - - stream.on('merge2UnpipeEnd', onend) - stream.on('end', onend) - - if (doPipeError) { - stream.on('error', onerror) - } - - stream.pipe(mergedStream, { end: false }) - // compatible for old stream - stream.resume() - } - - for (let i = 0; i < streams.length; i++) { - pipe(streams[i]) - } - - next() - } - - function endStream () { - merging = false - // emit 'queueDrain' when all streams merged. - mergedStream.emit('queueDrain') - if (doEnd) { - mergedStream.end() - } - } - - mergedStream.setMaxListeners(0) - mergedStream.add = addStream - mergedStream.on('unpipe', function (stream) { - stream.emit('merge2UnpipeEnd') - }) - - if (args.length) { - addStream.apply(null, args) - } - return mergedStream -} - -// check and pause streams for pipe. 
-function pauseStreams (streams, options) { - if (!Array.isArray(streams)) { - // Backwards-compat with old-style streams - if (!streams._readableState && streams.pipe) { - streams = streams.pipe(PassThrough(options)) - } - if (!streams._readableState || !streams.pause || !streams.pipe) { - throw new Error('Only readable stream can be merged.') - } - streams.pause() - } else { - for (let i = 0, len = streams.length; i < len; i++) { - streams[i] = pauseStreams(streams[i], options) - } - } - return streams -} diff --git a/node_modules/merge2/package.json b/node_modules/merge2/package.json deleted file mode 100644 index 7777307..0000000 --- a/node_modules/merge2/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "merge2", - "description": "Merge multiple streams into one stream in sequence or parallel.", - "authors": [ - "Yan Qing " - ], - "license": "MIT", - "version": "1.4.1", - "main": "./index.js", - "repository": { - "type": "git", - "url": "git@github.com:teambition/merge2.git" - }, - "homepage": "https://github.com/teambition/merge2", - "keywords": [ - "merge2", - "multiple", - "sequence", - "parallel", - "merge", - "stream", - "merge stream", - "sync" - ], - "engines": { - "node": ">= 8" - }, - "dependencies": {}, - "devDependencies": { - "standard": "^14.3.4", - "through2": "^3.0.1", - "thunks": "^4.9.6", - "tman": "^1.10.0", - "to-through": "^2.0.0" - }, - "scripts": { - "test": "standard && tman" - }, - "files": [ - "README.md", - "index.js" - ] -} diff --git a/node_modules/metacommon/.travis.yml b/node_modules/metacommon/.travis.yml deleted file mode 100644 index d56cc68..0000000 --- a/node_modules/metacommon/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: c -services: docker -os: linux -script: - - npm run docker diff --git a/node_modules/metacommon/README.md b/node_modules/metacommon/README.md deleted file mode 100644 index b644db1..0000000 --- a/node_modules/metacommon/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# metacommon [![npm version](https://badge.fury.io/js/metacommon.svg)](https://badge.fury.io/js/metacommon) -C++ Header-only repository with macros and very much metaprogramming diff --git a/node_modules/metacommon/cmaki.yml b/node_modules/metacommon/cmaki.yml deleted file mode 100644 index 1415403..0000000 --- a/node_modules/metacommon/cmaki.yml +++ /dev/null @@ -1,14 +0,0 @@ -- metacommon: - <<: *thirdparty_defaults - post_install: - - ./*.h include/metacommon/ - source: https://github.com/makiolo/metacommon.git - build: - | - #!/bin/bash - CMAKI_INSTALL=$SELFHOME npm install - targets: - # header only - - dummy: - info: - <<: *library_dynamic diff --git a/node_modules/metacommon/common.h b/node_modules/metacommon/common.h deleted file mode 100644 index f439a2d..0000000 --- a/node_modules/metacommon/common.h +++ /dev/null @@ -1,426 +0,0 @@ -#ifndef _META_COMMON_ -#define _META_COMMON_ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#ifndef STATIC_MULTITHREAD -#if defined(__clang__) -#define STATIC_MULTITHREAD static -#elif defined(__GNUC__) || defined(__GNUG__) -#define STATIC_MULTITHREAD static __thread -#elif defined(_MSC_VER) -#define STATIC_MULTITHREAD __declspec(thread) static -#endif -#endif - -#if __cplusplus < 201402L && 0 - -namespace std { - -template -std::unique_ptr make_unique(Args&&... 
args) -{ - return std::unique_ptr(new T(std::forward(args)...)); -} - -} - -#endif - -namespace ctti { - -// http://stackoverflow.com/a/15863804 - -// helper function -constexpr unsigned c_strlen( char const* str, unsigned count = 0 ) -{ - return ('\0' == str[0]) ? count : c_strlen(str+1, count+1); -} - -// helper "function" struct -template < char t_c, char... tt_c > -struct rec_print -{ - static void print() - { - std::cout << t_c; - rec_print < tt_c... > :: print (); - } -}; - -template < char t_c > -struct rec_print < t_c > -{ - static void print() - { - std::cout << t_c; - } -}; - -// helper "function" struct -template < char t_c, char... tt_c > -struct rec_get -{ - static void get(std::stringstream& ss) - { - ss << t_c; - rec_get < tt_c... > :: get (ss); - } -}; - -template < char t_c > -struct rec_get < t_c > -{ - static void get(std::stringstream& ss) - { - ss << t_c; - } -}; - -template < char t_c, char... tt_c > -struct rec_hash -{ - static constexpr size_t hash(size_t seed) - { - return rec_hash ::hash(seed * 33 ^ static_cast(t_c)); - } -}; - -template < char t_c > -struct rec_hash < t_c > -{ - static constexpr size_t hash(size_t seed) - { - return seed * 33 ^ static_cast(t_c); - } -}; - -// destination "template string" type -template < char... tt_c > -struct str_typed_string -{ - static void print() - { - rec_print < tt_c... > :: print(); - std::cout << std::endl; - } - - static std::string get() - { - std::stringstream ss; - rec_get :: get(ss); - return ss.str(); - } - - static constexpr size_t hash() - { - return rec_hash ::hash(5381); - } -}; - -// struct to str_type a `char const*` to an `str_typed_string` type -template < typename T_StrProvider, unsigned t_len, char... tt_c > -struct str_type_impl -{ - using result = typename str_type_impl < T_StrProvider, t_len-1, - T_StrProvider::KEY()[t_len-1], - tt_c... > :: result; -}; - -template < typename T_StrProvider, char... tt_c > -struct str_type_impl < T_StrProvider, 0, tt_c... > -{ - using result = str_typed_string < tt_c... >; -}; - -// syntactical sugar -template < typename T_StrProvider > -using str_type = typename str_type_impl < T_StrProvider, c_strlen(T_StrProvider::KEY()) > :: result; - -} // end namespace - -namespace mc { - -/* - -template -class get_type -{ - template - using type = typename std::tuple_element >::type; -}; - -template -void __foreach_tuple(Function&& func, T&& elem) -{ - func(elem); -} - -template -void __foreach_tuple(Function&& func, T&& elem, Args&& ... args) -{ - // static_assert(std::is_same::type<0> >::value, ""); - func(elem); - __foreach_tuple(std::forward(func), std::forward(args)...); -} - -template -void _foreach_tuple(const std::tuple& t, Function&& func, std::index_sequence) -{ - __foreach_tuple(std::forward(func), std::get(t)...); -} - -template -void foreach_tuple(const std::tuple& t, Function&& func) -{ - _foreach_tuple(t, std::forward(func), std::make_index_sequence < - std::tuple_size< std::tuple >::value - >()); -} - -template -void foreach_args(Function&& func, Args&& ... 
args) -{ - foreach_tuple(std::make_tuple(std::forward(args)...), std::forward(func)); -} - -template -auto _vector_to_tuple(const std::vector& v, std::index_sequence) -{ - return std::make_tuple(v[N]...); -} - -template -auto vector_to_tuple(const std::vector& v) -{ - assert(v.size() >= N); - return _vector_to_tuple(v, std::make_index_sequence()); -} - -template -std::vector tuple_to_vector(const std::tuple& t) -{ - std::vector v; - foreach_tuple(t, [&v](const auto& d) { - v.emplace_back(d); - }); - return v; -} - -// http://aherrmann.github.io/programming/2016/02/28/unpacking-tuples-in-cpp14/ -template -constexpr auto index_apply_impl(F&& f, std::index_sequence) { - return f(std::integral_constant {}...); -} - -template -constexpr auto index_apply(F&& f) { - return index_apply_impl(std::forward(f), std::make_index_sequence{}); -} - -template -constexpr auto _head(Tuple t) { - return index_apply<1>([&](auto... Is) { - return std::make_tuple(std::get(t)...); - }); -} - -*/ - -template -constexpr auto head(Tuple t) -{ - return std::get<0>(_head(t)); -} - -/* - -template -constexpr auto tail(Tuple t) { - return index_apply{}-1u>([&](auto... Is) { - return std::make_tuple(std::get(t)...); - }); -} - -template -constexpr auto reverse(Tuple t) { - return index_apply{}>( - [&](auto... Is) { - return std::make_tuple( - std::get{} - 1 - Is>(t)...); - }); -} - -// http://open-std.org/jtc1/sc22/wg21/docs/papers/2014/n3829.pdf -template -constexpr auto apply(Function&& f, Tuple&& t) { - return index_apply{}>( - [&](auto... Is) { - return std::forward(f)( std::get(std::forward(t))... ); - }); -} - -*/ - -// http://talesofcpp.fusionfenix.com/post-14/true-story-moving-past-bind -template -auto bind(F&& f, Args&&... args) { - return [ - f = std::forward(f) - , args = std::make_tuple(std::forward(args)...) 
- ]() mutable -> decltype(auto) { - return apply(std::move(f), std::move(args)); - }; -} - -} // end namespace mc - -// method macros -#define DEFINE_KEY(__CLASS__) \ - constexpr static char const* KEY() { return #__CLASS__; } \ - virtual const std::string& getKEY() const { static std::string key = #__CLASS__; return key; } \ - -// method non-macros (yes, exists optional macro :D) -#define DEFINE_HASH(__CLASS__) \ - namespace std { \ - template <> \ - struct hash<__CLASS__> \ - { size_t operator()() const { static size_t h = std::hash()(#__CLASS__); return h; } }; } \ - -#define DEFINE_HASH_CUSTOM(__CLASS__, __TYPE__, __VALUE__) \ - namespace std { \ - template <> \ - struct hash<__CLASS__> \ - { size_t operator()() const { static size_t h = std::hash<__TYPE__>()(__VALUE__); return h; } }; } \ - -template -class has_key -{ - typedef char(&yes)[2]; - - template struct Exists; - - template - static yes CheckMember(Exists*); - template - static char CheckMember(...); - -public: - static const bool value = (sizeof(CheckMember(0)) == sizeof(yes)); -}; - -template -class has_instance -{ - typedef char(&yes)[2]; - - template struct Exists; - - template - static yes CheckMember(Exists*); - template - static char CheckMember(...); - -public: - static const bool value = (sizeof(CheckMember(0)) == sizeof(yes)); -}; - -template -struct int_sequence -{ - -}; - -template -struct make_int_sequence : make_int_sequence -{ - -}; - -template -struct make_int_sequence<0, Is...> : int_sequence -{ - -}; - -template -struct placeholder_template -{ - -}; - -namespace std -{ - -template -struct is_placeholder> : integral_constant -{ - -}; - -} - -template -struct seq -{ -}; - -template -struct gens : gens -{ -}; - -template -struct gens<0, Is...> : seq -{ -}; - -namespace dp14 -{ - -template -class hash -{ -public: - template - size_t operator()(Args&&... args) const - { - size_t h = 0; - _hash_forwarding(h, std::forward(args)...); - return h; - } - -protected: - template - void _combine_hash(size_t& seed, U&& x) const - { - seed ^= std::hash()(std::forward(x)) + 0x9e3779b9 + (seed << 6) + (seed >> 2); - } - - template - void _hash_forwarding(size_t& h, U&& parm, Args&&... 
args) const - { - _combine_hash(h, std::forward(parm)); - _hash_forwarding(h, std::forward(args)...); - } - - template - void _hash_forwarding(size_t& h, U&& parm) const - { - _combine_hash(h, std::forward(parm)); - } -}; - -} - -#endif - diff --git a/node_modules/metacommon/compile.sh b/node_modules/metacommon/compile.sh deleted file mode 100644 index cd0db0b..0000000 --- a/node_modules/metacommon/compile.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -echo ok - diff --git a/node_modules/metacommon/package.json b/node_modules/metacommon/package.json deleted file mode 100644 index 3aa120f..0000000 --- a/node_modules/metacommon/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "metacommon", - "version": "1.0.1", - "description": "C++ Header-only repository with macros and very much metaprogramming", - "repository": { - "type": "git", - "url": "git+https://github.com/makiolo/metacommon.git" - }, - "scripts": { - "ci": "curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh | bash", - "docker": "curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/docker.sh | bash", - "clean": "cmaki clean", - "setup": "echo ok", - "compile": "echo ok", - "install": "echo ok", - "test": "echo ok", - "upload": "cmaki upload" - }, - "keywords": [ - "c++", - "metaprogramming" - ], - "author": "Ricardo Marmolejo García", - "license": "MIT", - "bugs": { - "url": "https://github.com/makiolo/metacommon/issues" - }, - "devDependencies": { - "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" - }, - "homepage": "https://github.com/makiolo/metacommon#readme" -} diff --git a/node_modules/metacommon/setup.sh b/node_modules/metacommon/setup.sh deleted file mode 100644 index cd0db0b..0000000 --- a/node_modules/metacommon/setup.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -echo ok - diff --git a/node_modules/micromatch/LICENSE b/node_modules/micromatch/LICENSE deleted file mode 100755 index 9af4a67..0000000 --- a/node_modules/micromatch/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/micromatch/README.md b/node_modules/micromatch/README.md deleted file mode 100644 index d72a059..0000000 --- a/node_modules/micromatch/README.md +++ /dev/null @@ -1,1024 +0,0 @@ -# micromatch [![NPM version](https://img.shields.io/npm/v/micromatch.svg?style=flat)](https://www.npmjs.com/package/micromatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![NPM total downloads](https://img.shields.io/npm/dt/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![Tests](https://github.com/micromatch/micromatch/actions/workflows/test.yml/badge.svg)](https://github.com/micromatch/micromatch/actions/workflows/test.yml) - -> Glob matching for javascript/node.js. A replacement and faster alternative to minimatch and multimatch. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Table of Contents - -
-Details - - * [Install](#install) -- [Sponsors](#sponsors) - * [Gold Sponsors](#gold-sponsors) - * [Quickstart](#quickstart) - * [Why use micromatch?](#why-use-micromatch) - + [Matching features](#matching-features) - * [Switching to micromatch](#switching-to-micromatch) - + [From minimatch](#from-minimatch) - + [From multimatch](#from-multimatch) - * [API](#api) - * [Options](#options) - * [Options Examples](#options-examples) - + [options.basename](#optionsbasename) - + [options.bash](#optionsbash) - + [options.expandRange](#optionsexpandrange) - + [options.format](#optionsformat) - + [options.ignore](#optionsignore) - + [options.matchBase](#optionsmatchbase) - + [options.noextglob](#optionsnoextglob) - + [options.nonegate](#optionsnonegate) - + [options.noglobstar](#optionsnoglobstar) - + [options.nonull](#optionsnonull) - + [options.nullglob](#optionsnullglob) - + [options.onIgnore](#optionsonignore) - + [options.onMatch](#optionsonmatch) - + [options.onResult](#optionsonresult) - + [options.posixSlashes](#optionsposixslashes) - + [options.unescape](#optionsunescape) - * [Extended globbing](#extended-globbing) - + [Extglobs](#extglobs) - + [Braces](#braces) - + [Regex character classes](#regex-character-classes) - + [Regex groups](#regex-groups) - + [POSIX bracket expressions](#posix-bracket-expressions) - * [Notes](#notes) - + [Bash 4.3 parity](#bash-43-parity) - + [Backslashes](#backslashes) - * [Benchmarks](#benchmarks) - + [Running benchmarks](#running-benchmarks) - + [Latest results](#latest-results) - * [Contributing](#contributing) - * [About](#about) - -
- -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save micromatch -``` - -
- -# Sponsors - -[Become a Sponsor](https://github.com/sponsors/jonschlinkert) to add your logo to this README, or any of [my other projects](https://github.com/jonschlinkert?tab=repositories&q=&type=&language=&sort=stargazers) - -
- -## Quickstart - -```js -const micromatch = require('micromatch'); -// micromatch(list, patterns[, options]); -``` - -The [main export](#micromatch) takes a list of strings and one or more glob patterns: - -```js -console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['f*', 'b*'])) //=> ['foo', 'bar', 'baz'] -console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['*', '!b*'])) //=> ['foo', 'qux'] -``` - -Use [.isMatch()](#ismatch) to for boolean matching: - -```js -console.log(micromatch.isMatch('foo', 'f*')) //=> true -console.log(micromatch.isMatch('foo', ['b*', 'f*'])) //=> true -``` - -[Switching](#switching-to-micromatch) from minimatch and multimatch is easy! - -
- -## Why use micromatch? - -> micromatch is a [replacement](#switching-to-micromatch) for minimatch and multimatch - -* Supports all of the same matching features as [minimatch](https://github.com/isaacs/minimatch) and [multimatch](https://github.com/sindresorhus/multimatch) -* More complete support for the Bash 4.3 specification than minimatch and multimatch. Micromatch passes _all of the spec tests_ from bash, including some that bash still fails. -* **Fast & Performant** - Loads in about 5ms and performs [fast matches](#benchmarks). -* **Glob matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories -* **[Advanced globbing](#extended-globbing)** - Supports [extglobs](#extglobs), [braces](#braces-1), and [POSIX brackets](#posix-bracket-expressions), and support for escaping special characters with `\` or quotes. -* **Accurate** - Covers more scenarios [than minimatch](https://github.com/yarnpkg/yarn/pull/3339) -* **Well tested** - More than 5,000 [test assertions](./test) -* **Windows support** - More reliable windows support than minimatch and multimatch. -* **[Safe](https://github.com/micromatch/braces#braces-is-safe)** - Micromatch is not subject to DoS with brace patterns like minimatch and multimatch. - -### Matching features - -* Support for multiple glob patterns (no need for wrappers like multimatch) -* Wildcards (`**`, `*.js`) -* Negation (`'!a/*.js'`, `'*!(b).js'`) -* [extglobs](#extglobs) (`+(x|y)`, `!(a|b)`) -* [POSIX character classes](#posix-bracket-expressions) (`[[:alpha:][:digit:]]`) -* [brace expansion](https://github.com/micromatch/braces) (`foo/{1..5}.md`, `bar/{a,b,c}.js`) -* regex character classes (`foo-[1-5].js`) -* regex logical "or" (`foo/(abc|xyz).js`) - -You can mix and match these features to create whatever patterns you need! - -## Switching to micromatch - -_(There is one notable difference between micromatch and minimatch in regards to how backslashes are handled. See [the notes about backslashes](#backslashes) for more information.)_ - -### From minimatch - -Use [micromatch.isMatch()](#ismatch) instead of `minimatch()`: - -```js -console.log(micromatch.isMatch('foo', 'b*')); //=> false -``` - -Use [micromatch.match()](#match) instead of `minimatch.match()`: - -```js -console.log(micromatch.match(['foo', 'bar'], 'b*')); //=> 'bar' -``` - -### From multimatch - -Same signature: - -```js -console.log(micromatch(['foo', 'bar', 'baz'], ['f*', '*z'])); //=> ['foo', 'baz'] -``` - -## API - -**Params** - -* `list` **{String|Array}**: List of strings to match. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) -* `returns` **{Array}**: Returns an array of matches - -**Example** - -```js -const mm = require('micromatch'); -// mm(list, patterns[, options]); - -console.log(mm(['a.js', 'a.txt'], ['*.js'])); -//=> [ 'a.js' ] -``` - -### [.matcher](index.js#L109) - -Returns a matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. - -**Params** - -* `pattern` **{String}**: Glob pattern -* `options` **{Object}** -* `returns` **{Function}**: Returns a matcher function. 
- -**Example** - -```js -const mm = require('micromatch'); -// mm.matcher(pattern[, options]); - -const isMatch = mm.matcher('*.!(*a)'); -console.log(isMatch('a.a')); //=> false -console.log(isMatch('a.b')); //=> true -``` - -### [.isMatch](index.js#L128) - -Returns true if **any** of the given glob `patterns` match the specified `string`. - -**Params** - -* `str` **{String}**: The string to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `[options]` **{Object}**: See available [options](#options). -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const mm = require('micromatch'); -// mm.isMatch(string, patterns[, options]); - -console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true -console.log(mm.isMatch('a.a', 'b.*')); //=> false -``` - -### [.not](index.js#L153) - -Returns a list of strings that _**do not match any**_ of the given `patterns`. - -**Params** - -* `list` **{Array}**: Array of strings to match. -* `patterns` **{String|Array}**: One or more glob pattern to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. - -**Example** - -```js -const mm = require('micromatch'); -// mm.not(list, patterns[, options]); - -console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); -//=> ['b.b', 'c.c'] -``` - -### [.contains](index.js#L193) - -Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. - -**Params** - -* `str` **{String}**: The string to match. -* `patterns` **{String|Array}**: Glob pattern to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any of the patterns matches any part of `str`. - -**Example** - -```js -var mm = require('micromatch'); -// mm.contains(string, pattern[, options]); - -console.log(mm.contains('aa/bb/cc', '*b')); -//=> true -console.log(mm.contains('aa/bb/cc', '*d')); -//=> false -``` - -### [.matchKeys](index.js#L235) - -Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. - -**Params** - -* `object` **{Object}**: The object with keys to filter. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Object}**: Returns an object with only keys that match the given patterns. - -**Example** - -```js -const mm = require('micromatch'); -// mm.matchKeys(object, patterns[, options]); - -const obj = { aa: 'a', ab: 'b', ac: 'c' }; -console.log(mm.matchKeys(obj, '*b')); -//=> { ab: 'b' } -``` - -### [.some](index.js#L264) - -Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - -**Params** - -* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
-* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any of the `patterns` match any of the strings in `list` - -**Example** - -```js -const mm = require('micromatch'); -// mm.some(list, patterns[, options]); - -console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); -// true -console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); -// false -``` - -### [.every](index.js#L300) - -Returns true if every string in the given `list` matches any of the given glob `patterns`. - -**Params** - -* `list` **{String|Array}**: The string or array of strings to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if all `patterns` match all of the strings in `list` - -**Example** - -```js -const mm = require('micromatch'); -// mm.every(list, patterns[, options]); - -console.log(mm.every('foo.js', ['foo.js'])); -// true -console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); -// true -console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); -// false -console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); -// false -``` - -### [.all](index.js#L339) - -Returns true if **all** of the given `patterns` match the specified string. - -**Params** - -* `str` **{String|Array}**: The string to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if all `patterns` match `str` - -**Example** - -```js -const mm = require('micromatch'); -// mm.all(string, patterns[, options]); - -console.log(mm.all('foo.js', ['foo.js'])); -// true - -console.log(mm.all('foo.js', ['*.js', '!foo.js'])); -// false - -console.log(mm.all('foo.js', ['*.js', 'foo.js'])); -// true - -console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); -// true -``` - -### [.capture](index.js#L366) - -Returns an array of matches captured by `pattern` in `string`, or `null` if the pattern did not match. - -**Params** - -* `glob` **{String}**: Glob pattern to use for matching. -* `input` **{String}**: String to match -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Array|null}**: Returns an array of captures if the input matches the glob pattern, otherwise `null`. - -**Example** - -```js -const mm = require('micromatch'); -// mm.capture(pattern, string[, options]); - -console.log(mm.capture('test/*.js', 'test/foo.js')); -//=> ['foo'] -console.log(mm.capture('test/*.js', 'foo/bar.css')); -//=> null -``` - -### [.makeRe](index.js#L392) - -Create a regular expression from the given glob `pattern`. - -**Params** - -* `pattern` **{String}**: A glob pattern to convert to regex. -* `options` **{Object}** -* `returns` **{RegExp}**: Returns a regex created from the given pattern. - -**Example** - -```js -const mm = require('micromatch'); -// mm.makeRe(pattern[, options]); - -console.log(mm.makeRe('*.js')); -//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ -``` - -### [.scan](index.js#L408) - -Scan a glob pattern to separate the pattern into segments. Used by the [split](#split) method.
- -**Params** - -* `pattern` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with - -**Example** - -```js -const mm = require('micromatch'); -const state = mm.scan(pattern[, options]); -``` - -### [.parse](index.js#L424) - -Parse a glob pattern to create the source string for a regular expression. - -**Params** - -* `glob` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with useful properties and output to be used as regex source string. - -**Example** - -```js -const mm = require('micromatch'); -const state = mm.parse(pattern[, options]); -``` - -### [.braces](index.js#L451) - -Process the given brace `pattern`. - -**Params** - -* `pattern` **{String}**: String with brace pattern to process. -* `options` **{Object}**: Any [options](#options) to change how expansion is performed. See the [braces](https://github.com/micromatch/braces) library for all available options. -* `returns` **{Array}** - -**Example** - -```js -const { braces } = require('micromatch'); -console.log(braces('foo/{a,b,c}/bar')); -//=> [ 'foo/(a|b|c)/bar' ] - -console.log(braces('foo/{a,b,c}/bar', { expand: true })); -//=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] -``` - -## Options - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | -| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | -| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | -| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | -| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | -| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | -| `dot` | `boolean` | `false` | Match dotfiles. Otherwise dotfiles are ignored unless a `.` is explicitly defined in the pattern. | -| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. This option is overridden by the `expandBrace` option. | -| `failglob` | `boolean` | `false` | Similar to the `failglob` behavior in Bash, throws an error when no matches are found. Based on the bash option of the same name. | -| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | -| `flags` | `boolean` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | -| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | -| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. 
| -| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | -| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | -| `lookbehinds` | `boolean` | `true` | Support regex positive and negative lookbehinds. Note that you must be using Node 8.1.10 or higher to enable regex lookbehinds. | -| `matchBase` | `boolean` | `false` | Alias for `basename` | -| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | -| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | -| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | -| `nocase` | `boolean` | `false` | Perform case-insensitive matching. Equivalent to the regex `i` flag. Note that this option is ignored when the `flags` option is defined. | -| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. | -| `noext` | `boolean` | `false` | Alias for `noextglob` | -| `noextglob` | `boolean` | `false` | Disable support for matching with [extglobs](#extglobs) (like `+(a\|b)`) | -| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | -| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | -| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | -| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | -| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | -| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | -| `posix` | `boolean` | `false` | Support [POSIX character classes](#posix-bracket-expressions) ("posix brackets"). | -| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | -| `prepend` | `string` | `undefined` | String to prepend to the generated regex used for matching. | -| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | -| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | -| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | -| `unescape` | `boolean` | `undefined` | Remove preceding backslashes from escaped glob characters before creating the regular expression to perform matches. | -| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatitibility. | - -## Options Examples - -### options.basename - -Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`. 
- -**Type**: `Boolean` - -**Default**: `false` - -**Example** - -```js -micromatch(['a/b.js', 'a/c.md'], '*.js'); -//=> [] - -micromatch(['a/b.js', 'a/c.md'], '*.js', { basename: true }); -//=> ['a/b.js'] -``` - -### options.bash - -Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as any other star. - -**Type**: `Boolean` - -**Default**: `true` - -**Example** - -```js -const files = ['abc', 'ajz']; -console.log(micromatch(files, '[a-c]*')); -//=> ['abc', 'ajz'] - -console.log(micromatch(files, '[a-c]*', { bash: false })); -``` - -### options.expandRange - -**Type**: `function` - -**Default**: `undefined` - -Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. - -**Example** - -The following example shows how to create a glob that matches a numeric folder name between `01` and `25`, with leading zeros. - -```js -const fill = require('fill-range'); -const regex = micromatch.makeRe('foo/{01..25}/bar', { - expandRange(a, b) { - return `(${fill(a, b, { toRegex: true })})`; - } -}); - -console.log(regex) -//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ - -console.log(regex.test('foo/00/bar')) // false -console.log(regex.test('foo/01/bar')) // true -console.log(regex.test('foo/10/bar')) // true -console.log(regex.test('foo/22/bar')) // true -console.log(regex.test('foo/25/bar')) // true -console.log(regex.test('foo/26/bar')) // false -``` - -### options.format - -**Type**: `function` - -**Default**: `undefined` - -Custom function for formatting strings before they're matched. - -**Example** - -```js -// strip leading './' from strings -const format = str => str.replace(/^\.\//, ''); -const isMatch = picomatch('foo/*.js', { format }); -console.log(isMatch('./foo/bar.js')) //=> true -``` - -### options.ignore - -String or array of glob patterns to match files to ignore. - -**Type**: `String|Array` - -**Default**: `undefined` - -```js -const isMatch = micromatch.matcher('*', { ignore: 'f*' }); -console.log(isMatch('foo')) //=> false -console.log(isMatch('bar')) //=> true -console.log(isMatch('baz')) //=> true -``` - -### options.matchBase - -Alias for [options.basename](#options-basename). - -### options.noextglob - -Disable extglob support, so that [extglobs](#extglobs) are regarded as literal characters. - -**Type**: `Boolean` - -**Default**: `undefined` - -**Examples** - -```js -console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)')); -//=> ['a/b', 'a/!(z)'] - -console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)', { noextglob: true })); -//=> ['a/!(z)'] (matches only as literal characters) -``` - -### options.nonegate - -Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. - -**Type**: `Boolean` - -**Default**: `undefined` - -### options.noglobstar - -Disable matching with globstars (`**`). - -**Type**: `Boolean` - -**Default**: `undefined` - -```js -micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**'); -//=> ['a/b', 'a/b/c', 'a/b/c/d'] - -micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true}); -//=> ['a/b'] -``` - -### options.nonull - -Alias for [options.nullglob](#options-nullglob). 
- -### options.nullglob - -If `true`, when no matches are found the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nonull`. - -**Type**: `Boolean` - -**Default**: `undefined` - -### options.onIgnore - -```js -const onIgnore = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); - // { glob: '*', regex: /^(?:(?!\.)(?=.)[^\/]*?\/?)$/, input: 'foo', output: 'foo' } -}; - -const isMatch = micromatch.matcher('*', { onIgnore, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -### options.onMatch - -```js -const onMatch = ({ glob, regex, input, output }) => { - console.log({ input, output }); - // { input: 'some\\path', output: 'some/path' } - // { input: 'some\\path', output: 'some/path' } - // { input: 'some\\path', output: 'some/path' } -}; - -const isMatch = micromatch.matcher('**', { onMatch, posixSlashes: true }); -isMatch('some\\path'); -isMatch('some\\path'); -isMatch('some\\path'); -``` - -### options.onResult - -```js -const onResult = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = micromatch.matcher('*', { onResult, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -### options.posixSlashes - -Convert path separators on returned files to posix/unix-style forward slashes. Aliased as `unixify` for backwards compatibility. - -**Type**: `Boolean` - -**Default**: `true` on windows, `false` everywhere else. - -**Example** - -```js -console.log(micromatch.match(['a\\b\\c'], 'a/**')); -//=> ['a/b/c'] - -console.log(micromatch.match(['a\\b\\c'], 'a/**', { posixSlashes: false })); -//=> ['a\\b\\c'] -``` - -### options.unescape - -Remove backslashes from escaped glob characters before creating the regular expression to perform matches. - -**Type**: `Boolean` - -**Default**: `undefined` - -**Example** - -In this example we want to match a literal `*`: - -```js -console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c')); -//=> ['a\\*c'] - -console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c', { unescape: true })); -//=> ['a*c'] -``` - -
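-Neither `nonegate` nor `nullglob` above includes an example. The following is a minimal sketch of the behavior described in the [options](#options) table, added here for illustration and not part of the original examples: - -```js -// nonegate: treat a leading '!' as a literal character instead of negation -console.log(micromatch(['!foo', 'bar'], '!foo')); //=> ['!foo', 'bar'] (negation: everything except 'foo') -console.log(micromatch(['!foo', 'bar'], '!foo', { nonegate: true })); //=> ['!foo'] (literal match) - -// nullglob: return the (arrayified) pattern instead of [] when nothing matches -console.log(micromatch(['a.md'], '*.js')); //=> [] -console.log(micromatch(['a.md'], '*.js', { nullglob: true })); //=> ['*.js'] -``` -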
-
- -## Extended globbing - -Micromatch supports the following extended globbing features. - -### Extglobs - -Extended globbing, as described by the bash man page: - -| **pattern** | **regex equivalent** | **description** | -| --- | --- | --- | -| `?(pattern)` | `(pattern)?` | Matches zero or one occurrence of the given patterns | -| `*(pattern)` | `(pattern)*` | Matches zero or more occurrences of the given patterns | -| `+(pattern)` | `(pattern)+` | Matches one or more occurrences of the given patterns | -| `@(pattern)` | `(pattern)` * | Matches one of the given patterns | -| `!(pattern)` | N/A (equivalent regex is much more complicated) | Matches anything except one of the given patterns | - -* Note that `@` isn't a regex character. - -### Braces - -Brace patterns can be used to match specific ranges or sets of characters. - -**Example** - -The pattern `{f,b}*/{1..3}/{b,q}*` would match any of the following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -baz/1/qux -baz/2/qux -baz/3/qux -``` - -Visit [braces](https://github.com/micromatch/braces) to see the full range of features and options related to brace expansion, or to create brace matching or expansion related issues. - -### Regex character classes - -Given the list: `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: - -* `[ac].js`: matches both `a` and `c`, returning `['a.js', 'c.js']` -* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']` -* `[A-Z].js`: matches an uppercase letter, returning `['E.js']` - -Learn about [regex character classes](http://www.regular-expressions.info/charclass.html). - -### Regex groups - -Given `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: - -* `(a|c).js`: would match either `a` or `c`, returning `['a.js', 'c.js']` -* `(b|d).js`: would match either `b` or `d`, returning `['b.js', 'd.js']` -* `(b|[A-Z]).js`: would match either `b` or an uppercase letter, returning `['b.js', 'E.js']` - -As with regex, parens can be nested, so patterns like `((a|b)|c)/b` will work, although brace expansion might be friendlier to use, depending on preference. - -### POSIX bracket expressions - -POSIX brackets are intended to be more user-friendly than regex character classes. This of course is in the eye of the beholder. - -**Example** - -```js -console.log(micromatch.isMatch('a1', '[[:alpha:][:digit:]]')) //=> true -console.log(micromatch.isMatch('a1', '[[:alpha:][:alpha:]]')) //=> false -``` - -*** - -## Notes - -### Bash 4.3 parity - -Whenever possible, matching behavior is based on the behavior of Bash 4.3, which is mostly consistent with minimatch. - -However, it's surprising how many edge cases and rabbit holes there are with glob matching, and since there is no real glob specification, and micromatch is more accurate than both Bash and minimatch, there are cases where best-guesses were made for behavior. In a few cases where Bash had no answers, we used wildmatch (used by git) as a fallback. - -### Backslashes - -There is an important, notable difference between minimatch and micromatch _in regards to how backslashes are handled_ in glob patterns. - -* Micromatch exclusively and explicitly reserves backslashes for escaping characters in a glob pattern, even on windows, which is consistent with bash behavior. _More importantly, unescaping globs can result in unsafe regular expressions_. -* Minimatch converts all backslashes to forward slashes, which means you can't use backslashes to escape any characters in your glob patterns.
- -We made this decision for micromatch for a couple of reasons: - -* Consistency with bash conventions. -* Glob patterns are not filepaths. They are a type of [regular language](https://en.wikipedia.org/wiki/Regular_language) that is converted to a JavaScript regular expression. Thus, when forward slashes are defined in a glob pattern, the resulting regular expression will match windows or POSIX path separators just fine. - -**A note about joining paths to globs** - -Note that when you pass something like `path.join('foo', '*')` to micromatch, you are creating a filepath and expecting it to still work as a glob pattern. This causes problems on windows, since the `path.sep` is `\\`. - -In other words, since `\\` is reserved as an escape character in globs, on windows `path.join('foo', '*')` would result in `foo\\*`, which tells micromatch to match `*` as a literal character. This is the same behavior as bash. - -To solve this, you might be inspired to do something like `'foo\\*'.replace(/\\/g, '/')`, but this causes another, potentially much more serious, problem. - -## Benchmarks - -### Running benchmarks - -Install dependencies for running benchmarks: - -```sh -$ cd bench && npm install -``` - -Run the benchmarks: - -```sh -$ npm run bench -``` - -### Latest results - -As of August 23, 2024 (longer bars are better): - -```sh -# .makeRe star - micromatch x 2,232,802 ops/sec ±2.34% (89 runs sampled)) - minimatch x 781,018 ops/sec ±6.74% (92 runs sampled)) - -# .makeRe star; dot=true - micromatch x 1,863,453 ops/sec ±0.74% (93 runs sampled) - minimatch x 723,105 ops/sec ±0.75% (93 runs sampled) - -# .makeRe globstar - micromatch x 1,624,179 ops/sec ±2.22% (91 runs sampled) - minimatch x 1,117,230 ops/sec ±2.78% (86 runs sampled)) - -# .makeRe globstars - micromatch x 1,658,642 ops/sec ±0.86% (92 runs sampled) - minimatch x 741,224 ops/sec ±1.24% (89 runs sampled)) - -# .makeRe with leading star - micromatch x 1,525,014 ops/sec ±1.63% (90 runs sampled) - minimatch x 561,074 ops/sec ±3.07% (89 runs sampled) - -# .makeRe - braces - micromatch x 172,478 ops/sec ±2.37% (78 runs sampled) - minimatch x 96,087 ops/sec ±2.34% (88 runs sampled))) - -# .makeRe braces - range (expanded) - micromatch x 26,973 ops/sec ±0.84% (89 runs sampled) - minimatch x 3,023 ops/sec ±0.99% (90 runs sampled)) - -# .makeRe braces - range (compiled) - micromatch x 152,892 ops/sec ±1.67% (83 runs sampled) - minimatch x 992 ops/sec ±3.50% (89 runs sampled)d)) - -# .makeRe braces - nested ranges (expanded) - micromatch x 15,816 ops/sec ±13.05% (80 runs sampled) - minimatch x 2,953 ops/sec ±1.64% (91 runs sampled) - -# .makeRe braces - nested ranges (compiled) - micromatch x 110,881 ops/sec ±1.85% (82 runs sampled) - minimatch x 1,008 ops/sec ±1.51% (91 runs sampled) - -# .makeRe braces - set (compiled) - micromatch x 134,930 ops/sec ±3.54% (63 runs sampled)) - minimatch x 43,242 ops/sec ±0.60% (93 runs sampled) - -# .makeRe braces - nested sets (compiled) - micromatch x 94,455 ops/sec ±1.74% (69 runs sampled)) - minimatch x 27,720 ops/sec ±1.84% (93 runs sampled)) -``` - -## Contributing - -All contributions are welcome! Please read [the contributing guide](.github/contributing.md) to get started. - -**Bug reports** - -Please create an issue if you encounter a bug or matching behavior that doesn't seem correct. 
If you find a matching-related issue, please: - -* [research existing issues first](../../issues) (open and closed) -* visit the [GNU Bash documentation](https://www.gnu.org/software/bash/manual/) to see how Bash deals with the pattern -* visit the [minimatch](https://github.com/isaacs/minimatch) documentation to cross-check expected behavior in node.js -* if all else fails, since there is no real specification for globs we will probably need to discuss expected behavior and decide how to resolve it, which means any detail you can provide to help with this discussion would be greatly appreciated. - -**Platform issues** - -It's important to us that micromatch work consistently on all platforms. If you encounter any platform-specific matching or path-related issues, please let us know (pull requests are also greatly appreciated). - -## About - -
-### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. - -
- -
-### Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-### Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [braces](https://www.npmjs.com/package/braces): Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support… [more](https://github.com/micromatch/braces) | [homepage](https://github.com/micromatch/braces "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.") -* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/micromatch/expand-brackets "Expand POSIX bracket expressions (character classes) in glob patterns.") -* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") -* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") -* [nanomatch](https://www.npmjs.com/package/nanomatch): Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash… [more](https://github.com/micromatch/nanomatch) | [homepage](https://github.com/micromatch/nanomatch "Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 523 | [jonschlinkert](https://github.com/jonschlinkert) | -| 12 | [es128](https://github.com/es128) | -| 9 | [danez](https://github.com/danez) | -| 8 | [doowb](https://github.com/doowb) | -| 6 | [paulmillr](https://github.com/paulmillr) | -| 5 | [mrmlnc](https://github.com/mrmlnc) | -| 3 | [DrPizza](https://github.com/DrPizza) | -| 2 | [Tvrqvoise](https://github.com/Tvrqvoise) | -| 2 | [antonyk](https://github.com/antonyk) | -| 2 | [MartinKolarik](https://github.com/MartinKolarik) | -| 2 | [Glazy](https://github.com/Glazy) | -| 2 | [mceIdo](https://github.com/mceIdo) | -| 2 | [TrySound](https://github.com/TrySound) | -| 1 | [yvele](https://github.com/yvele) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | -| 1 | [simlu](https://github.com/simlu) | -| 1 | [curbengh](https://github.com/curbengh) | -| 1 | [fidian](https://github.com/fidian) | -| 1 | [tomByrer](https://github.com/tomByrer) | -| 1 | [ZoomerTedJackson](https://github.com/ZoomerTedJackson) | -| 1 | [styfle](https://github.com/styfle) | -| 1 | [sebdeckers](https://github.com/sebdeckers) | -| 1 | [muescha](https://github.com/muescha) | -| 1 | [juszczykjakub](https://github.com/juszczykjakub) | -| 1 | [joyceerhl](https://github.com/joyceerhl) | -| 1 | [donatj](https://github.com/donatj) | -| 1 | [frangio](https://github.com/frangio) | -| 1 | [UltCombo](https://github.com/UltCombo) | -| 1 | [DianeLooney](https://github.com/DianeLooney) | -| 1 | [devongovett](https://github.com/devongovett) | -| 1 | [Cslove](https://github.com/Cslove) | -| 1 | [amilajack](https://github.com/amilajack) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2024, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on August 23, 2024._ \ No newline at end of file diff --git a/node_modules/micromatch/index.js b/node_modules/micromatch/index.js deleted file mode 100644 index cb9d9ef..0000000 --- a/node_modules/micromatch/index.js +++ /dev/null @@ -1,474 +0,0 @@ -'use strict'; - -const util = require('util'); -const braces = require('braces'); -const picomatch = require('picomatch'); -const utils = require('picomatch/lib/utils'); - -const isEmptyString = v => v === '' || v === './'; -const hasBraces = v => { - const index = v.indexOf('{'); - return index > -1 && v.indexOf('}', index) > -1; -}; - -/** - * Returns an array of strings that match one or more glob patterns. - * - * ```js - * const mm = require('micromatch'); - * // mm(list, patterns[, options]); - * - * console.log(mm(['a.js', 'a.txt'], ['*.js'])); - * //=> [ 'a.js' ] - * ``` - * @param {String|Array} `list` List of strings to match. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
- * @param {Object} `options` See available [options](#options) - * @return {Array} Returns an array of matches - * @summary false - * @api public - */ - -const micromatch = (list, patterns, options) => { - patterns = [].concat(patterns); - list = [].concat(list); - - let omit = new Set(); - let keep = new Set(); - let items = new Set(); - let negatives = 0; - - let onResult = state => { - items.add(state.output); - if (options && options.onResult) { - options.onResult(state); - } - }; - - for (let i = 0; i < patterns.length; i++) { - let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); - let negated = isMatch.state.negated || isMatch.state.negatedExtglob; - if (negated) negatives++; - - for (let item of list) { - let matched = isMatch(item, true); - - let match = negated ? !matched.isMatch : matched.isMatch; - if (!match) continue; - - if (negated) { - omit.add(matched.output); - } else { - omit.delete(matched.output); - keep.add(matched.output); - } - } - } - - let result = negatives === patterns.length ? [...items] : [...keep]; - let matches = result.filter(item => !omit.has(item)); - - if (options && matches.length === 0) { - if (options.failglob === true) { - throw new Error(`No matches found for "${patterns.join(', ')}"`); - } - - if (options.nonull === true || options.nullglob === true) { - return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; - } - } - - return matches; -}; - -/** - * Backwards compatibility - */ - -micromatch.match = micromatch; - -/** - * Returns a matcher function from the given glob `pattern` and `options`. - * The returned function takes a string to match as its only argument and returns - * true if the string is a match. - * - * ```js - * const mm = require('micromatch'); - * // mm.matcher(pattern[, options]); - * - * const isMatch = mm.matcher('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @param {String} `pattern` Glob pattern - * @param {Object} `options` - * @return {Function} Returns a matcher function. - * @api public - */ - -micromatch.matcher = (pattern, options) => picomatch(pattern, options); - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const mm = require('micromatch'); - * // mm.isMatch(string, patterns[, options]); - * - * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(mm.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `[options]` See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Backwards compatibility - */ - -micromatch.any = micromatch.isMatch; - -/** - * Returns a list of strings that _**do not match any**_ of the given `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.not(list, patterns[, options]); - * - * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); - * //=> ['b.b', 'c.c'] - * ``` - * @param {Array} `list` Array of strings to match. - * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
- * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array} Returns an array of strings that **do not match** the given patterns. - * @api public - */ - -micromatch.not = (list, patterns, options = {}) => { - patterns = [].concat(patterns).map(String); - let result = new Set(); - let items = []; - - let onResult = state => { - if (options.onResult) options.onResult(state); - items.push(state.output); - }; - - let matches = new Set(micromatch(list, patterns, { ...options, onResult })); - - for (let item of items) { - if (!matches.has(item)) { - result.add(item); - } - } - return [...result]; -}; - -/** - * Returns true if the given `string` contains the given pattern. Similar - * to [.isMatch](#isMatch) but the pattern can match any part of the string. - * - * ```js - * var mm = require('micromatch'); - * // mm.contains(string, pattern[, options]); - * - * console.log(mm.contains('aa/bb/cc', '*b')); - * //=> true - * console.log(mm.contains('aa/bb/cc', '*d')); - * //=> false - * ``` - * @param {String} `str` The string to match. - * @param {String|Array} `patterns` Glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any of the patterns matches any part of `str`. - * @api public - */ - -micromatch.contains = (str, pattern, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - if (Array.isArray(pattern)) { - return pattern.some(p => micromatch.contains(str, p, options)); - } - - if (typeof pattern === 'string') { - if (isEmptyString(str) || isEmptyString(pattern)) { - return false; - } - - if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { - return true; - } - } - - return micromatch.isMatch(str, pattern, { ...options, contains: true }); -}; - -/** - * Filter the keys of the given object with the given `glob` pattern - * and `options`. Does not attempt to match nested keys. If you need this feature, - * use [glob-object][] instead. - * - * ```js - * const mm = require('micromatch'); - * // mm.matchKeys(object, patterns[, options]); - * - * const obj = { aa: 'a', ab: 'b', ac: 'c' }; - * console.log(mm.matchKeys(obj, '*b')); - * //=> { ab: 'b' } - * ``` - * @param {Object} `object` The object with keys to filter. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Object} Returns an object with only keys that match the given patterns. - * @api public - */ - -micromatch.matchKeys = (obj, patterns, options) => { - if (!utils.isObject(obj)) { - throw new TypeError('Expected the first argument to be an object'); - } - let keys = micromatch(Object.keys(obj), patterns, options); - let res = {}; - for (let key of keys) res[key] = obj[key]; - return res; -}; - -/** - * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.some(list, patterns[, options]); - * - * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // true - * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
- * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` - * @api public - */ - -micromatch.some = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (items.some(item => isMatch(item))) { - return true; - } - } - return false; -}; - -/** - * Returns true if every string in the given `list` matches - * any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.every(list, patterns[, options]); - * - * console.log(mm.every('foo.js', ['foo.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // false - * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` - * @api public - */ - -micromatch.every = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (!items.every(item => isMatch(item))) { - return false; - } - } - return true; -}; - -/** - * Returns true if **all** of the given `patterns` match - * the specified string. - * - * ```js - * const mm = require('micromatch'); - * // mm.all(string, patterns[, options]); - * - * console.log(mm.all('foo.js', ['foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); - * // false - * - * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); - * // true - * ``` - * @param {String|Array} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.all = (str, patterns, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - return [].concat(patterns).every(p => picomatch(p, options)(str)); -}; - -/** - * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. - * - * ```js - * const mm = require('micromatch'); - * // mm.capture(pattern, string[, options]); - * - * console.log(mm.capture('test/*.js', 'test/foo.js')); - * //=> ['foo'] - * console.log(mm.capture('test/*.js', 'foo/bar.css')); - * //=> null - * ``` - * @param {String} `glob` Glob pattern to use for matching. - * @param {String} `input` String to match - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
- * @api public - */ - -micromatch.capture = (glob, input, options) => { - let posix = utils.isWindows(options); - let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); - let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); - - if (match) { - return match.slice(1).map(v => v === void 0 ? '' : v); - } -}; - -/** - * Create a regular expression from the given glob `pattern`. - * - * ```js - * const mm = require('micromatch'); - * // mm.makeRe(pattern[, options]); - * - * console.log(mm.makeRe('*.js')); - * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ - * ``` - * @param {String} `pattern` A glob pattern to convert to regex. - * @param {Object} `options` - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -micromatch.makeRe = (...args) => picomatch.makeRe(...args); - -/** - * Scan a glob pattern to separate the pattern into segments. Used - * by the [split](#split) method. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.scan(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -micromatch.scan = (...args) => picomatch.scan(...args); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.parse(pattern[, options]); - * ``` - * @param {String} `glob` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as regex source string. - * @api public - */ - -micromatch.parse = (patterns, options) => { - let res = []; - for (let pattern of [].concat(patterns || [])) { - for (let str of braces(String(pattern), options)) { - res.push(picomatch.parse(str, options)); - } - } - return res; -}; - -/** - * Process the given brace `pattern`. - * - * ```js - * const { braces } = require('micromatch'); - * console.log(braces('foo/{a,b,c}/bar')); - * //=> [ 'foo/(a|b|c)/bar' ] - * - * console.log(braces('foo/{a,b,c}/bar', { expand: true })); - * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] - * ``` - * @param {String} `pattern` String with brace pattern to process. - * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. - * @return {Array} - * @api public - */ - -micromatch.braces = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - if ((options && options.nobrace === true) || !hasBraces(pattern)) { - return [pattern]; - } - return braces(pattern, options); -}; - -/** - * Expand braces - */ - -micromatch.braceExpand = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - return micromatch.braces(pattern, { ...options, expand: true }); -}; - -/** - * Expose micromatch - */ - -// exposed for tests -micromatch.hasBraces = hasBraces; -module.exports = micromatch; diff --git a/node_modules/micromatch/package.json b/node_modules/micromatch/package.json deleted file mode 100644 index d5558bb..0000000 --- a/node_modules/micromatch/package.json +++ /dev/null @@ -1,119 +0,0 @@ -{ - "name": "micromatch", - "description": "Glob matching for javascript/node.js. 
A replacement and faster alternative to minimatch and multimatch.", - "version": "4.0.8", - "homepage": "https://github.com/micromatch/micromatch", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "(https://github.com/DianeLooney)", - "Amila Welihinda (amilajack.com)", - "Bogdan Chadkin (https://github.com/TrySound)", - "Brian Woodward (https://twitter.com/doowb)", - "Devon Govett (http://badassjs.com)", - "Elan Shanker (https://github.com/es128)", - "Fabrício Matté (https://ultcombo.js.org)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Martin Kolárik (https://kolarik.sk)", - "Olsten Larck (https://i.am.charlike.online)", - "Paul Miller (paulmillr.com)", - "Tom Byrer (https://github.com/tomByrer)", - "Tyler Akins (http://rumkin.com)", - "Peter Bright (https://github.com/drpizza)", - "Kuba Juszczyk (https://github.com/ku8ar)" - ], - "repository": "micromatch/micromatch", - "bugs": { - "url": "https://github.com/micromatch/micromatch/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8.6" - }, - "scripts": { - "test": "mocha" - }, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "devDependencies": { - "fill-range": "^7.0.1", - "gulp-format-md": "^2.0.0", - "minimatch": "^5.0.1", - "mocha": "^9.2.2", - "time-require": "github:jonschlinkert/time-require" - }, - "keywords": [ - "bash", - "bracket", - "character-class", - "expand", - "expansion", - "expression", - "extglob", - "extglobs", - "file", - "files", - "filter", - "find", - "glob", - "globbing", - "globs", - "globstar", - "lookahead", - "lookaround", - "lookbehind", - "match", - "matcher", - "matches", - "matching", - "micromatch", - "minimatch", - "multimatch", - "negate", - "negation", - "path", - "pattern", - "patterns", - "posix", - "regex", - "regexp", - "regular", - "shell", - "star", - "wildcard" - ], - "verb": { - "toc": "collapsible", - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "related": { - "list": [ - "braces", - "expand-brackets", - "extglob", - "fill-range", - "nanomatch" - ] - }, - "reflinks": [ - "extglob", - "fill-range", - "glob-object", - "minimatch", - "multimatch" - ] - } -} diff --git a/node_modules/mimic-fn/index.d.ts b/node_modules/mimic-fn/index.d.ts deleted file mode 100644 index b4047d5..0000000 --- a/node_modules/mimic-fn/index.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -declare const mimicFn: { - /** - Make a function mimic another one. It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. - - @param to - Mimicking function. - @param from - Function to mimic. - @returns The modified `to` function. 
- - @example - ``` - import mimicFn = require('mimic-fn'); - - function foo() {} - foo.unicorn = '🦄'; - - function wrapper() { - return foo(); - } - - console.log(wrapper.name); - //=> 'wrapper' - - mimicFn(wrapper, foo); - - console.log(wrapper.name); - //=> 'foo' - - console.log(wrapper.unicorn); - //=> '🦄' - ``` - */ - < - ArgumentsType extends unknown[], - ReturnType, - FunctionType extends (...arguments: ArgumentsType) => ReturnType - >( - to: (...arguments: ArgumentsType) => ReturnType, - from: FunctionType - ): FunctionType; - - // TODO: Remove this for the next major release, refactor the whole definition to: - // declare function mimicFn< - // ArgumentsType extends unknown[], - // ReturnType, - // FunctionType extends (...arguments: ArgumentsType) => ReturnType - // >( - // to: (...arguments: ArgumentsType) => ReturnType, - // from: FunctionType - // ): FunctionType; - // export = mimicFn; - default: typeof mimicFn; -}; - -export = mimicFn; diff --git a/node_modules/mimic-fn/index.js b/node_modules/mimic-fn/index.js deleted file mode 100644 index 1a59705..0000000 --- a/node_modules/mimic-fn/index.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict'; - -const mimicFn = (to, from) => { - for (const prop of Reflect.ownKeys(from)) { - Object.defineProperty(to, prop, Object.getOwnPropertyDescriptor(from, prop)); - } - - return to; -}; - -module.exports = mimicFn; -// TODO: Remove this for the next major release -module.exports.default = mimicFn; diff --git a/node_modules/mimic-fn/license b/node_modules/mimic-fn/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/mimic-fn/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/mimic-fn/package.json b/node_modules/mimic-fn/package.json deleted file mode 100644 index 199d2c7..0000000 --- a/node_modules/mimic-fn/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "mimic-fn", - "version": "2.1.0", - "description": "Make a function mimic another one", - "license": "MIT", - "repository": "sindresorhus/mimic-fn", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "function", - "mimic", - "imitate", - "rename", - "copy", - "inherit", - "properties", - "name", - "func", - "fn", - "set", - "infer", - "change" - ], - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/mimic-fn/readme.md b/node_modules/mimic-fn/readme.md deleted file mode 100644 index 0ef8a13..0000000 --- a/node_modules/mimic-fn/readme.md +++ /dev/null @@ -1,69 +0,0 @@ -# mimic-fn [![Build Status](https://travis-ci.org/sindresorhus/mimic-fn.svg?branch=master)](https://travis-ci.org/sindresorhus/mimic-fn) - -> Make a function mimic another one - -Useful when you wrap a function in another function and like to preserve the original name and other properties. - - -## Install - -``` -$ npm install mimic-fn -``` - - -## Usage - -```js -const mimicFn = require('mimic-fn'); - -function foo() {} -foo.unicorn = '🦄'; - -function wrapper() { - return foo(); -} - -console.log(wrapper.name); -//=> 'wrapper' - -mimicFn(wrapper, foo); - -console.log(wrapper.name); -//=> 'foo' - -console.log(wrapper.unicorn); -//=> '🦄' -``` - - -## API - -It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. - -### mimicFn(to, from) - -Modifies the `to` function and returns it. - -#### to - -Type: `Function` - -Mimicking function. - -#### from - -Type: `Function` - -Function to mimic. 
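- -As a small additional sketch (not part of the original readme), the copying also applies to the built-in `length` property listed above, which is why a wrapper reports the original arity after `mimicFn` is applied: - -```js -const mimicFn = require('mimic-fn'); - -function add(a, b) { - return a + b; -} - -const wrapped = (...args) => add(...args); -console.log(wrapped.length); //=> 0 - -mimicFn(wrapped, add); -console.log(wrapped.name); //=> 'add' -console.log(wrapped.length); //=> 2 -``` -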
- - -## Related - -- [rename-fn](https://github.com/sindresorhus/rename-fn) - Rename a function -- [keep-func-props](https://github.com/ehmicky/keep-func-props) - Wrap a function without changing its name, length and other properties - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/.travis.yml b/node_modules/npm-mas-mas/.travis.yml deleted file mode 100644 index 5c04817..0000000 --- a/node_modules/npm-mas-mas/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: c -services: docker -os: linux -env: - - PACKAGE=python TARGET=linux64 MODE=Debug - - PACKAGE=dune-freetype TARGET=linux64 MODE=Debug - - PACKAGE=haxx-libcurl TARGET=linux64 MODE=Debug - - PACKAGE=fmod TARGET=linux64 MODE=Debug - - PACKAGE=intel-tbb TARGET=linux64 MODE=Debug - - PACKAGE=cryptopp TARGET=linux64 MODE=Debug - - PACKAGE=ois TARGET=linux64 MODE=Debug - - PACKAGE=bullet2 TARGET=linux64 MODE=Debug -script: - - PACKAGE=$PACKAGE make $TARGET - diff --git a/node_modules/npm-mas-mas/LICENSE b/node_modules/npm-mas-mas/LICENSE deleted file mode 100644 index 9e0fb24..0000000 --- a/node_modules/npm-mas-mas/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/Makefile b/node_modules/npm-mas-mas/Makefile deleted file mode 100644 index bfa5464..0000000 --- a/node_modules/npm-mas-mas/Makefile +++ /dev/null @@ -1,21 +0,0 @@ -PACKAGE ?= . -MODE ?= Debug - -all: clean build - -build: - (cd cmaki_identifier && npm install --unsafe-perm) - (cd cmaki_generator && ./build ${PACKAGE} -d) - -clean: - (cd cmaki_identifier && rm -Rf bin rm -Rf artifacts) - -linux64: - docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} linux64 make - -windows64: - docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} windows64 make - -android64: - docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} android64 make - diff --git a/node_modules/npm-mas-mas/README b/node_modules/npm-mas-mas/README deleted file mode 100644 index 75a7863..0000000 --- a/node_modules/npm-mas-mas/README +++ /dev/null @@ -1,57 +0,0 @@ -# fusion projects in one product - -- cmaki -- cmaki_scripts -- cmaki_identifier -- cmaki_docker -- cmaki_generator -- servfactor - -# environment variables -- Artifact server: -- NPP_SERVER = http://.... - -- Build mode: -- NPP_MODE = Debug, Release ....
- -- Installation directory: -- NPP_INSTALL - -- Use cached artifacts or always compile: -- NPP_CACHE=TRUE/FALSE - - - - - - -refactor cmake ------------------------------------ - -cmaki_library ---------> npp_shared -cmaki_static_library --> npp_static -cmaki_executable ------> npp_executable -cmaki_test ------------> npp_test -cmaki_google_test -----> npp_google_test -cmaki_python_test -----> npp_python_test - - - -Usage commands ------------- -npm install -npm test - -npm run create # create package -npm run upload # upload package - - -windows environment ------------------- -visual studio 2019 -mini conda -npm -cmake -pip install conan -chocolatey -choco install tortoisegit diff --git a/node_modules/npm-mas-mas/cmaki/.travis.yml b/node_modules/npm-mas-mas/cmaki/.travis.yml deleted file mode 100644 index 44de95c..0000000 --- a/node_modules/npm-mas-mas/cmaki/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: c -services: docker -os: linux -script: - - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki/GitUtils.cmake b/node_modules/npm-mas-mas/cmaki/GitUtils.cmake deleted file mode 100644 index 4bfc61e..0000000 --- a/node_modules/npm-mas-mas/cmaki/GitUtils.cmake +++ /dev/null @@ -1,157 +0,0 @@ -cmake_minimum_required(VERSION 2.8.7) - -include("${CMAKE_CURRENT_LIST_DIR}/Utils.cmake") -include(CMakeParseArguments) - -find_package(Git) -if(NOT GIT_FOUND) - message(FATAL_ERROR "git not found!") -endif() - - -# clone a git repo into a directory at configure time -# this can be useful for including cmake-library projects that contain *.cmake files -# the function will automatically init git submodules too -# -# ATTENTION: CMakeLists-files in the cloned repo will NOT be built automatically -# -# why not use ExternalProject_Add you ask? because we need to run this at configure time -# -# USAGE: -# git_clone( -# PROJECT_NAME -# GIT_URL -# [GIT_TAG|GIT_BRANCH|GIT_COMMIT ] -# [DIRECTORY ] -# [QUIET] -# ) -# -# -# ARGUMENTS: -# PROJECT_NAME -# name of the project that will be used in output variables.
-# must be the same as the git directory/repo name -# -# GIT_URL -# url to the git repo -# -# GIT_TAG|GIT_BRANCH|GIT_COMMIT -# optional -# the tag/branch/commit to checkout -# default is master -# -# DIRECTORY -# optional -# the directory the project will be cloned into -# default is the build directory, similar to ExternalProject (${CMAKE_BINARY_DIR}) -# -# QUIET -# optional -# don't print status messages -# -# -# OUTPUT VARIABLES: -# _SOURCE_DIR -# top level source directory of the cloned project -# -# -# EXAMPLE: -# git_clone( -# PROJECT_NAME testProj -# GIT_URL https://github.com/test/test.git -# GIT_COMMIT a1b2c3 -# DIRECTORY ${CMAKE_BINARY_DIR} -# QUIET -# ) -# -# include(${testProj_SOURCE_DIR}/cmake/myFancyLib.cmake) - -function(cmaki_git_clone) - - cmake_parse_arguments( - PARGS # prefix of output variables - "QUIET" # list of names of the boolean arguments (only defined ones will be true) - "PROJECT_NAME;GIT_URL;GIT_TAG;GIT_BRANCH;GIT_COMMIT;DIRECTORY" # list of names of mono-valued arguments - "" # list of names of multi-valued arguments (output variables are lists) - ${ARGN} # arguments of the function to parse, here we take the all original ones - ) # remaining unparsed arguments can be found in PARGS_UNPARSED_ARGUMENTS - - if(NOT PARGS_PROJECT_NAME) - message(FATAL_ERROR "You must provide a project name") - endif() - - if(NOT PARGS_GIT_URL) - message(FATAL_ERROR "You must provide a git url") - endif() - - if(NOT PARGS_DIRECTORY) - set(PARGS_DIRECTORY ${CMAKE_BINARY_DIR}) - endif() - - set(${PARGS_PROJECT_NAME}_SOURCE_DIR - ${PARGS_DIRECTORY}/${PARGS_PROJECT_NAME} - CACHE INTERNAL "" FORCE) # makes var visible everywhere because PARENT_SCOPE wouldn't include this scope - - set(SOURCE_DIR ${PARGS_PROJECT_NAME}_SOURCE_DIR) - - # check that only one of GIT_TAG xor GIT_BRANCH xor GIT_COMMIT was passed - at_most_one(at_most_one_tag ${PARGS_GIT_TAG} ${PARGS_GIT_BRANCH} ${PARGS_GIT_COMMIT}) - - if(NOT at_most_one_tag) - message(FATAL_ERROR "you can only provide one of GIT_TAG, GIT_BRANCH or GIT_COMMIT") - endif() - - if(NOT PARGS_QUIET) - message(STATUS "downloading/updating ${PARGS_PROJECT_NAME}") - endif() - - # first clone the repo - if(EXISTS ${${SOURCE_DIR}}) - if(NOT PARGS_QUIET) - message(STATUS "${PARGS_PROJECT_NAME} directory found, pulling...") - endif() - - execute_process( - COMMAND ${GIT_EXECUTABLE} pull origin master - COMMAND ${GIT_EXECUTABLE} submodule update --remote - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - else() - if(NOT PARGS_QUIET) - message(STATUS "${PARGS_PROJECT_NAME} directory not found, cloning...") - endif() - - execute_process( - COMMAND ${GIT_EXECUTABLE} clone ${PARGS_GIT_URL} --recursive - WORKING_DIRECTORY ${PARGS_DIRECTORY} - OUTPUT_VARIABLE git_output) - endif() - - if(NOT PARGS_QUIET) - message("${git_output}") - endif() - - # now checkout the right commit - if(PARGS_GIT_TAG) - execute_process( - COMMAND ${GIT_EXECUTABLE} fetch --all --tags --prune - COMMAND ${GIT_EXECUTABLE} checkout tags/${PARGS_GIT_TAG} -b tag_${PARGS_GIT_TAG} - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - elseif(PARGS_GIT_BRANCH OR PARGS_GIT_COMMIT) - execute_process( - COMMAND ${GIT_EXECUTABLE} checkout ${PARGS_GIT_BRANCH} - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - else() - message(STATUS "no tag specified, defaulting to master") - execute_process( - COMMAND ${GIT_EXECUTABLE} checkout master - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - endif() - - if(NOT PARGS_QUIET) - 
message("${git_output}") - endif() -endfunction() diff --git a/node_modules/npm-mas-mas/cmaki/LICENSE b/node_modules/npm-mas-mas/cmaki/LICENSE deleted file mode 100644 index 7e79e4d..0000000 --- a/node_modules/npm-mas-mas/cmaki/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/npm-mas-mas/cmaki/README.md b/node_modules/npm-mas-mas/cmaki/README.md deleted file mode 100644 index 9d7b1b0..0000000 --- a/node_modules/npm-mas-mas/cmaki/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# :construction: I am under construction [![npm version](https://badge.fury.io/js/cmaki.svg)](https://badge.fury.io/js/cmaki) -Don't use it [![Build Status](https://travis-ci.org/makiolo/cmaki.svg?branch=master)](https://travis-ci.org/makiolo/cmaki) -# quick -bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/bootstrap.sh) diff --git a/node_modules/npm-mas-mas/cmaki/Utils.cmake b/node_modules/npm-mas-mas/cmaki/Utils.cmake deleted file mode 100644 index a76708c..0000000 --- a/node_modules/npm-mas-mas/cmaki/Utils.cmake +++ /dev/null @@ -1,32 +0,0 @@ -# returns true if only a single one of its arguments is true -function(xor result) - set(true_args_count 0) - - foreach(foo ${ARGN}) - if(foo) - math(EXPR true_args_count "${true_args_count}+1") - endif() - endforeach() - - if(NOT (${true_args_count} EQUAL 1)) - set(${result} FALSE PARENT_SCOPE) - else() - set(${result} TRUE PARENT_SCOPE) - endif() -endfunction() - -function(at_most_one result) - set(true_args_count 0) - - foreach(foo ${ARGN}) - if(foo) - math(EXPR true_args_count "${true_args_count}+1") - endif() - endforeach() - - if(${true_args_count} GREATER 1) - set(${result} FALSE PARENT_SCOPE) - else() - set(${result} TRUE PARENT_SCOPE) - endif() -endfunction() diff --git a/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh b/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh deleted file mode 100755 index faeadbd..0000000 --- a/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash - -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" - -if [[ "$WINEARCH" = "win32" ]]; then - wine $CMAKI_INSTALL/cmaki_identifier.exe -else - $CMAKI_EMULATOR $CMAKI_INSTALL/cmaki_identifier -fi - diff --git a/node_modules/npm-mas-mas/cmaki/cmaki.cmake 
b/node_modules/npm-mas-mas/cmaki/cmaki.cmake deleted file mode 100644 index 74b034f..0000000 --- a/node_modules/npm-mas-mas/cmaki/cmaki.cmake +++ /dev/null @@ -1,529 +0,0 @@ -if(NOT DEFINED CMAKE_MODULE_PATH) - set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}) -endif() - -IF(NOT DEFINED CMAKI_PATH) - set(CMAKI_PATH ${CMAKE_CURRENT_LIST_DIR}) -ENDIF() - -include("${CMAKE_CURRENT_LIST_DIR}/facts/facts.cmake") -include("${CMAKE_CURRENT_LIST_DIR}/GitUtils.cmake") - -option(FIRST_ERROR "stop on first compilation error" FALSE) - -macro(cmaki_setup) - enable_modern_cpp() - enable_testing() - SET(CMAKE_BUILD_TYPE_INIT Release) - set(CMAKE_CXX_STANDARD 14) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS ON) - IF(WITH_CONAN) - # Conan - message("-- Using conan dir: ${CMAKE_BINARY_DIR}") - include("${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") - conan_basic_setup() - ENDIF() -endmacro() - -macro (mark_as_internal _var) - set(${_var} ${${_var}} CACHE INTERNAL "hide this!" FORCE) -endmacro(mark_as_internal _var) - -macro (option_combobox _var options default_option comment) - set(${_var} "${default_option}" CACHE STRING "${comment}") - set(${_var}Values "${options}" CACHE INTERNAL "hide this!" FORCE) - set_property(CACHE ${_var} PROPERTY STRINGS ${${_var}Values}) -endmacro() - -function(cmaki_install_file FROM) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_install_file_into FROM TO) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_install_file_and_rename FROM NEWNAME) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) - endforeach() -endfunction() - -function(cmaki_install_file_into_and_rename FROM TO NEWNAME) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) - endforeach() -endfunction() - -function(cmaki_install_files FROM) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - FILE(GLOB files ${FROM}) - INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_install_files_into FROM TO) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - FILE(GLOB files ${FROM}) - INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -macro(cmaki_install_inside_dir _DESTINE) - file(GLOB DEPLOY_FILES_AND_DIRS "${_DESTINE}/*") - foreach(ITEM ${DEPLOY_FILES_AND_DIRS}) - IF( IS_DIRECTORY "${ITEM}" ) - LIST( APPEND DIRS_TO_DEPLOY "${ITEM}" ) - ELSE() - IF(ITEM STREQUAL "${_DESTINE}/CMakeLists.txt") - MESSAGE("skipped file: ${_DESTINE}/CMakeLists.txt") - ELSE() - LIST(APPEND FILES_TO_DEPLOY "${ITEM}") - ENDIF() - ENDIF() - endforeach() - INSTALL(FILES ${FILES_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) - INSTALL(DIRECTORY ${DIRS_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) -endmacro() - -macro(cmaki_install_dir _DESTINE) - INSTALL(DIRECTORY ${_DESTINE} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) -endmacro() - 
-macro(cmaki_parse_parameters) - set(PARAMETERS ${ARGV}) - list(GET PARAMETERS 0 _MAIN_NAME) - list(REMOVE_AT PARAMETERS 0) - SET(HAVE_TESTS FALSE) - SET(HAVE_PCH FALSE) - SET(HAVE_PTHREADS FALSE) - set(_DEPENDS) - set(_SOURCES) - set(_TESTS) - set(_PCH) - set(_INCLUDES) - set(_SUFFIX_DESTINATION) - set(NOW_IN SOURCES) - while(PARAMETERS) - list(GET PARAMETERS 0 PARM) - if(PARM STREQUAL DEPENDS) - set(NOW_IN DEPENDS) - elseif(PARM STREQUAL SOURCES) - set(NOW_IN SOURCES) - elseif(PARM STREQUAL TESTS) - set(NOW_IN TESTS) - elseif(PARM STREQUAL PCH) - set(NOW_IN PCH) - elseif(PARM STREQUAL PTHREADS) - if(NOT WIN32) - # no enabled in windows - set(HAVE_PTHREADS TRUE) - endif() - elseif(PARM STREQUAL INCLUDES) - set(NOW_IN INCLUDES) - elseif(PARM STREQUAL DESTINATION) - set(NOW_IN DESTINATION) - else() - if(NOW_IN STREQUAL DEPENDS) - set(_DEPENDS ${_DEPENDS} ${PARM}) - elseif(NOW_IN STREQUAL SOURCES) - set(_SOURCES ${_SOURCES} ${PARM}) - elseif(NOW_IN STREQUAL TESTS) - set(_TESTS ${_TESTS} ${PARM}) - SET(HAVE_TESTS TRUE) - elseif(NOW_IN STREQUAL PCH) - set(_PCH ${PARM}) - SET(HAVE_PCH TRUE) - elseif(NOW_IN STREQUAL INCLUDES) - set(_INCLUDES ${_INCLUDES} ${PARM}) - elseif(NOW_IN STREQUAL DESTINATION) - set(_SUFFIX_DESTINATION ${PARM}) - else() - message(FATAL_ERROR "Unknown argument ${PARM}.") - endif() - endif() - list(REMOVE_AT PARAMETERS 0) - endwhile() -endmacro() - -function(cmaki_simple_executable) - cmaki_parse_parameters(${ARGV}) - set(_EXECUTABLE_NAME ${_MAIN_NAME}) - MESSAGE("++ executable ${_EXECUTABLE_NAME}") - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_EXECUTABLE_NAME}) - foreach(INCLUDE_DIR ${_INCLUDES}) - target_include_directories(${_EXECUTABLE_NAME} ${INCLUDE_DIR}) - endforeach() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - if(WIN32) - ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) - else() - ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) - endif() - target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_EXECUTABLE_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_EXECUTABLE_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() - generate_clang() - -endfunction() - -function(cmaki_simple_library) - cmaki_parse_parameters(${ARGV}) - set(_LIBRARY_NAME ${_MAIN_NAME}) - MESSAGE("++ library ${_LIBRARY_NAME}") - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_LIBRARY_NAME}) - foreach(INCLUDE_DIR ${_INCLUDES}) - target_include_directories(${_LIBRARY_NAME} ${INCLUDE_DIR}) - endforeach() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) - target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_LIBRARY_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_LIBRARY_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - 
CONFIGURATIONS ${BUILD_TYPE}) - endforeach() - generate_clang() - -endfunction() - -function(cmaki_simple_test) - cmaki_parse_parameters(${ARGV}) - set(_TEST_NAME ${_MAIN_NAME}) - common_flags() - common_linking(${_TEST_NAME}) - MESSAGE("++ test ${_TEST_NAME}") - foreach(INCLUDE_DIR ${_INCLUDES}) - target_include_directories(${_TEST_NAME} ${INCLUDE_DIR}) - endforeach() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_executable(${_TEST_NAME} ${_SOURCES}) - target_link_libraries(${_TEST_NAME} ${_DEPENDS}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_TEST_NAME} -lpthread) - endif() - endif() - common_linking(${_TEST_NAME}) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_TEST_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - if(WIN32) - add_test( - NAME ${_TEST_NAME}__ - COMMAND ${_TEST_NAME} - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - else() - - if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) - find_program(VALGRIND "valgrind") - if(VALGRIND) - add_test( - NAME ${_TEST_NAME}_memcheck - COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $<TARGET_FILE:${_TEST_NAME}> - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_cachegrind - COMMAND "${VALGRIND}" --tool=cachegrind $<TARGET_FILE:${_TEST_NAME}> - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_helgrind - COMMAND "${VALGRIND}" --tool=helgrind $<TARGET_FILE:${_TEST_NAME}> - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_callgrind - COMMAND "${VALGRIND}" --tool=callgrind $<TARGET_FILE:${_TEST_NAME}> - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_drd - COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $<TARGET_FILE:${_TEST_NAME}> - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - else() - message(FATAL_ERROR "no valgrind detected") - endif() - else() - add_test( - NAME ${_TEST_NAME}_test - COMMAND bash cmaki_emulator.sh $<TARGET_FILE:${_TEST_NAME}> - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - CONFIGURATIONS ${BUILD_TYPE}) - endif() - endif() - endforeach() - generate_vcxproj_user(${_TEST_NAME}) - generate_clang() - -endfunction() - -macro(common_linking) - - set(PARAMETERS ${ARGV}) - list(GET PARAMETERS 0 TARGET) - # if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) - # target_link_libraries(${TARGET} -lubsan) - # endif() - -endmacro() - -macro(common_flags) - - if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) - add_definitions(/wd4251) - add_definitions(/wd4275) - add_definitions(/wd4239) - add_definitions(/wd4316) - add_definitions(/wd4127) - add_definitions(/wd4245) - add_definitions(/wd4458) - add_definitions(/wd4146) - add_definitions(/wd4244) - add_definitions(/wd4189) - add_definitions(/wd4100) - add_definitions(/wd4706) - add_definitions(/WX /W4) - add_definitions(-Zm200) - endif() - - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") -
endif() - -endmacro() - -macro(enable_modern_cpp) - - if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) - add_definitions(/EHsc) - add_definitions(/D_SCL_SECURE_NO_WARNINGS) - else() - # add_definitions(-fno-rtti -fno-exceptions ) - # activate all warnings and convert in errors - # add_definitions(-Weffc++) - # add_definitions(-pedantic -pedantic-errors) - - # Python: need disabling: initialization discards ‘const’ qualifier from pointer target type - # add_definitions(-Werror) - - add_definitions(-Wall -Wextra -Waggregate-return -Wcast-align -Wcast-qual -Wconversion) - add_definitions(-Wdisabled-optimization -Wformat=2 -Wformat-nonliteral -Wformat-security -Wformat-y2k) - add_definitions(-Wimport -Winit-self -Winline -Winvalid-pch -Wlong-long -Wmissing-field-initializers -Wmissing-format-attribute) - add_definitions(-Wpointer-arith -Wredundant-decls -Wshadow) - add_definitions(-Wstack-protector -Wunreachable-code -Wunused) - add_definitions(-Wunused-parameter -Wvariadic-macros -Wwrite-strings) - add_definitions(-Wswitch-default -Wswitch-enum) - # only gcc - # convert error in warnings - add_definitions(-Wno-error=shadow) - add_definitions(-Wno-error=long-long) - add_definitions(-Wno-error=aggregate-return) - add_definitions(-Wno-error=unused-variable) - add_definitions(-Wno-error=unused-parameter) - add_definitions(-Wno-error=deprecated-declarations) - add_definitions(-Wno-error=missing-include-dirs) - add_definitions(-Wno-error=packed) - add_definitions(-Wno-error=switch-default) - add_definitions(-Wno-error=float-equal) - add_definitions(-Wno-error=invalid-pch) - add_definitions(-Wno-error=cast-qual) - add_definitions(-Wno-error=conversion) - add_definitions(-Wno-error=switch-enum) - add_definitions(-Wno-error=redundant-decls) - add_definitions(-Wno-error=stack-protector) - add_definitions(-Wno-error=extra) - add_definitions(-Wno-error=unused-result) - add_definitions(-Wno-error=sign-compare) - - # raknet - add_definitions(-Wno-error=address) - add_definitions(-Wno-error=cast-qual) - add_definitions(-Wno-error=missing-field-initializers) - add_definitions(-Wno-error=write-strings) - add_definitions(-Wno-error=format-nonliteral) - - # sdl2 - add_definitions(-Wno-error=sign-conversion) - - # TODO: remove - add_definitions(-Wno-error=reorder) - - # if not have openmp - add_definitions(-Wno-error=unknown-pragmas) - - if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") - add_definitions(-Wno-error=suggest-attribute=format) - add_definitions(-Wno-error=suggest-attribute=noreturn) - add_definitions(-Wno-aggregate-return) - add_definitions(-Wno-long-long) - add_definitions(-Wno-shadow) - add_definitions(-Wno-strict-aliasing) - add_definitions(-Wno-error=inline) - add_definitions(-Wno-error=maybe-uninitialized) - add_definitions(-Wno-error=unused-but-set-variable) - add_definitions(-Wno-error=unused-local-typedefs) - # add_definitions(-Wno-error=float-conversion) - else() - add_definitions(-Wstrict-aliasing=2) - add_definitions(-Wno-error=format-nonliteral) - add_definitions(-Wno-error=cast-align) - add_definitions(-Wno-error=deprecated-register) - add_definitions(-Wno-error=mismatched-tags) - add_definitions(-Wno-error=overloaded-virtual) - add_definitions(-Wno-error=unused-private-field) - add_definitions(-Wno-error=unreachable-code) - # add_definitions(-Wno-error=discarded-qualifiers) - endif() - - # In Linux default now is not export symbols - # add_definitions(-fvisibility=hidden) - - # stop in first error - if(FIRST_ERROR) - add_definitions(-Wfatal-errors) - endif() - - endif() - - if (NOT DEFINED EXTRA_DEF) 
- if(NOT WIN32 OR MINGW OR MSYS) - include(CheckCXXCompilerFlag) - CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) - CHECK_CXX_COMPILER_FLAG("-std=c++1y" COMPILER_SUPPORTS_CXX1Y) - CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) - CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) - - if(COMPILER_SUPPORTS_CXX14) - set(CMAKE_CXX_STANDARD 14) - message("-- C++14 Enabled") - elseif(COMPILER_SUPPORTS_CXX11) - set(CMAKE_CXX_STANDARD 11) - message("-- C++11 Enabled") - elseif(COMPILER_SUPPORTS_CXX0X) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") - message("-- C++0x Enabled") - else() - message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") - endif() - endif() - else() - add_definitions(${EXTRA_DEF}) - endif() - - # TODO: need different combinations of artifacts (coverage=off / coverage=on, etc ...) - # if ((DEFINED COVERAGE) AND (COVERAGE STREQUAL "TRUE")) - # https://github.com/google/sanitizers/wiki/AddressSanitizerAsDso - # flags - if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 --coverage") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-elide-constructors") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-inline") - endif() - - # linker flags - if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) - SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage") - endif() - # endif() - -endmacro() - -macro(generate_vcxproj_user _EXECUTABLE_NAME) - IF(MSVC) - set(project_vcxproj_user "${CMAKE_CURRENT_BINARY_DIR}/${_EXECUTABLE_NAME}.vcxproj.user") - if (NOT EXISTS ${project_vcxproj_user}) - FILE(WRITE "${project_vcxproj_user}" - "\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n") - endif() - ENDIF() -endmacro() - -macro(generate_clang) - # Generate .clang_complete for full completation in vim + clang_complete - set(extra_parameters "") - get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES) - foreach(dir ${dirs}) - set(extra_parameters ${extra_parameters} -I${dir}) - endforeach() - get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY COMPILE_DEFINITIONS) - foreach(dir ${dirs}) - set(extra_parameters ${extra_parameters} -D${dir}) - endforeach() - STRING(REGEX REPLACE ";" "\n" extra_parameters "${extra_parameters}") - FILE(WRITE "${CMAKE_CURRENT_SOURCE_DIR}/.clang_complete" "${extra_parameters}\n") -endmacro() diff --git a/node_modules/npm-mas-mas/cmaki/facts/facts.cmake b/node_modules/npm-mas-mas/cmaki/facts/facts.cmake deleted file mode 100644 index b5409fd..0000000 --- a/node_modules/npm-mas-mas/cmaki/facts/facts.cmake +++ /dev/null @@ -1,735 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -cmake_policy(SET CMP0011 NEW) -cmake_policy(SET CMP0045 OLD) - -find_program(PYTHON_EXECUTABLE NAMES python3.6 python3.5 python3 python) - -IF(NOT DEFINED CMAKI_PWD) - set(CMAKI_PWD $ENV{CMAKI_PWD}) -ENDIF() - -IF(NOT DEFINED CMAKI_INSTALL) - set(CMAKI_INSTALL $ENV{CMAKI_INSTALL}) -ENDIF() - -IF(NOT DEFINED NPP_ARTIFACTS_PATH) - set(NPP_ARTIFACTS_PATH ${CMAKI_PWD}/artifacts) -ENDIF() - -IF(NOT DEFINED CMAKE_PREFIX_PATH) - set(CMAKE_PREFIX_PATH ${NPP_ARTIFACTS_PATH}/cmaki_find_package) -ENDIF() - -IF(NOT DEFINED NPP_GENERATOR_PATH) - 
set(NPP_GENERATOR_PATH ${CMAKI_PATH}/../cmaki_generator) -ENDIF() - -IF(NOT DEFINED NPP_PACKAGE_JSON_FILE) - set(NPP_PACKAGE_JSON_FILE ${CMAKI_PATH}/../../artifacts.json) -ENDIF() - -if(NOT DEFINED CMAKI_IDENTIFIER OR NOT DEFINED CMAKI_PLATFORM) - set(ENV{CMAKI_INFO} ALL) - include(${CMAKI_PWD}/bin/cmaki_identifier.cmake) - set(CMAKI_IDENTIFIER "${PLATFORM}") - set(CMAKI_PLATFORM "${PLATFORM}") -endif() - -MESSAGE("CMAKI_PWD = ${CMAKI_PWD}") -MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") -MESSAGE("CMAKI_PATH = ${CMAKI_PATH}") -MESSAGE("NPP_ARTIFACTS_PATH = ${NPP_ARTIFACTS_PATH}") -MESSAGE("NPP_GENERATOR_PATH = ${NPP_GENERATOR_PATH}") -MESSAGE("NPP_PACKAGE_JSON_FILE = ${NPP_PACKAGE_JSON_FILE}") -MESSAGE("CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") -MESSAGE("CMAKE_MODULE_PATH = ${CMAKE_MODULE_PATH}") -MESSAGE("CMAKI_IDENTIFIER = ${CMAKI_IDENTIFIER}") -MESSAGE("CMAKI_PLATFORM = ${CMAKI_PLATFORM}") - -function(cmaki_find_package) - - message("-- begin cmaki_find_package") - - set(PARAMETERS ${ARGV}) - list(LENGTH PARAMETERS ARGV_LENGTH) - list(GET PARAMETERS 0 PACKAGE) - set(VERSION_REQUEST "") - set(CALL_RECURSIVE "TRUE") - set(PARM1 "") - if(ARGV_LENGTH GREATER 1) - list(GET PARAMETERS 1 PARM1) - message("-- extra parm1: ${PARM1}") - if(PARM1 STREQUAL "NONRECURSIVE") - message("${PACKAGE} is not recursive") - set(CALL_RECURSIVE "FALSE") - else() - message("${PACKAGE} is recursive") - set(VERSION_REQUEST "${PARM1}") - endif() - endif() - - IF(NOT DEFINED CMAKI_REPOSITORY) - set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") - ENDIF() - - # 2.5. define flags - set(FORCE_GENERATION NOT "$ENV{NPP_CACHE}") - - if(VERSION_REQUEST STREQUAL "") - ## - message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE}") - ## - # 1. obtener la version actual (o ninguno en caso de no tener el artefacto) - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - set(VERSION_REQUEST "${RESULT_VERSION}") - set(EXTRA_VERSION "--version=${VERSION_REQUEST}") - else() - set(VERSION_REQUEST "") - set(EXTRA_VERSION "") - endif() - - else() - # explicit version required from parameters - set(EXTRA_VERSION "--version=${VERSION_REQUEST}") - endif() - - message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION}") - ####################################################### - # 2. 
obtener la mejor version buscando en la cache local y remota - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - list(GET RESULT_VERSION 0 PACKAGE_MODE) - list(GET RESULT_VERSION 1 PACKAGE_NAME) - list(GET RESULT_VERSION 2 VERSION) - message("now PACKAGE_MODE = ${PACKAGE_MODE}") - message("now PACKAGE_NAME = ${PACKAGE_NAME}") - message("now VERSION = ${VERSION}") - if(PACKAGE_MODE STREQUAL "UNSUITABLE") - set(PACKAGE_MODE "EXACT") - set(VERSION ${VERSION_REQUEST}) - message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... (error 1)") - # avoid remote cache, need build - set(FORCE_GENERATION "TRUE") - endif() - else() - set(PACKAGE_MODE "EXACT") - set(VERSION ${VERSION_REQUEST}) - message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... (error 2)") - # avoid remote cache, need build - set(FORCE_GENERATION "TRUE") - endif() - ####################################################### - - # cmaki_find_package of depends - message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet") - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE DEPENDS_PACKAGES - OUTPUT_STRIP_TRAILING_WHITESPACE) - - if("${CALL_RECURSIVE}") - foreach(DEP ${DEPENDS_PACKAGES}) - if(PACKAGE STREQUAL "${DEP}") - message("-- skip: ${DEP}") - else() - message("-- cmaki_find_package: ${DEP}") - cmaki_find_package("${DEP}" NONRECURSIVE) - endif() - endforeach() - endif() - - get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) - get_filename_component(package_name_version "${package_dir}" NAME) - - # 3. 
si no tengo los ficheros de cmake, los intento descargar - set(artifacts_dir "${NPP_ARTIFACTS_PATH}") - set(depends_bin_package "${artifacts_dir}/${PACKAGE}-${VERSION}") - set(depends_package "${artifacts_dir}/${PACKAGE}-${VERSION}") - # pido un paquete, en funcion de: - # - paquete - # - version - # - plataforma - # - modo (COMPATIBLE / EXACT) - # Recibo el que mejor se adapta a mis especificaciones - # Otra opcion es enviar todos los ficheros de cmake de todas las versiones - - set(package_cmake_filename "${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz") - set(package_marker "${CMAKE_PREFIX_PATH}/${package_name_version}/${CMAKI_IDENTIFIER}.cmake") - set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") - set(package_generated_file ${artifacts_dir}/${package_filename}) - - set(COPY_SUCCESFUL FALSE) - IF(EXISTS "${package_cmake_abspath}") - message("-- reusing cmake file ${package_cmake_abspath}") - set(COPY_SUCCESFUL TRUE) - else() - if(NOT "${FORCE_GENERATION}") - set(http_package_cmake_filename "${CMAKI_REPOSITORY}/download.php?file=${package_cmake_filename}") - message("-- download file: ${http_package_cmake_filename} in ${package_cmake_abspath}") - cmaki_download_file("${http_package_cmake_filename}" "${package_cmake_abspath}") - if(NOT "${COPY_SUCCESFUL}") - file(REMOVE "${package_binary_filename}") - message("Error downloading ${http_package_cmake_filename}") - endif() - else() - message("WARN: no using cache remote for: ${PACKAGE}") - endif() - endif() - - if(NOT "${COPY_SUCCESFUL}") - message("fail download") - else() - message("reused or downloaded") - endif() - - # si la descarga no ha ido bien O no quieres utilizar cache - if(NOT "${COPY_SUCCESFUL}" OR FORCE_GENERATION STREQUAL "TRUE") - - # 5. compilo y genera el paquete en local - message("Generating artifact ${PACKAGE} ...") - - ### - message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o") - ### - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - RESULT_VARIABLE artifacts_result - ) - if(artifacts_result) - message(FATAL_ERROR "can't create artifact ${PACKAGE}: error ${artifacts_result}") - endif() - - ####################################################### - # 6: obtengo la version del paquete creado - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - list(GET RESULT_VERSION 0 PACKAGE_MODE) - list(GET RESULT_VERSION 1 PACKAGE_NAME) - list(GET RESULT_VERSION 2 VERSION) - message("NEW! PACKAGE_MODE = ${PACKAGE_MODE}") - message("NEW! PACKAGE_NAME = ${PACKAGE_NAME}") - message("NEW! 
VERSION = ${VERSION}") - else() - message(FATAL_ERROR "-- not found ${PACKAGE}.") - endif() - ####################################################### - - set(package_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz) - set(package_cmake_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz) - # refresh name (NEW $VERSION is generated) - set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") - - # 7. descomprimo el artefacto - execute_process( - COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" - WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}" - RESULT_VARIABLE uncompress_result - ) - if(uncompress_result) - message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") - endif() - - # y tambien descomprimo el propio tar gz - # execute_process( - # COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_generated_file}" - # WORKING_DIRECTORY "${artifacts_dir}/" - # RESULT_VARIABLE uncompress_result2 - # ) - # if(uncompress_result2) - # message(FATAL_ERROR "Extracting ${package_generated_file} failed! Error ${uncompress_result2}") - # endif() - - # tengo el cmake pero no esta descomprimido - elseif(EXISTS "${package_cmake_abspath}" AND NOT EXISTS "${package_marker}") - - message("-- only uncompress") - ################ - message("${CMAKE_COMMAND} -E tar zxf ${package_cmake_abspath}") - ################ - - # 10. lo descomprimo - execute_process( - COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" - WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}/" - RESULT_VARIABLE uncompress_result) - if(uncompress_result) - message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") - endif() - - else() - - # tengo cmake, y esta descomprmido - message("-- nothing to do") - message("-- ${package_cmake_abspath}") - message("-- ${package_marker}") - - endif() - - - # 12. hacer find_package tradicional, ahora que tenemos los ficheros de cmake - if(${PACKAGE_MODE} STREQUAL "EXACT") - message("-- using ${PACKAGE} ${VERSION} in EXACT") - find_package(${PACKAGE} ${VERSION} EXACT REQUIRED) - else() - message("-- using ${PACKAGE} ${VERSION} in COMPATIBLE") - find_package(${PACKAGE} ${VERSION} REQUIRED) - endif() - - # generate json - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/save_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} --version=${VERSION} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - message("error saving ${PACKAGE}:${VERSION} in ${artifacts_dir}") - endif() - - # 13 add includes - string(TOUPPER "${PACKAGE}" PACKAGE_UPPER) - foreach(INCLUDE_DIR ${${PACKAGE_UPPER}_INCLUDE_DIRS}) - list(APPEND CMAKI_INCLUDE_DIRS "${INCLUDE_DIR}") - endforeach() - - # 14. add libdirs - foreach(LIB_DIR ${${PACKAGE_UPPER}_LIBRARIES}) - list(APPEND CMAKI_LIBRARIES "${LIB_DIR}") - endforeach() - - # 15. add vers specific - set(${PACKAGE_UPPER}_INCLUDE_DIRS "${${PACKAGE_UPPER}_INCLUDE_DIRS}" PARENT_SCOPE) - set(${PACKAGE_UPPER}_LIBRARIES "${${PACKAGE_UPPER}_LIBRARIES}" PARENT_SCOPE) - - # 16. 
add vars globals - set(CMAKI_INCLUDE_DIRS "${CMAKI_INCLUDE_DIRS}" PARENT_SCOPE) - set(CMAKI_LIBRARIES "${CMAKI_LIBRARIES}" PARENT_SCOPE) - - message("-- end cmaki_find_package") - -endfunction() - -macro(cmaki_package_version_check) - # llamar a check_remote_version - # dando el nombre recibo la version - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE_FIND_NAME} --version=${PACKAGE_FIND_VERSION} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - list(GET RESULT_VERSION 0 RESULT) - list(GET RESULT_VERSION 1 NAME) - list(GET RESULT_VERSION 2 VERSION) - ################################### - set(PACKAGE_VERSION_${RESULT} 1) - set(${NAME}_VERSION ${VERSION}) -endmacro() - -function(cmaki_install_3rdparty) - foreach(CMAKI_3RDPARTY_TARGET ${ARGV}) - foreach(CMAKI_BUILD_TYPE ${CMAKE_CONFIGURATION_TYPES} ${CMAKE_BUILD_TYPE}) - string(TOUPPER "${CMAKI_BUILD_TYPE}" CMAKI_BUILD_TYPE_UPPER) - get_target_property(CMAKI_3RDPARTY_TARGET_TYPE ${CMAKI_3RDPARTY_TARGET} TYPE) - if(${CMAKI_3RDPARTY_TARGET_TYPE} STREQUAL "SHARED_LIBRARY") - get_target_property(CMAKI_3RDPARTY_TARGET_LOCATION ${CMAKI_3RDPARTY_TARGET} IMPORTED_LOCATION_${CMAKI_BUILD_TYPE_UPPER}) - get_target_property(CMAKI_3RDPARTY_TARGET_SONAME ${CMAKI_3RDPARTY_TARGET} IMPORTED_SONAME_${CMAKI_BUILD_TYPE_UPPER}) - get_target_property(CMAKI_3RDPARTY_TARGET_PDB ${CMAKI_3RDPARTY_TARGET} IMPORTED_PDB_${CMAKI_BUILD_TYPE_UPPER}) - if(CMAKI_3RDPARTY_TARGET_SONAME) - get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION_PATH "${CMAKI_3RDPARTY_TARGET_LOCATION}" PATH) - set(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION_PATH}/${CMAKI_3RDPARTY_TARGET_SONAME}") - endif() - get_filename_component(CMAKI_3RDPARTY_TARGET_INSTALLED_NAME "${CMAKI_3RDPARTY_TARGET_LOCATION}" NAME) - get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION}" REALPATH) - install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_LOCATION} - DESTINATION ${CMAKI_BUILD_TYPE} - CONFIGURATIONS ${CMAKI_BUILD_TYPE} - RENAME ${CMAKI_3RDPARTY_TARGET_INSTALLED_NAME}) - if((NOT UNIX) AND EXISTS ${CMAKI_3RDPARTY_TARGET_PDB}) - get_filename_component(CMAKI_3RDPARTY_TARGET_PDB_NAME "${CMAKI_3RDPARTY_TARGET_PDB}" NAME) - install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_PDB} - DESTINATION ${CMAKI_BUILD_TYPE} - CONFIGURATIONS ${CMAKI_BUILD_TYPE} - RENAME ${CMAKI_3RDPARTY_TARGET_PDB_NAME}) - endif() - endif() - endforeach() - endforeach() -endfunction() - -function(cmaki_download_file THE_URL INTO_FILE) - set(COPY_SUCCESFUL FALSE PARENT_SCOPE) - file(DOWNLOAD ${THE_URL} ${INTO_FILE} STATUS RET) - list(GET RET 0 RET_CODE) - if(RET_CODE EQUAL 0) - set(COPY_SUCCESFUL TRUE PARENT_SCOPE) - else() - set(COPY_SUCCESFUL FALSE PARENT_SCOPE) - endif() -endfunction() - -macro(cmaki_download_package) - - message("-- begin cmaki_download_package") - if(NOT DEFINED CMAKI_REPOSITORY) - set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") - endif() - get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) - get_filename_component(package_name_version "${package_dir}" NAME) - set(package_filename "${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz") - set(http_package_filename ${CMAKI_REPOSITORY}/download.php?file=${package_filename}) - set(artifacts_dir "${NPP_ARTIFACTS_PATH}") - get_filename_component(artifacts_dir "${artifacts_dir}" ABSOLUTE) - set(package_binary_filename 
"${artifacts_dir}/${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz") - set(package_uncompressed_dir "${artifacts_dir}/${package_name_version}-binary.tmp") - set(package_marker "${artifacts_dir}/${package_name_version}/${CMAKI_IDENTIFIER}") - set(package_compressed_md5 "${package_dir}/${package_name_version}-${CMAKI_IDENTIFIER}.md5") - set(_MY_DIR "${package_dir}") - set(_DIR "${artifacts_dir}/${package_name_version}") - - if(NOT EXISTS "${package_binary_filename}") - message("download ${package_binary_filename} ...") - if(EXISTS "${package_compressed_md5}") - file(READ "${package_compressed_md5}" md5sum ) - string(REGEX MATCH "[0-9a-fA-F]*" md5sum "${md5sum}") - # TODO: use md5sum (use python for download) - # cmaki_download_file("${http_package_filename}" "${package_binary_filename}" "${md5sum}" ) - message("downloading ${http_package_filename}") - cmaki_download_file("${http_package_filename}" "${package_binary_filename}") - if(NOT "${COPY_SUCCESFUL}") - file(REMOVE "${package_binary_filename}") - message(FATAL_ERROR "Error downloading ${http_package_filename}") - endif() - else() - file(REMOVE_RECURSE "${package_dir}") - file(REMOVE_RECURSE "${_DIR}") - MESSAGE(FATAL_ERROR "Checksum for ${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz not found. Rejecting to download an untrustworthy file.") - endif() - endif() - - if(NOT EXISTS "${package_marker}") - message("Extracting ${package_binary_filename} into ${package_uncompressed_dir}...") - file(MAKE_DIRECTORY "${package_uncompressed_dir}") - execute_process( - COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_binary_filename}" - WORKING_DIRECTORY "${package_uncompressed_dir}" - RESULT_VARIABLE uncompress_result) - if(uncompress_result) - message(FATAL_ERROR "Extracting ${package_binary_filename} failed! 
Error ${uncompress_result}") - endif() - file(COPY "${package_uncompressed_dir}/${package_name_version}" DESTINATION "${artifacts_dir}") - file(REMOVE_RECURSE "${package_uncompressed_dir}") - endif() - message("-- end cmaki_download_package") - -endmacro() - -function(cmaki_executable) - cmaki_parse_parameters(${ARGV}) - set(_EXECUTABLE_NAME ${_MAIN_NAME}) - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_EXECUTABLE_NAME}) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - if(WIN32) - ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) - else() - ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) - endif() - # set_target_properties(${_EXECUTABLE_NAME} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_EXECUTABLE_NAME} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_EXECUTABLE_NAME} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_EXECUTABLE_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_EXECUTABLE_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() - generate_vcxproj_user(${_EXECUTABLE_NAME}) - -endfunction() - -function(cmaki_library) - cmaki_parse_parameters(${ARGV}) - set(_LIBRARY_NAME ${_MAIN_NAME}) - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_LIBRARY_NAME}) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) - # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_LIBRARY_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_LIBRARY_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_static_library) - cmaki_parse_parameters(${ARGV}) - set(_LIBRARY_NAME ${_MAIN_NAME}) - source_group( "Source Files" 
FILES ${_SOURCES} ) - common_flags() - common_linking(${_LIBRARY_NAME}) - add_definitions(-D${_LIBRARY_NAME}_STATIC) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_library(${_LIBRARY_NAME} STATIC ${_SOURCES}) - # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_LIBRARY_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_LIBRARY_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_test) - cmaki_parse_parameters(${ARGV}) - set(_TEST_NAME ${_MAIN_NAME}) - set(_TEST_SUFFIX "_unittest") - common_flags() - common_linking(${_TEST_NAME}${_TEST_SUFFIX}) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_executable(${_TEST_NAME}${_TEST_SUFFIX} ${_SOURCES}) - # set_target_properties(${_TEST_NAME}${_TEST_SUFFIX} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_TEST_NAME}${_TEST_SUFFIX} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) - find_program(VALGRIND "valgrind") - if(VALGRIND) - add_test( - NAME ${_TEST_NAME}_valgrind_memcheck - COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_cachegrind - COMMAND "${VALGRIND}" --tool=cachegrind $ --gmock_verbose=error - 
WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_helgrind - COMMAND "${VALGRIND}" --tool=helgrind $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_callgrind - COMMAND "${VALGRIND}" --tool=callgrind $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_valgrind_drd - COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - else() - message(FATAL_ERROR "no valgrind detected") - endif() - endif() - if(WIN32) - add_test( - NAME ${_TEST_NAME}${_TEST_SUFFIX} - COMMAND $ - WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE}) - else() - add_test( - NAME ${_TEST_NAME}${_TEST_SUFFIX} - COMMAND bash ../cmaki_emulator.sh $ - WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE}) - endif() - endforeach() - generate_vcxproj_user(${_TEST_NAME}) - -endfunction() - -macro(cmaki_google_test) - find_package(GTest REQUIRED) - find_package(GMock REQUIRED) - add_definitions(-DWITH_MAIN) - add_definitions(-DWITH_GMOCK) - set(PARAMETERS ${ARGV}) - list(GET PARAMETERS 0 _MAIN_NAME) - cmaki_test(${ARGV}) -endmacro() - -macro(cmaki_python_library) - # cmaki_find_package(python) - # cmaki_find_package(boost-python) - cmaki_library(${ARGV} PTHREADS) - cmaki_parse_parameters(${ARGV}) - set_target_properties(${_MAIN_NAME} PROPERTIES PREFIX "") - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_MAIN_NAME} - DESTINATION ${BUILD_TYPE}/lib/python3.5/lib-dynload - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endmacro() - -macro(cmaki_boost_python_test) - # cmaki_find_package(python) - # cmaki_find_package(boost-python) - cmaki_google_test(${ARGV} PTHREADS) - cmaki_parse_parameters(${ARGV}) - set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") -endmacro() - -macro(cmaki_python_test) - # cmaki_find_package(python) - cmaki_parse_parameters(${ARGV}) - add_test( NAME ${_MAIN_NAME}_test - COMMAND ./bin/python3 ${_SOURCES} - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) - set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "LD_LIBRARY_PATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") -endmacro() - -macro(cmaki_python_install) - # cmaki_find_package(python) - # cmaki_find_package(boost-python) - get_filename_component(PYTHON_DIR ${PYTHON_EXECUTABLE} DIRECTORY) - get_filename_component(PYTHON_PARENT_DIR ${PYTHON_DIR} DIRECTORY) - cmaki_install_inside_dir(${PYTHON_PARENT_DIR}) -endmacro() - -macro(cmaki_find_package_boost) - if(CMAKE_BUILD_TYPE MATCHES Debug) - set(Boost_DEBUG 1) - else() - set(Boost_DEBUG 0) - endif() - find_package(Boost REQUIRED) - include_directories(${Boost_INCLUDE_DIRS}) -endmacro() - diff --git a/node_modules/npm-mas-mas/cmaki/init/.clang-format b/node_modules/npm-mas-mas/cmaki/init/.clang-format deleted file mode 100644 index 008e6b0..0000000 --- a/node_modules/npm-mas-mas/cmaki/init/.clang-format +++ /dev/null @@ -1,66 +0,0 @@ ---- -Language: Cpp -# BasedOnStyle: WebKit -# indent public: -AccessModifierOffset: -4 -AlignAfterOpenBracket: false -AlignEscapedNewlinesLeft: false -AlignOperands: false -AlignTrailingComments: true 
-AllowAllParametersOfDeclarationOnNextLine: false -AllowShortBlocksOnASingleLine: false -AllowShortCaseLabelsOnASingleLine: false -AllowShortIfStatementsOnASingleLine: false -AllowShortLoopsOnASingleLine: false -AllowShortFunctionsOnASingleLine: All -AlwaysBreakAfterDefinitionReturnType: false -AlwaysBreakTemplateDeclarations: true -AlwaysBreakBeforeMultilineStrings: false -BreakBeforeBinaryOperators: All -BreakBeforeTernaryOperators: true -BreakConstructorInitializersBeforeComma: true -BinPackParameters: true -BinPackArguments: true -ColumnLimit: 100 -ConstructorInitializerAllOnOneLineOrOnePerLine: false -ConstructorInitializerIndentWidth: 4 -DerivePointerAlignment: false -ExperimentalAutoDetectBinPacking: false -IndentCaseLabels: true -IndentWrappedFunctionNames: false -IndentFunctionDeclarationAfterType: false -MaxEmptyLinesToKeep: 2 -KeepEmptyLinesAtTheStartOfBlocks: true -NamespaceIndentation: Inner -ObjCBlockIndentWidth: 4 -ObjCSpaceAfterProperty: true -ObjCSpaceBeforeProtocolList: true -PenaltyBreakBeforeFirstCallParameter: 19 -PenaltyBreakComment: 300 -PenaltyBreakString: 1000 -PenaltyBreakFirstLessLess: 120 -PenaltyExcessCharacter: 1000000 -PenaltyReturnTypeOnItsOwnLine: 60 -PointerAlignment: Left -SpacesBeforeTrailingComments: 2 -Cpp11BracedListStyle: true -Standard: Cpp11 -IndentWidth: 4 -TabWidth: 4 -UseTab: Always -BreakBeforeBraces: Allman -SpacesInParentheses: false -SpacesInSquareBrackets: false -SpacesInAngles: false -SpaceInEmptyParentheses: false -SpacesInCStyleCastParentheses: false -SpaceAfterCStyleCast: false -SpacesInContainerLiterals: true -SpaceBeforeAssignmentOperators: true -ContinuationIndentWidth: 4 -CommentPragmas: '^ IWYU pragma:' -ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ] -SpaceBeforeParens: ControlStatements -DisableFormat: false -... 
- diff --git a/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl deleted file mode 100644 index 3ea29e5..0000000 --- a/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - BuildName: - BuildStamp: - Name: - Generator: - CompilerName: - OSName: - Hostname: - OSRelease: - OSVersion: - OSPlatform: - Is64Bits: - VendorString: - VendorID: - FamilyID: - ModelID: - ProcessorCacheSize: - NumberOfLogicalCPU: - NumberOfPhysicalCPU: - TotalVirtualMemory: - TotalPhysicalMemory: - LogicalProcessorsPerPhysical: - ProcessorClockFrequency: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/node_modules/npm-mas-mas/cmaki/junit/README.md b/node_modules/npm-mas-mas/cmaki/junit/README.md deleted file mode 100644 index 4f989c6..0000000 --- a/node_modules/npm-mas-mas/cmaki/junit/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Source -https://bitbucket.org/shackra/ctest-jenkins/ - diff --git a/node_modules/npm-mas-mas/cmaki_docker/.travis.yml b/node_modules/npm-mas-mas/cmaki_docker/.travis.yml deleted file mode 100644 index 020ec9d..0000000 --- a/node_modules/npm-mas-mas/cmaki_docker/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -services: docker -os: linux -script: - - ./build.sh diff --git a/node_modules/npm-mas-mas/cmaki_docker/LICENSE b/node_modules/npm-mas-mas/cmaki_docker/LICENSE deleted file mode 100644 index 53546c1..0000000 --- a/node_modules/npm-mas-mas/cmaki_docker/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_docker/README.md b/node_modules/npm-mas-mas/cmaki_docker/README.md deleted file mode 100644 index 594568c..0000000 --- a/node_modules/npm-mas-mas/cmaki_docker/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# cmaki_docker - -[![Build Status](https://travis-ci.org/makiolo/cmaki_docker.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_docker) - -multiple pusher of docker images. - -``` -for image in (windows-x86, windows-x64, linux-x86, linux-x64, ...) 
- makiolo/$image = dockcross/$image + github:makiolo/cmaki_scripts/cmaki_depends.sh -done -``` diff --git a/node_modules/npm-mas-mas/cmaki_docker/build.sh b/node_modules/npm-mas-mas/cmaki_docker/build.sh deleted file mode 100755 index 26e71f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_docker/build.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env bash -#/bin/bash -prefix=$(pwd)/bin -mkdir -p $prefix - -# iterate in known images -curl https://raw.githubusercontent.com/dockcross/dockcross/master/Makefile -o dockcross-Makefile -for image in $(make -f dockcross-Makefile display_images); do - if [[ $(docker images -q dockcross/$image) != "" ]]; then - docker rmi -f dockcross/$image - echo dockcross/$image removed. - fi -done - -for image in $(make -f dockcross-Makefile display_images); do - - if [[ "$image" == "manylinux-x86" ]]; then - continue - fi - - if [[ "$image" == "manylinux-x64" ]]; then - continue - fi - - echo "copy dockcross/$image to makiolo/$image (with script change)" - cat<Dockerfile -FROM dockcross/$image:latest -ENV DEBIAN_FRONTEND noninteractive -RUN curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/cmaki_depends.sh | bash -EOF - - docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - docker build . -t makiolo/$image - docker push makiolo/$image - - # clean - docker rmi -f dockcross/$image - docker rmi -f makiolo/$image -done - diff --git a/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt deleted file mode 100644 index 91cc3ac..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt +++ /dev/null @@ -1,95 +0,0 @@ -project(cmaki_generator) -cmake_minimum_required(VERSION 3.0) - -MESSAGE("-- compiler ${CMAKI_COMPILER}, platform ${CMAKI_PLATFORM}") - -include(cmaki) - -IF(CMAKE_BUILD_TYPE STREQUAL "Debug") - MESSAGE("-- Debug Mode") - SET(GLOBAL_BUILD_MODE "Debug") -ELSEIF(CMAKE_BUILD_TYPE STREQUAL "Release") - MESSAGE("-- Release Mode") - SET(GLOBAL_BUILD_MODE "Release") -ELSEIF(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") - MESSAGE("-- RelWithDebInfo Mode") - SET(GLOBAL_BUILD_MODE "RelWithDebInfo") -ELSE() - MESSAGE("-- Build mode default to Release") - MESSAGE("-- Release Mode") - SET(GLOBAL_BUILD_MODE "Release") -ENDIF() - -IF(NOT PACKAGE) - SET(PACKAGE "packagename_invalid") - MESSAGE(FATAL_ERROR "Invalid package name") -ENDIF() - -IF(NOT PACKAGE_VERSION) - SET(PACKAGE_VERSION "verson_invalid") - MESSAGE(FATAL_ERROR "Invalid version in package") -ENDIF() - -IF(NOT LIBRARY_TYPE) - SET(LIBRARY_TYPE "STATIC") -ENDIF() - -SET(PACKAGE "${PACKAGE}" CACHE STRING "Package to compile") -SET(PACKAGE_VERSION "${PACKAGE_VERSION}" CACHE STRING "Version to compile") - -SET(CMAKE_VERBOSE_MAKEFILE ON) -# Use relative paths on Windows, to reduce path size for command-line limits -if (WIN32) - set(CMAKE_USE_RELATIVE_PATHS true) - set(CMAKE_SUPPRESS_REGENERATION true) -endif() - -IF(NOT DEFINED GTC_INSTALL_PREFIX) - SET(GTC_INSTALL_PREFIX "${NPP_ARTIFACTS_PATH}/${PACKAGE}-${PACKAGE_VERSION}-${CMAKI_PLATFORM}/${PACKAGE}-${PACKAGE_VERSION}") - SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}") - SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder executables") - SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder libs") -ELSE() - SET(GTC_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") - SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") - SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/bin" CACHE PATH "Folder 
executables") - SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/lib" CACHE PATH "Folder libs") -ENDIF() - -MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") -MESSAGE("GTC_INSTALL_PREFIX = ${GTC_INSTALL_PREFIX}") -MESSAGE("CMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}") -MESSAGE("EXECUTABLE_OUTPUT_PATH = ${EXECUTABLE_OUTPUT_PATH}") -MESSAGE("LIBRARY_OUTPUT_PATH = ${LIBRARY_OUTPUT_PATH}") - -# gnu variables can prepend CMAKE_INSTALL_PREFIX -set(CMAKE_INSTALL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") -set(CMAKE_INSTALL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") -set(CMAKE_INSTALL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") -set(CMAKE_INSTALL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") -set(CMAKE_INSTALL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") -set(CMAKE_INSTALL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") -set(CMAKE_INSTALL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") -set(CMAKE_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") -set(CMAKE_INSTALL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") -set(CMAKE_INSTALL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") -set(CMAKE_INSTALL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") -set(CMAKE_INSTALL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") -set(CMAKE_INSTALL_FULL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") -set(CMAKE_INSTALL_FULL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") -set(CMAKE_INSTALL_FULL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") -set(CMAKE_INSTALL_FULL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") -set(CMAKE_INSTALL_FULL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") -set(CMAKE_INSTALL_FULL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") -set(CMAKE_INSTALL_FULL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") -set(CMAKE_INSTALL_FULL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") -set(CMAKE_INSTALL_FULL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_FULL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_FULL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") -set(CMAKE_INSTALL_FULL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") -set(CMAKE_INSTALL_FULL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") -set(CMAKE_INSTALL_FULL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") -LINK_DIRECTORIES(${LIBRARY_OUTPUT_PATH}) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/LICENSE b/node_modules/npm-mas-mas/cmaki_generator/LICENSE deleted file mode 100644 index 7e79e4d..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/npm-mas-mas/cmaki_generator/README.md b/node_modules/npm-mas-mas/cmaki_generator/README.md deleted file mode 100644 index 6b5b746..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# cmaki_generator [![npm version](https://badge.fury.io/js/cmaki_generator.svg)](https://badge.fury.io/js/cmaki_generator) - -gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_generator.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_generator) - -# artifacts responsability -- boost-headers -- boost-system -- boost-random -- boost-atomic -- boost-thread -- boost-chrono -- boost-context -- boost-coroutine2 -- boost-signals -- boost-test -- boost-regex -- boost-filesystem -- boost-program-options -- python -- boost-python -- boost-python-debug -- boost-serialization diff --git a/node_modules/npm-mas-mas/cmaki_generator/build b/node_modules/npm-mas-mas/cmaki_generator/build deleted file mode 100755 index c98e1d8..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/build +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -directory=$(dirname $0) -if hash cygpath 2>/dev/null; then - directory=$(cygpath -w ${directory}) -fi - -python "${directory}/build.py" "$@" -out=$? -exit ${out} diff --git a/node_modules/npm-mas-mas/cmaki_generator/build.cmd b/node_modules/npm-mas-mas/cmaki_generator/build.cmd deleted file mode 100644 index e0ea6bd..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/build.cmd +++ /dev/null @@ -1,11 +0,0 @@ -@ECHO OFF -SET DIRWORK=%~dp0 - -IF EXIST "%PYTHON%" ( - rem ok -) ELSE ( - set PYTHON=python -) - -SET PATH=%~dp0\bin;%PATH% -"%PYTHON%" %DIRWORK%\build.py %* diff --git a/node_modules/npm-mas-mas/cmaki_generator/build.py b/node_modules/npm-mas-mas/cmaki_generator/build.py deleted file mode 100644 index 5d86829..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/build.py +++ /dev/null @@ -1,757 +0,0 @@ -import os -import os.path -import sys -import fnmatch -import logging -import utils -import argparse -import pipeline -import traceback -import copy -import datetime -# object package -from third_party import ThirdParty -from collections import OrderedDict -from third_party import exceptions_fail_group -from third_party import exceptions_fail_program -from third_party import alias_priority_name -from third_party import alias_priority_name_inverse -from third_party import CMAKELIB_URL -from third_party import is_valid -from third_party import is_blacklisted -from third_party import prepare_cmakefiles -# gtc stages -from purge import purge -from prepare import prepare -from compilation import compilation -from packing import packing -from run_tests import run_tests -from upload import upload -from get_return_code import get_return_code -from third_party import FailThirdParty - -# GLOBAL NO MUTABLES -image_pattern = "image.%Y.%m.%d.%H%M" - -try: - import yaml -except ImportError: - logging.error('[Warning] Not yaml library present') - logging.error('[Warning] PyYAML (python extension) is mandatory') - if utils.is_windows(): - logging.error('You can use pip for install:') - logging.error(' pip intall pyyaml') - sys.exit(1) - -# Global mutable -compiler_replace_maps = {} - -# Global const -yaml_common_references = 'common.yml' 
-yaml_collapsed_third_parties = '.3p.yml' -yaml_collapsed_final = '.data.yml' - -class Loader(yaml.Loader): - def __init__(self, stream): - self._root = os.path.split(stream.name)[0] - super(Loader, self).__init__(stream) - - def include(self, node): - filename = os.path.join(self._root, self.construct_scalar(node)) - with open(filename, 'r') as f: - return yaml.load(f, Loader) - -def amalgamation_yaml(rootdir, yamlfile=None): - Loader.add_constructor('!include', Loader.include) - - # autogeneration .data.yml - yaml_collapsed_final_abspath = os.path.join(rootdir, yaml_collapsed_final) - yaml_common_references_abspath = os.path.join(rootdir, yaml_common_references) - with open(yaml_collapsed_final_abspath, 'wt') as f: - f.write('# autogenerated file, dont edit it !!!---\n') - f.write('---\n') - # inject common.yml - f.write('%sreferences:\n' % (' '*4)) - with open(yaml_common_references_abspath, 'r') as fr: - for line in fr.readlines(): - f.write('%s%s' % (' '*8, line)) - collapse_third_parties(rootdir, yaml_collapsed_third_parties, yamlfile=yamlfile) - if yamlfile is None and not parameters.no_back_yaml: - node_modules_dir = os.path.abspath(os.path.join(rootdir, '..', '..')) - for path in os.listdir(node_modules_dir): - fullpath = os.path.join(os.path.abspath(node_modules_dir), path) - if os.path.isdir(fullpath): - cmaki_file = os.path.join(fullpath, 'cmaki.yml') - if os.path.isfile(cmaki_file): - with open(cmaki_file, 'r') as fr: - with open(yaml_collapsed_third_parties, 'a') as tp_append: - for line in fr.readlines(): - tp_append.write(line) - # inject third_parties.yml - f.write('%sthird_parties:\n' % (' '*4)) - with open(yaml_collapsed_third_parties) as ft: - for line in ft.readlines(): - # sys.stdout.write("searching {}".format(line)) - f.write('%s%s' % (' '*8, line)) - -def search_nodes_by_key(list_nodes, found_key): - nodes = [] - for key, node in list_nodes: - if key == found_key: - nodes.append(node) - return nodes - -def collapse_third_parties(rootdir, filename, yamlfile=None): - p = pipeline.make_pipe() - # begin - if yamlfile is None: - p = pipeline.find(rootdir, 3)(p) - else: - p = pipeline.echo(yamlfile)(p) - # exclusions - p = pipeline.endswith('.yml')(p) - p = pipeline.grep_v('.travis.yml')(p) - p = pipeline.grep_v('shippable.yml')(p) - p = pipeline.grep_v('appveyor.yml')(p) - p = pipeline.grep_v('codecov.yml')(p) - p = pipeline.grep_v('.github')(p) - p = pipeline.grep_v('docker-compose.yml')(p) - p = pipeline.grep_v('circle.yml')(p) - p = pipeline.grep_v('_config.yml')(p) - p = pipeline.grep_v('.circleci-matrix.yml')(p) - p = pipeline.grep_v('.build_')(p) - p = pipeline.grep_v(yaml_collapsed_final)(p) - p = pipeline.grep_v(yaml_common_references)(p) - p = pipeline.grep_v(yaml_collapsed_third_parties)(p) - p = pipeline.grep_v(' - Copy.yml')(p) - p = pipeline.info('---> (yaml found.) 
')(p) - # cat - p = pipeline.cat()(p) - # p = pipeline.info('amalgamated: ')(p) - # write - p = pipeline.write_file(filename)(p) - # end - pipeline.end_pipe()(p) - -def run_purge(solutions): - - # create pipeline - with pipeline.create() as (p, finisher): - - # feed all packages - p = pipeline.feed(packages)(p) - - # clean intermediate folders - p = pipeline.do(purge, True, parameters)(p) - - # close pipe - finisher.send(p) - -def convert_priority_to_integer(priority): - if priority is not None: - error = False - if priority in alias_priority_name_inverse: - priority = alias_priority_name_inverse[priority] - else: - try: - priority_integer = int(priority) - if priority_integer in alias_priority_name: - priority = priority_integer - else: - error = True - except ValueError: - error = True - if error: - logging.error('Invalid priority name: %s' % priority) - sys.exit(1) - return priority - -def show_results(parameters, groups_ordered, rets, unittests): - # show final report - anyFail = 0 - if len(rets) > 0: - logging.info('-' * 80) - logging.info('') - for name in rets: - state = rets[name] - if state != "OK": - anyFail = 1 - - # package with unittests? - if name in unittests: - try: - result_test = unittests[name] - except KeyError: - result_test = 'No unittest found' - - if state != "OK": - logging.info("Compiled %30s - STATUS: %15s" % (name, state)) - else: - # only want know test result if is OK - logging.info("Compiled %30s - STATUS: %15s - TESTS: %s" % (name, state, result_test)) - else: - logging.info("Compiled %30s - STATUS: %15s" % (name, state)) - - logging.info('') - logging.info( '-'* 80) - else: - anyFail = 1 - logging.error('No results generated.') - - # any have exceptions ? - have_exceptions = False - for _, packages in groups_ordered: - for node in packages: - if len(node.exceptions) > 0: - have_exceptions = True - - if have_exceptions: - logging.error("---------- begin summary of exceptions ------------------------") - # show postponed exceptions - for _, packages in groups_ordered: - for node in packages: - if len(node.exceptions) > 0: - # something was wrong - anyFail = 1 - # show exceptions of this package - package = node.get_package_name() - version = node.get_version() - logging.error("package %s (%s) with exceptions" % (package, version)) - i = 0 - for exc_type, exc_value, exc_traceback in node.exceptions: - logging.error("---- Exception #%d / %d ----------" % (i+1, len(node.exceptions))) - traceback.print_exception(exc_type, exc_value, exc_traceback) - logging.error("----------------------------------") - i += 1 - logging.error("---------- end summary of exceptions ------------------------") - return anyFail - -def clean_subset(solutions): - groups = copy.deepcopy(solutions) - # 2/4: remove solutions are subset of other solution - for solution1 in solutions: - for solution2 in solutions: - if solution1 != solution2: - match = True - for node in solution1: - if node not in solution2: - match = False - break - if match and (solution1 in groups): - groups.remove(solution1) - return groups - -def init_parameter_path(value, default): - if value is None: - value = default - else: - # expand variables in no-windows - if not utils.is_windows(): - value = value.replace('~', utils.get_real_home()) - value = os.path.abspath(value) - return value - - -def parse_arguments(): - - parser = argparse.ArgumentParser(prog=""" - -cmaki_generator: - - Can build artifacts in a easy way. Each third-party need a block definition in yaml. 
This block contain all need information necessary for download, build, testing and packing. - -usage:""") - group_main = parser.add_argument_group('basic usage') - group_main.add_argument('packages', metavar='packages', type=str, nargs='*', - help='name (or list names) third party') - group_main.add_argument('--plan', '--dry-run', dest='plan', action='store_true', - help='Show packages plan (like a dry-run)', default=False) - group_main.add_argument('--server', dest='server', help='artifact server', default=None) - group_main.add_argument('--no-back-yaml', dest='no_back_yaml', action='store_true', help='no search back yaml', - default=False) - group_layer = group_main.add_mutually_exclusive_group() - group_layer.add_argument('--layer', dest='priority', - help='filter by layername. Valid values: (minimal|tools|third_party)', default=None) - group_layer.add_argument('--no-layer', dest='no_priority', - help='negation filter by layername. Valid values: (minimal|tools|third_party)', - default=None) - # group_main.add_argument('-t', '--tag', action='append', metavar='tag', type=str, help='NOT IMPLEMMENTED YET: filter tag third party') - group_padawan = parser.add_argument_group('padawan') - group_purge = group_padawan.add_mutually_exclusive_group() - group_purge.add_argument('--no-purge', dest='no_purge', action='store_true', help='remove purge from pipeline', - default=False) - group_purge.add_argument('--only-purge', dest='only_purge', action='store_true', - help='execute only purge in pipeline', default=False) - group_prepare = group_padawan.add_mutually_exclusive_group() - group_prepare.add_argument('--no-prepare', dest='no_prepare', action='store_true', - help='remove prepare from pipeline', default=False) - group_prepare.add_argument('--only-prepare', dest='only_prepare', action='store_true', - help='execute only prepare in pipeline', default=False) - group_compilation = group_padawan.add_mutually_exclusive_group() - group_compilation.add_argument('--no-compilation', dest='no_compilation', action='store_true', - help='remove compilation from pipeline', default=False) - group_compilation.add_argument('--only-compilation', dest='only_compilation', action='store_true', - help='execute only compilation in pipeline', default=False) - group_packing = group_padawan.add_mutually_exclusive_group() - group_packing.add_argument('--no-packing', dest='no_packing', action='store_true', - help='remove packing from pipeline', default=False) - group_packing.add_argument('--only-packing', dest='only_packing', action='store_true', - help='execute only packing in pipeline', default=False) - group_run_tests = group_padawan.add_mutually_exclusive_group() - group_run_tests.add_argument('--no-run-tests', dest='no_run_tests', action='store_true', - help='remove run_tests from pipeline', default=False) - group_run_tests.add_argument('--only-run-tests', dest='only_run_tests', action='store_true', - help='execute only run_tests in pipeline', default=False) - group_upload = group_padawan.add_mutually_exclusive_group() - group_upload.add_argument('--no-upload', dest='no_upload', action='store_true', help='remove upload from pipeline', - default=False) - group_upload.add_argument('--only-upload', dest='only_upload', action='store_true', - help='execute only upload in pipeline', default=False) - # creador de third parties - group_jedi = parser.add_argument_group('jedi') - group_jedi.add_argument('-o', '--only', dest='build_only', action='store_true', - help='build only explicit packages and not your depends') - 
group_jedi.add_argument('-v', '--verbose', action='count', help='verbose mode', default=0) - group_jedi.add_argument('-q', '--quiet', dest='quiet', action='store_true', help='quiet mode', default=False) - group_jedi.add_argument('-d', '--debug', action='store_true', help='Ridiculous debugging (probably not useful)') - group_jedi.add_argument('--purge-if-fail', dest='purge_if_fail', action='store_true', - help='purge even if a package finish with fail', default=False) - group_jedi.add_argument('--with-svn', dest='with_svn', help='svn executable', default=None) - group_jedi.add_argument('--fast', dest='fast', action='store_true', default=False, help=argparse.SUPPRESS) - group_jedi.add_argument('--log', dest='log', help='specified full path log (default is "gtc.log")', - default='gtc.log') - group_jedi.add_argument('--no-packing-cmakefiles', action='store_true', dest='no_packing_cmakefiles', - help='no packing cmakefiles', default=False) - group_jedi.add_argument('--blacklist', dest='blacklist', - help='third party in quarantine (default is $ROOTDIR + "blacklist.txt")', default=None) - group_jedi.add_argument('--no-blacklist', action='append', dest='no_blacklist', - help='list packages (separated with comma), for annular blacklist effect.', default=[]) - group_master_jedi = parser.add_argument_group('master jedi') - group_master_jedi.add_argument('--rootdir', dest='rootdir', - help='input folder with yamls, is recursive (default is current directory)', - default=None) - group_master_jedi.add_argument('--prefix', dest='prefix', - help='output folder where packages will be generated (default is $ROOTDIR + "artifacts")', - default=None) - group_master_jedi.add_argument('--cmakefiles', dest='cmakefiles', - help='input folder with cmake scripts (default is $PREFIX + "cmakelib")', - default=None) - group_master_jedi.add_argument('--third-party-dir', dest='third_party_dir', - help='output folder for cmakefiles (default is $CMAKEFILES + "3rdparty")', - default=None) - group_master_jedi.add_argument('--depends', dest='depends', help='json for save versions', default=None) - group_master_jedi.add_argument('--yaml', dest='yaml', help='unique file with third party to compile', default=None) - parameters = parser.parse_args() - ''' - TODO: - refactor: - prefix = DEPENDS_PATH (cmake3p) (artifacts) - cmakefiles = CMAKI_PATH, CMAKE_MODULE_PATH (cmaki, cmaki_find_package) - third-party-dir = CMAKE_PREFIX_PATH (directorio artifacts/cmaki_find_package) (3rdparty) - rootdir = ARTIFACTS_PATH, es la base de donde esta build.py (cmaki_generator) (scripts de generacion) tambien podria ser CMAKI_PWD - CMAKI_INSTALL: donde se espera tener instalado el cmaki_identifier - ''' - - cmaki_pwd = os.environ.get('CMAKI_PWD', os.getcwd()) - cmaki_install = os.environ.get('CMAKI_INSTALL', os.path.join(cmaki_pwd, 'bin')) - - ''' - axiomas: - - cmaki_pwd - - cmaki_install - - cmaki - - reglas: - - rootdir = cmaki/../cmaki_generator - - prefix = cmaki_pwd/artifacts - - third-party-dir = prefix/cmaki_find_package - - depends = cmaki_pwd/depends.json - - blacklist = rootdir/blacklist.txt - ''' - - - parameters.rootdir = init_parameter_path(parameters.rootdir, os.getcwd()) - parameters.prefix = init_parameter_path(parameters.prefix, os.path.join(cmaki_pwd, 'artifacts')) - parameters.third_party_dir = init_parameter_path(parameters.third_party_dir, os.path.join(parameters.prefix, 'cmaki_find_package')) - parameters.cmakefiles = init_parameter_path(parameters.cmakefiles, os.path.join(parameters.rootdir, '..', 'cmaki')) - 
parameters.blacklist = init_parameter_path(parameters.blacklist, os.path.join(parameters.rootdir, 'blacklist.txt')) - parameters.depends = init_parameter_path(parameters.depends, os.path.join(cmaki_pwd, 'depends.json')) - - # convert priority to int - parameters.priority = convert_priority_to_integer(parameters.priority) - parameters.no_priority = convert_priority_to_integer(parameters.no_priority) - if parameters.only_purge: - parameters.no_purge = False - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_prepare: - parameters.no_purge = True - parameters.no_prepare = False - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_compilation: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = False - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_packing: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = False - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_run_tests: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = False - parameters.no_upload = True - elif parameters.only_upload: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = False - - if parameters.server is None: - if 'NPP_SERVER' not in os.environ: - logging.warning('Using artifacts server by default. 
If you need, can explicit define environment var NPP_SERVER') - os.environ['NPP_SERVER'] = 'http://artifacts.myftp.biz' - parameters.server = os.environ['NPP_SERVER'] - - - if 'NPP_CACHE' not in os.environ: - logging.warning('Using enablibing npm++ cache by default.') - os.environ['NPP_CACHE'] = 'TRUE' - - return parameters - - -if __name__ == '__main__': - - parameters = parse_arguments() - - # prepare logging - if parameters.debug: - utils.setup_logging(logging.DEBUG, parameters.log) - else: - utils.setup_logging(logging.INFO, parameters.log) - - if parameters.verbose: - logging.info('parameters = {}'.format(parameters)) - - if not parameters.quiet: - logging.info('---- MODE: {}'.format( os.environ['MODE'] )) - logging.info('---- CMAKI_PWD: {}'.format( os.environ['CMAKI_PWD'] )) - logging.info('---- CMAKI_INSTALL: {}'.format( os.environ['CMAKI_INSTALL'] )) - logging.info('---- rootdir: {}'.format(parameters.rootdir)) - logging.info('---- prefix: {}'.format(parameters.prefix)) - logging.info('---- cmakefiles: {}'.format(parameters.cmakefiles)) - logging.info('---- third_party_dir: {}'.format(parameters.third_party_dir)) - logging.info('---- blacklist: {}'.format(parameters.blacklist)) - logging.info('---- depends: {}'.format(parameters.depends)) - - - - # fetch remotes yaml - # i = 0 - # for package in parameters.packages: - # if package.startswith('github://'): - # repo = package[len('github://'):] - # utils.trymkdir('github') - # yml_file = os.path.join('github', '{}.yml'.format(repo.replace('/', '_'))) - # if os.path.isfile(yml_file): - # utils.tryremove(yml_file) - # try: - # download_from_url('https://raw.githubusercontent.com/{}/master/cmaki.yml'.format(repo), yml_file) - # except urllib2.HTTPError: - # logging.error('not found cmaki.yml in {}'.format(package)) - # sys.exit(1) - # parameters.packages[i] = repo.split('/')[1] - # i += 1 - - prepare_cmakefiles(parameters.cmakefiles) - - # generate amalgaimation yaml - amalgamation_yaml(parameters.rootdir, parameters.yaml) - - # load yaml to python - with open(yaml_collapsed_final, 'rt') as fy: - third_parties_data_yaml = yaml.load(fy, Loader) - - # generate list of tuples (key, parameters) - count = 0 - third_parties_data = [] - for third in third_parties_data_yaml['third_parties']: - for key in third: - parms = third[key] - third_parties_data.append( (key, parms) ) - count += 1 - - logging.info('Found {} packages.'.format(count)) - logging.info('Package requested: {}'.format(parameters.packages)) - - if count == 1 and (len(parameters.packages) == 0): - parameters.packages = [ third_parties_data[0][0] ] - - # create nodes and choose selected by filter and mask - nodes = [] - selected = [] - for key, parms in third_parties_data: - node = ThirdParty(parameters, key, parms) - # define variables for unused projects - package = node.get_package_name() - - # fill compiler_replace_maps - node.apply_replace_maps(compiler_replace_maps) - - if (node.is_valid() - and (parameters.priority is None or (parameters.priority == node.get_priority())) - and (parameters.no_priority is None or (parameters.no_priority != node.get_priority()))): - nodes.append( (key, node) ) - if (parameters.packages == ['.'] or parameters.packages == ['*']): - selected.append( (key, node) ) - elif ((parameters.packages == ['all']) and (not node.get_exclude_from_all())): - selected.append( (key, node) ) - else: - for exp in parameters.packages: - if fnmatch.fnmatch(key.lower(), exp.lower()): - selected.append( (key, node) ) - - logging.info('Selected {} 
packages.'.format(len(selected))) - - # create relations - for key, parms in third_parties_data: - try: - depends = parms['depends'] - mask = parms['mask'] - # depends valid - valid = is_valid(key, mask) - # depends blacklisted - blacklisted = is_blacklisted(parameters.blacklist, parameters.no_blacklist, key) - if (depends is not None) and valid and (not blacklisted): - for depend in depends: - nodes_key = search_nodes_by_key(nodes, key) - nodes_depend = search_nodes_by_key(nodes, depend) - for nk in nodes_key: - for nd in nodes_depend: - nk.needs(nd) - except KeyError: - # no need create relations - pass - - - # 1/7: Generate solutions in each node - solutions = [] - for key, select_node in selected: - resolved = [] - if not parameters.build_only: - select_node.resolver(resolved, []) - solutions.append( resolved ) - else: - solutions.append( [select_node] ) - - - # 2/7: clean subset - groups = clean_subset(solutions) - - - # 3/7: merge solutions with same root - sols3 = {} - for packages in groups: - first = packages[0] - if first not in sols3: - sols3[first] = [] - chunk = sols3[first] - for node in packages: - if node != first: - if node not in chunk: - chunk.append(node) - - - # 4/7: write final plan - groups = [] - for key, value in sols3.items(): - newsolution = [key] - for node in value: - newsolution.append(node) - groups.append(newsolution) - - - # 5/7: clean subset - groups = clean_subset(groups) - - # 6/7: sort groups - groups_ordered = [] - for packages in groups: - priority_total = 0 - for node in packages: - priority_total += node.get_priority() - priority_group = (priority_total / len(packages)) - groups_ordered.append( (priority_group, packages) ) - groups_ordered.sort(key=lambda tup: tup[0], reverse=False) - - # 7/7: validate groups - for priority_total, packages in groups_ordered: - if len(packages) > 0: - priority_initial = packages[0].get_priority() - for node in packages: - if priority_initial != node.get_priority(): - logging.error('[ERROR] You are mixing packages of different layers.') - logging.error('Invalid priority (%d) in package %s, expected %d:' % (node.get_priority(), node.get_package_name(), priority_initial)) - logging.error('Any in group have bad depends:') - for node in packages: - sys.stdout.write('%s, ' % node.get_package_name()) - sys.stdout.write('\n') - sys.exit(1) - - # show groups in --plan - if len(groups_ordered) > 0: - priority_prev = groups_ordered[0][0] - i = 0 - for priority_total, packages in groups_ordered: - if parameters.quiet: - j = 0 - for node in packages: - sys.stdout.write("%s" % node.get_package_name()) - if ((len(packages)-1) != j): - sys.stdout.write(";") - j += 1 - sys.stdout.write('\n') - else: - if (priority_total > priority_prev) or (i == 0): - if priority_total in alias_priority_name: - layer_name = alias_priority_name[priority_total] - else: - layer_name = '%d' % priority_total - sys.stdout.write('\nLayer: %s\n\n' % layer_name) - sys.stdout.write("\t[") - j = 0 - for node in packages: - sys.stdout.write("%s" % node.get_package_name()) - if ((len(packages)-1) != j): - sys.stdout.write(", ") - j += 1 - sys.stdout.write("]") - sys.stdout.write('\n') - - priority_prev = priority_total - i += 1 - sys.stdout.write('\n') - sys.stdout.flush() - else: - logging.warning('No results.') - # with --plan flag is like use --dry-run - if parameters.plan: - sys.exit(0) - - try: - rets = OrderedDict() - unittests = OrderedDict() - skipping_if_priority_gt = 999 - announce_once = False - # - # pipeline: prepare, compile, packing, run_tests - # 
- for priority_group, packages in groups_ordered: - - if priority_group > skipping_if_priority_gt: - if not announce_once: - logging.error("ignoring group because some previous group are failing:") - logging.warning('\tgroup is formed by:') - announce_once = True - else: - logging.warning('') - for node in packages: - logging.warning(' -- %s' % node.get_package_name()) - continue - - if len(packages) > 1: - logging.info('--- Start group ---') - for node in packages: - logging.info('- %s' % node.get_package_name()) - # prepare include scripts - node.generate_scripts_headers(compiler_replace_maps) - - try: - if not parameters.no_purge: - run_purge(packages) - - # create pipeline - p = pipeline.make_pipe() - - # feed third parties - p = pipeline.feed(packages)(p) - - if not parameters.no_prepare: - # download sources - p = pipeline.do(prepare, False, parameters, compiler_replace_maps)(p) - - if not parameters.no_compilation: - # ./configure && make (configuration and compilation) - p = pipeline.do(compilation, False, parameters, compiler_replace_maps)(p) - - if not parameters.no_packing: - # packing (generate .tar.gz) - p = pipeline.do(packing, False, parameters, compiler_replace_maps)(p) - - if not parameters.no_run_tests: - # execute unittests and save results in "unittests" - p = pipeline.do(run_tests, False, parameters, compiler_replace_maps, unittests)(p) - - if not parameters.no_upload: - # upload artifacts - p = pipeline.do(upload, False, parameters, compiler_replace_maps)(p) - - # save results in "rets" - p = get_return_code(parameters, rets)(p) - - # close pipe - pipeline.end_pipe()(p) - - except FailThirdParty as e: - skipping_if_priority_gt = priority_group - logging.error("stopping full group.") - - except exceptions_fail_group: - logging.warning('Fatal exception in group:') - for node in packages: - logging.warning('-- %s' % node.get_package_name()) - - finally: - # only purge when you are executing a full group - if (not parameters.build_only) and (not parameters.no_purge): - if parameters.purge_if_fail: - run_purge(packages) - else: - # purge only if all packages are ok - ret = 0 - for node in packages: - ret += node.ret - - if ret == 0: - run_purge(packages) - else: - if len(packages) > 1: - logging.warning('Any in group is failing. No purge next group:') - for node in packages: - logging.warning(' %s' % node.get_package_name()) - else: - logging.warning('No purge %s because finished with fail' % node.get_package_name()) - - except exceptions_fail_program: - logging.warning('Force explicit exit ...') - finally: - ret = show_results(parameters, groups_ordered, rets, unittests) - sys.exit(ret) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py b/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py deleted file mode 100644 index 4ab073a..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py +++ /dev/null @@ -1,233 +0,0 @@ -import os -import sys -import logging -import argparse -from requests import get # to make GET request -from io import StringIO -import csv -import utils -import functools - -version_separator = '.' 
-version_count_max = 4 - - -# def read_remote_csv(url): -# fp = urllib.request.urlopen(url) -# mybytes = fp.read() -# content = mybytes.decode("utf8") -# fp.close() -# return content - - -def read_remote_csv(url): - response = get(url) - response = response.content.decode("utf8") - return response - - -def version_to_tuple(version_str): - try: - if (version_str is not None) and (len(version_str) > 0): - count = len(version_str.split(version_separator)) - list_data = [int(x) for x in version_str.split(version_separator)] - zeros = [0 for x in range(version_count_max - count)] - list_data.extend(zeros) - return tuple(list_data) - else: - return None - except ValueError: - return None - - -class package(object): - def __init__(self, name, version, local): - self._name = name - self._version = version_to_tuple(version) - self._local = local - - def __repr__(self): - if self._version is not None: - list_version = list(self._version) - list_version = [str(x) for x in list_version] - join_version = version_separator.join(list_version) - else: - join_version = "last" - return "%s;%s" % (self._name, join_version) - - def __eq__(self, other): - return (self._name == other._name) or (self._name == '.') or (other._name == '.') - - def __ne__(self, other): - return not self.__eq__(other) - - def is_same_version(self, other): - return self._version == other._version - - def get_name(self): - return self._name - - def get_version(self): - return self._version - - def is_local(self): - return self._local - - -def sort_versions(local_swap): - if not local_swap: - one = 1 - else: - one = -1 - - def cmp(a, b): - if a.get_version() < b.get_version(): - return 1 - elif a.get_version() > b.get_version(): - return -1 - else: - if a.is_local() and not b.is_local(): - return -one - elif a.is_local() and b.is_local(): - return one - elif not a.is_local() and b.is_local(): - return one - else: - return one - return cmp - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--artifacts', dest='artifacts', help='3rdparty path with cmakefiles', default=None) - parser.add_argument('--server', dest='server', help='artifact server', default=None) - """ - Existe un valor especial de name ".". 
Sirve para hacer un listado de todos los artefactos - """ - parser.add_argument('--name', required=True, dest='name', help='name package', default=None) - """ - La version fijada tiene la siguiente prioridad: - - Version fijada mediante parametros - - Version fijada mediante fichero de dependencias - - Version ultima - """ - parser.add_argument('--version', dest='version', help='version package fixed', default=None) - # TODO: packagename-1.0.0.0-windows_32-msvc_2015-debug - # --platform deberia filtrar artefactos compatibles con "MI PLATAFORMA" - parser.add_argument('--platform', dest='platform', help='platform specified', default=None) - # --compiler deberia filtrar artefactos compatibles con "MI COMPILADOR" - parameters = parser.parse_args() - - package_request = package(parameters.name, parameters.version, True) - packages_found = [] - - if parameters.artifacts is not None: - # local - utils.trymkdir(parameters.artifacts) - for path in os.listdir(parameters.artifacts): - full_path = os.path.join(parameters.artifacts, path) - # directorios que contengan "-" - if os.path.isdir(full_path) and (full_path.find('-') != -1): - basename = os.path.basename(full_path) - try: - separator = basename.rindex('-') - package_name = basename[:separator] - package_version = basename[separator+1:] - new_package = package(package_name, package_version, True) - if new_package == package_request: - packages_found.append(new_package) - except ValueError: - pass # happen with 3rdpartyversions - - """ - Buscar paquetes recien generados - """ - if parameters.artifacts is not None: - # local - basename = None - for path in os.listdir(parameters.artifacts): - full_path = os.path.join(parameters.artifacts, path) - terminator = '-cmake.tar.gz' - if os.path.isfile(full_path) and (full_path.endswith(terminator)): - if parameters.platform is None: - logging.error('Platform is needed!') - sys.exit(1) - terminator = '-%s-cmake.tar.gz' % parameters.platform - basename = os.path.basename(full_path) - try: - if basename is not None: - separator = basename.rindex(terminator) - basename = basename[:separator] - separator = basename.rindex('-') - package_name = basename[:separator] - package_version = basename[separator+1:] - new_package = package(package_name, package_version, True) - if new_package == package_request: - packages_found.append(new_package) - except ValueError: - # not found platform in file - pass - - - if parameters.server is not None: - try: - if not parameters.server.endswith('?quiet'): - parameters.server = parameters.server + '/' + '?quiet' - csv_content = read_remote_csv(parameters.server) - reader = csv.reader(StringIO(csv_content), delimiter=';') - i = 0 - for row in reader: - if len(row) >= 2: - if i > 0: - package_name = row[0] - package_version = row[1] - package_platform = row[2] - new_package = package(package_name, package_version, False) - if (parameters.platform is None) or (parameters.platform == package_platform): - if new_package == package_request: - packages_found.append(new_package) - i += 1 - except IOError: - logging.debug('error in cache artifacts: %s' % parameters.server) - - - if len(packages_found) > 0: - - if parameters.version is None: - """ - Cuando no hay version, ordeno de mayor a menor. - Al pasar False al comparador aparece primero local y luego remote en caso de ser la misma version. - Selecciona el primero y sale. 
- """ - for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(False))): - if package_request.is_same_version(package): - print("EXACT;%s;%s" % (package, package.get_version())) - else: - print("COMPATIBLE;%s;%s" % (package, package.get_version())) - if parameters.name != '.': - sys.exit(0) - else: - """ - Cuando se especifica una version minima - Se ordena a la inversa, es decir de menor a mayor. - Se coge el primer paquete que cumple la restriccion de version. - Al pasar True al comparador hace que en caso de empate se mantenga a pesar del reverse que - aparece primero versiones locales y luego las remotas. - """ - for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(True)), reverse=True): - if package.get_version() >= package_request.get_version(): - if package_request.is_same_version(package): - print("EXACT;%s;%s" % (package, package.get_version())) - else: - print("COMPATIBLE;%s;%s" % (package, package.get_version())) - if parameters.name != '.': - sys.exit(0) - else: - print("UNSUITABLE;;") - sys.exit(1) - -# if __name__ == '__main__': -# csv_content = read_remote_csv('http://localhost:8080') -# reader = csv.reader(StringIO(csv_content), delimiter=';') -# print(list(reader)) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/common.yml b/node_modules/npm-mas-mas/cmaki_generator/common.yml deleted file mode 100644 index 11a2c76..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/common.yml +++ /dev/null @@ -1,498 +0,0 @@ -compilation_environments: &compilation_environments - windows_32-msvc_msc_ver_*-*: - generator: "Visual Studio 15 2017" - ext_dyn: dll - ext_sta: lib - windows_64-msvc_msc_ver_*-*: - generator: "Visual Studio 15 2017 Win64" - ext_dyn: dll - ext_sta: lib - windows_32-msvc_2015-*: - generator: "Visual Studio 14 2015" - ext_dyn: dll - ext_sta: lib - windows_64-msvc_2015-*: - generator: "Visual Studio 14 2015 Win64" - ext_dyn: dll - ext_sta: lib - windows_32-msvc_2017-*: - generator: "Visual Studio 15 2017" - ext_dyn: dll - ext_sta: lib - windows_64-msvc_2017-*: - generator: "Visual Studio 15 2017 Win64" - ext_dyn: dll - ext_sta: lib - windows_32-gcc_4-*: - generator: "Unix Makefiles" - ext_dyn: dll.a - ext_sta: a - windows_64-gcc_4-*: - generator: "Unix Makefiles" - ext_dyn: dll.a - ext_sta: a - linux_*_glibc_2.*-*_*-*: - generator: "Unix Makefiles" - ext_dyn: so - ext_sta: a - macos_64-clang_*-*: - generator: "Unix Makefiles" - ext_dyn: dylib - ext_sta: a - android_arm_api_*-gcc_*-*: - generator: "Unix Makefiles" - ext_dyn: so - ext_sta: a - -thirdparty_defaults: &thirdparty_defaults - platforms: - <<: *compilation_environments - version: null - version_manager: git - mask: wlmea - mode: dri - depends: null - source: skip - packing: true - build_windows: - | - @echo off - set CMAKI_INSTALL=%SELFHOME% - npm install - unittest: - | - int main() { return 0; } - priority: 30 - -library_dynamic: &library_dynamic - common: &library_dynamic_common - include: - - $PLATFORM/include - - include - windows: &library_dynamic_windows - <<: *library_dynamic_common - dynamic: - debug: - dll: - /*$TARGET*.dll/ - lib: - /*$TARGET*.lib/ - pdb: - /*$TARGET*.pdb/ - relwithdebinfo: - dll: - /*$TARGET*.dll/ - lib: - /*$TARGET*.lib/ - pdb: - /*$TARGET*.pdb/ - release: - dll: - /*$TARGET*.dll/ - lib: - /*$TARGET*.lib/ - pdb: - /*$TARGET*.pdb/ - - unix: &library_dynamic_unix - <<: *library_dynamic_common - dynamic: - debug: - so: - - /lib*$TARGET*.$EXT_DYN/ - - /*$TARGET*.$EXT_DYN/ - relwithdebinfo: - so: - - /lib*$TARGET*.$EXT_DYN/ - - 
/*$TARGET*.$EXT_DYN/ - release: - so: - - /lib*$TARGET*.$EXT_DYN/ - - /*$TARGET*.$EXT_DYN/ - windows_*-msvc_*-*: - <<: *library_dynamic_windows - default: - <<: *library_dynamic_unix - -executable: &executable - windows: &executable_windows_common - executable: - release: - bin: - /*${TARGET}.exe/ - unix: &executable_unix_common - executable: - release: - bin: - /*${TARGET}/ - windows_*-msvc_*-*: - <<: *executable_windows_common - default: - <<: *executable_unix_common - -executable_exact: &executable_exact - windows: &executable_exact_windows_common - executable: - release: - bin: - - ${TARGET}.exe - - bin/${TARGET}.exe - - dll/${TARGET}.exe - debug: - bin: - - ${TARGET}.exe - - bin/${TARGET}.exe - - dll/${TARGET}.exe - unix: &executable_exact_unix_common - executable: - release: - bin: - - $TARGET - - bin/$TARGET - - dll/$TARGET - debug: - bin: - - $TARGET - - bin/$TARGET - - dll/$TARGET - windows_*-msvc_*-*: - <<: *executable_exact_windows_common - default: - <<: *executable_exact_unix_common - -library_dynamic_exact: &library_dynamic_exact - common: &library_dynamic_exact_common - include: - - $PLATFORM/include - - include - windows: &library_dynamic_exact_windows - <<: *library_dynamic_exact_common - dynamic: - debug: - dll: - - ${TARGET}d.dll - - bin/${TARGET}d.dll - - Debug/${TARGET}d.dll - - dll/${TARGET}d.dll - - ${TARGET}_D.dll - - bin/${TARGET}_D.dll - - Debug/${TARGET}_D.dll - - dll/${TARGET}_D.dll - - $TARGET.dll - - bin/$TARGET.dll - - Debug/$TARGET.dll - - dll/$TARGET.dll - lib: - - ${TARGET}d.lib - - lib/${TARGET}d.lib - - bin/${TARGET}d.lib - - Debug/${TARGET}d.lib - - dll/${TARGET}d.lib - - ${TARGET}_D.lib - - lib/${TARGET}_D.lib - - bin/${TARGET}_D.lib - - Debug/${TARGET}_D.lib - - dll/${TARGET}_D.lib - - $TARGET.lib - - lib/$TARGET.lib - - bin/$TARGET.lib - - Debug/$TARGET.lib - - dll/$TARGET.lib - pdb: - - ${TARGET}d.pdb - - pdb/${TARGET}d.pdb - - bin/${TARGET}d.pdb - - Debug/${TARGET}d.pdb - - dll/${TARGET}d.pdb - - ${TARGET}_D.pdb - - pdb/${TARGET}_D.pdb - - bin/${TARGET}_D.pdb - - Debug/${TARGET}_D.pdb - - dll/${TARGET}_D.pdb - - $TARGET.pdb - - pdb/$TARGET.pdb - - bin/$TARGET.pdb - - Debug/$TARGET.pdb - - dll/$TARGET.pdb - relwithdebinfo: - dll: - - $TARGET.dll - - bin/$TARGET.dll - - RelWithDebInfo/$TARGET.dll - - dll/$TARGET.dll - lib: - - $TARGET.lib - - lib/$TARGET.lib - - bin/$TARGET.lib - - RelWithDebInfo/$TARGET.lib - - dll/$TARGET.lib - pdb: - - $TARGET.pdb - - pdb/$TARGET.pdb - - bin/$TARGET.pdb - - RelWithDebInfo/$TARGET.pdb - - dll/$TARGET.pdb - release: - dll: - - $TARGET.dll - - bin/$TARGET.dll - - Release/$TARGET.dll - - dll/$TARGET.dll - lib: - - $TARGET.lib - - lib/$TARGET.lib - - bin/$TARGET.lib - - Release/$TARGET.lib - - dll/$TARGET.lib - pdb: - - $TARGET.pdb - - pdb/$TARGET.pdb - - bin/$TARGET.pdb - - Release/$TARGET.pdb - - dll/$TARGET.pdb - - unix: &library_dynamic_exact_unix - <<: *library_dynamic_exact_common - dynamic: - debug: - so: - - Debug/lib${TARGET}d.$EXT_DYN - - Debug/lib${TARGET}_D.$EXT_DYN - - Debug/lib${TARGET}_debug.$EXT_DYN - - Debug/lib${TARGET}-d.$EXT_DYN - - Debug/lib${TARGET}.$EXT_DYN - ##################### - - bin/lib${TARGET}d.$EXT_DYN - - bin/lib${TARGET}_D.$EXT_DYN - - bin/lib${TARGET}_debug.$EXT_DYN - - bin/lib${TARGET}-d.$EXT_DYN - - bin/lib${TARGET}.$EXT_DYN - ##################### - - lib/lib${TARGET}d.$EXT_DYN - - lib/lib${TARGET}_D.$EXT_DYN - - lib/lib${TARGET}_debug.$EXT_DYN - - lib/lib${TARGET}-d.$EXT_DYN - - lib/lib${TARGET}.$EXT_DYN - ##################### - - lib${ARCH}/lib${TARGET}d.$EXT_DYN - - 
lib${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib${ARCH}/lib${TARGET}.$EXT_DYN - ##################### - - lib${TARGET}d.$EXT_DYN - - lib${TARGET}_D.$EXT_DYN - - lib${TARGET}_debug.$EXT_DYN - - lib${TARGET}-d.$EXT_DYN - - lib${TARGET}.$EXT_DYN - ###################### - - lib/${ARCH}/lib${TARGET}d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}.$EXT_DYN - relwithdebinfo: - so: - - RelWithDebInfo/lib${TARGET}d.$EXT_DYN - - RelWithDebInfo/lib${TARGET}_D.$EXT_DYN - - RelWithDebInfo/lib${TARGET}_debug.$EXT_DYN - - RelWithDebInfo/lib${TARGET}-d.$EXT_DYN - - RelWithDebInfo/lib${TARGET}.$EXT_DYN - ##################### - - bin/lib${TARGET}d.$EXT_DYN - - bin/lib${TARGET}_D.$EXT_DYN - - bin/lib${TARGET}_debug.$EXT_DYN - - bin/lib${TARGET}-d.$EXT_DYN - - bin/lib${TARGET}.$EXT_DYN - ##################### - - lib/lib${TARGET}d.$EXT_DYN - - lib/lib${TARGET}_D.$EXT_DYN - - lib/lib${TARGET}_debug.$EXT_DYN - - lib/lib${TARGET}-d.$EXT_DYN - - lib/lib${TARGET}.$EXT_DYN - ##################### - - lib${ARCH}/lib${TARGET}d.$EXT_DYN - - lib${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib${ARCH}/lib${TARGET}.$EXT_DYN - ##################### - - lib${TARGET}d.$EXT_DYN - - lib${TARGET}_D.$EXT_DYN - - lib${TARGET}_debug.$EXT_DYN - - lib${TARGET}-d.$EXT_DYN - - lib${TARGET}.$EXT_DYN - ###################### - - lib/${ARCH}/lib${TARGET}d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}.$EXT_DYN - release: - so: - - Release/lib$TARGET.$EXT_DYN - - bin/lib$TARGET.$EXT_DYN - - lib/lib$TARGET.$EXT_DYN - - lib${ARCH}/lib$TARGET.$EXT_DYN - - lib$TARGET.$EXT_DYN - - lib/${ARCH}/lib$TARGET.$EXT_DYN - windows_*-msvc_*-*: - <<: *library_dynamic_exact_windows - default: - <<: *library_dynamic_exact_unix - -library_static: &library_static - common: &library_static_common - include: - - $PLATFORM/include - - include - static: - debug: - lib: - /*$TARGET*.$EXT_STA/ - relwithdebinfo: - lib: - /*$TARGET*.$EXT_STA/ - release: - lib: - /*$TARGET*.$EXT_STA/ - windows_*-msvc_*-*: - <<: *library_static_common - default: - <<: *library_static_common - -library_static_exact: &library_static_exact - common: &library_static_exact_common - include: - - $PLATFORM/include - - include - static: - debug: - lib: - - Debug/lib${TARGET}d.$EXT_STA - - Debug/lib${TARGET}-d.$EXT_STA - - Debug/lib${TARGET}.$EXT_STA - - Debug/${TARGET}d.$EXT_STA - - Debug/${TARGET}-d.$EXT_STA - - Debug/${TARGET}.$EXT_STA - ################ - - lib${TARGET}d.$EXT_STA - - lib${TARGET}-d.$EXT_STA - - lib${TARGET}.$EXT_STA - - ${TARGET}d.$EXT_STA - - ${TARGET}-d.$EXT_STA - - ${TARGET}.$EXT_STA - ################ - - lib/lib${TARGET}d.$EXT_STA - - lib/lib${TARGET}-d.$EXT_STA - - lib/lib${TARGET}.$EXT_STA - - lib/${TARGET}d.$EXT_STA - - lib/${TARGET}-d.$EXT_STA - - lib/${TARGET}.$EXT_STA - relwithdebinfo: - lib: - - RelWithDebInfo/lib${TARGET}d.$EXT_STA - - RelWithDebInfo/lib${TARGET}-d.$EXT_STA - - RelWithDebInfo/lib${TARGET}.$EXT_STA - - RelWithDebInfo/${TARGET}d.$EXT_STA - - RelWithDebInfo/${TARGET}-d.$EXT_STA - - RelWithDebInfo/${TARGET}.$EXT_STA - ################ - - lib${TARGET}d.$EXT_STA - - lib${TARGET}-d.$EXT_STA - - lib${TARGET}.$EXT_STA - - ${TARGET}d.$EXT_STA - - ${TARGET}-d.$EXT_STA - - 
${TARGET}.$EXT_STA - ################ - - lib/lib${TARGET}d.$EXT_STA - - lib/lib${TARGET}-d.$EXT_STA - - lib/lib${TARGET}.$EXT_STA - - lib/${TARGET}d.$EXT_STA - - lib/${TARGET}-d.$EXT_STA - - lib/${TARGET}.$EXT_STA - release: - lib: - - Release/lib${TARGET}.$EXT_STA - - Release/${TARGET}.$EXT_STA - ################ - - lib${TARGET}.$EXT_STA - - ${TARGET}.$EXT_STA - ################ - - lib/lib${TARGET}.$EXT_STA - - lib/${TARGET}.$EXT_STA - windows_*-msvc_*-*: - <<: *library_static_exact_common - default: - <<: *library_static_exact_common - -# when need distribute dll (only windows) but dont need linking -library_only_dll: &library_only_dll - windows: &library_only_dll_windows - add_3rdparty_dependencies: false - lib_provided: false - dynamic: - debug: - dll: - /*$TARGET*.dll/ - pdb: - /*$TARGET*.pdb/ - relwithdebinfo: - dll: - /*$TARGET*.dll/ - pdb: - /*$TARGET*.pdb/ - release: - dll: - /*$TARGET*.dll/ - pdb: - /*$TARGET*.pdb/ - -library_dynamic_boost: &library_dynamic_boost - common: &common_boost - include: - - $PLATFORM/include - - include - definitions: - - -D${PACKAGE_UPPER}_DYN_LINK - windows: &windows_dynamic_boost - <<: *common_boost - dynamic: - debug: - dll: - /$TARGET-*-mt-*d-*_*.dll/ - lib: - /$TARGET-*-mt-*d-*_*.lib/ - pdb: - null - relwithdebinfo: - dll: - /$TARGET-*-mt-*_*.dll/ - lib: - /$TARGET-*-mt-*_*.dll/ - pdb: - null - release: - dll: - /$TARGET-*-mt-*_*.dll/ - lib: - /$TARGET-*-mt-*_*.lib/ - pdb: - null - unix: &unix_dynamic_boost - <<: *common_boost - dynamic: - debug: - so: - /lib$TARGET-*-mt-*d-*_*.$EXT_DYN/ - relwithdebinfo: - so: - /lib$TARGET-*-mt-*_*.$EXT_DYN/ - release: - so: - /lib$TARGET-*-mt-*_*.$EXT_DYN/ - windows_*-msvc_*-*: - <<: *windows_dynamic_boost - default: - <<: *unix_dynamic_boost - diff --git a/node_modules/npm-mas-mas/cmaki_generator/compilation.py b/node_modules/npm-mas-mas/cmaki_generator/compilation.py deleted file mode 100644 index b80af0f..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/compilation.py +++ /dev/null @@ -1,238 +0,0 @@ -import os -import utils -import logging -import pipeline -from itertools import product -from third_party import platforms -from third_party import CMAKELIB_URL -from third_party import get_identifier - - -def search_cmakelib(): - # compilando desde cmaki_generator - cmakelib_dir = os.path.join('output', '3rdparties', 'cmaki') - if not os.path.isdir(cmakelib_dir): - # compilando una dependencia - cmakelib_dir = os.path.join('..', 'cmaki') - if not os.path.isdir(cmakelib_dir): - # compilando proeycto raiz - cmakelib_dir = os.path.join('node_modules', 'cmaki') - if not os.path.isdir(cmakelib_dir): - raise Exception("not found cmaki: {}".format(os.path.abspath(cmakelib_dir))) - return os.path.abspath(cmakelib_dir) - - -def compilation(node, parameters, compiler_replace_maps): - - package = node.get_package_name() - package_norm = node.get_package_name_norm() - version = node.get_version() - - cmake3p_dir = parameters.prefix - cmake3p_dir = utils.get_norm_path(cmake3p_dir) - cmake3p_dir = cmake3p_dir.replace('\\', '/') - - cmakefiles_dir = search_cmakelib() - - package_upper = node.get_package_name_norm_upper() - parms = node.parameters - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - install_directory = node.get_install_directory(plat) - utils.trymkdir(install_directory) - build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) - utils.trymkdir(build_directory) - with utils.working_directory(build_directory): - # get 
generator and platform info - for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): - - logging.info('-- compilation mode: %s plat: %s' % (build_mode, plat)) - - ############# 1. prepare vars - - if build_mode.lower() == 'debug': - try: - env_modified['CFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CFLAGS'] - except KeyError: - env_modified['CFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG' - try: - env_modified['CPPFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CPPFLAGS'] - except KeyError: - env_modified['CPPFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG' - elif build_mode.lower() == 'relwithdebinfo': - try: - env_modified['CFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CFLAGS'] - except KeyError: - env_modified['CFLAGS'] = '-g -O2 -DNDEBUG' - try: - env_modified['CPPFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CPPFLAGS'] - except KeyError: - env_modified['CPPFLAGS'] = '-g -O2 -DNDEBUG' - elif build_mode.lower() == 'release': - # default packages assume came in release - try: - env_modified['CFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CFLAGS'] - except KeyError: - env_modified['CFLAGS'] = '-O3 -DNDEBUG' - try: - env_modified['CPPFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CPPFLAGS'] - except KeyError: - env_modified['CPPFLAGS'] = '-O3 -DNDEBUG' - - cores = utils.detect_ncpus() - half_cores = cores / 2 - env_modified['CORES'] = str(cores) - env_modified['HALF_CORES'] = str(half_cores) - env_modified['GTC_PREFIX'] = parameters.prefix - env_modified['CMAKELIB_URL'] = CMAKELIB_URL - env_modified['BUILD_MODE'] = str(build_mode) - # env_modified['NPP_SERVER'] = ... - env_modified['SOURCES'] = os.path.abspath(os.path.join('..', node.get_download_directory())) - env_modified['CMAKI_DIR'] = cmakefiles_dir - env_modified['SELFHOME'] = install_directory - env_modified['CMAKI_PWD'] = build_directory - env_modified['CMAKI_INSTALL'] = install_directory - - ################# - # remove cmake3p of node - node.remove_cmake3p(cmake3p_dir) - - # show env vars - node.show_environment_vars(env_modified) - - # remove CMakeCache.txt for avoid problems when - # change of generator - utils.tryremove('CMakeCache.txt') - utils.tryremove('cmake_install.cmake') - utils.tryremove('install_manifest.txt') - utils.tryremove_dir('CMakeFiles') - ################# - - generator_extra = '' - if generator is not None: - generator_extra = '-G"%s"' % generator - - cmakefiles_dir = parameters.cmakefiles - cmakefiles_dir = cmakefiles_dir.replace('\\', '/') - - cmake_prefix_path = parameters.third_party_dir - cmake_prefix_path = cmake_prefix_path.replace('\\', '/') - - build_directory = build_directory.replace('\\', '/') - - # resolve replace maps - compiler_replace_resolved = {} - for var, value in compiler_replace_maps.items(): - newvalue = value - newvalue = newvalue.replace('$PLATFORM', plat) - compiler_replace_resolved[var] = newvalue - - # begin definitions cmake - try: - cmake_definitions_list_original = parms['cmake_definitions'] - cmake_definitions_list = [] - for define in cmake_definitions_list_original: - # TODO: resolver tus variables directas e indirectas (de dependencias) - define = define.replace('$%s_HOME' % package_norm, install_directory) - # apply replaces - cmake_definitions_list.append( utils.apply_replaces(define, compiler_replace_resolved) ) - except KeyError: - cmake_definitions_list = [] - - # add cflags and cppflags to cmake_definitions - try: - cmake_definitions_list.append( 'CMAKE_C_FLAGS="%s"' % env_modified['CFLAGS'] ) - except KeyError: - 
pass - try: - cmake_definitions_list.append( 'CMAKE_CXX_FLAGS="%s"' % env_modified['CPPFLAGS'] ) - except KeyError: - pass - - definitions_extra = '' - for definition in cmake_definitions_list: - definitions_extra += ' -D%s' % definition - # end definitions cmake - - if (not 'CMAKE_TOOLCHAIN_FILE' in env_modified) or (not env_modified['CMAKE_TOOLCHAIN_FILE']) or (env_modified['CMAKE_TOOLCHAIN_FILE'] == "no cross compile"): - cmake_toolchain_file_filepath='' - else: - cmake_toolchain_file_filepath=' -DCMAKE_TOOLCHAIN_FILE="{}"'.format(env_modified['CMAKE_TOOLCHAIN_FILE']) - - cmake_prefix = node.get_cmake_prefix() - cmake_configure = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKE_MODULE_PATH=%s -DCMAKI_PATH=%s -DCMAKE_BUILD_TYPE=%s -DCMAKE_PREFIX_PATH=%s -DPACKAGE=%s -DPACKAGE_UPPER=%s -DPACKAGE_VERSION=%s -DPACKAGE_BUILD_DIRECTORY=%s -DCMAKI_COMPILER=%s -DCMAKI_IDENTIFIER=%s -DCMAKI_PLATFORM=%s %s %s' % (generator_extra, cmake_prefix, cmake3p_dir, cmakefiles_dir, cmakefiles_dir, build_mode, cmake_prefix_path, package, package_upper, version, build_directory, get_identifier('COMPILER'), get_identifier('ALL'), get_identifier('ALL'), definitions_extra, cmake_toolchain_file_filepath) - - target = node.get_cmake_target() - if target is not None: - cmake_build = 'cmake --build . --target %s --config %s' % (target, build_mode) - else: - cmake_build = 'cmake --build . --config %s' % build_mode - - env_modified['CMAKE_CONFIGURE'] = cmake_configure.replace(r'"', r"'") - env_modified['CMAKE_BUILD'] = cmake_build.replace(r'"', r"'") - - ########## 2. execute - - executed_build_script = False - if utils.is_windows(): - for build_script in ['.build.cmd', 'build.cmd']: - if os.path.exists(build_script): - # execute manual build script - node.ret += abs(utils.safe_system('%s %s %s %s %s %s' % (build_script, install_directory, package, version, plat, build_mode), env=env_modified)) - executed_build_script = True - else: - for build_script in ['.build.sh', 'build.sh']: - if os.path.exists(build_script): - # show vars - node.show_environment_vars(env_modified) - - node.ret += abs(utils.safe_system('chmod +x %s && ./%s %s %s %s %s %s' % (build_script, build_script, install_directory, package, version, plat, build_mode), env=env_modified)) - executed_build_script = True - - if not executed_build_script: - logging.debug('configure command: %s' % cmake_configure) - - ret = utils.safe_system(cmake_configure, env=env_modified) - if ret == 0: - logging.debug('build command: %s' % cmake_configure) - node.ret += abs(utils.safe_system(cmake_build, env=env_modified)) - else: - logging.warning('Configuration failed. See log: %s' % parameters.log) - node.ret += abs(ret) - - ######## 3. manual install - - # post-install - logging.debug('begin post-install') - for bc in node.get_post_install(): - chunks = [x.strip() for x in bc.split(' ') if x] - if(len(chunks) != 2) and (len(chunks) != 3): - raise Exception('Invalid value in post_install: %s. 
Expected [source pattern destiny]' % bc) - - source_folder = os.path.join(build_directory, os.path.dirname(chunks[0])) - install_directory_chunk = os.path.join(install_directory, chunks[1]) - pattern = os.path.basename(chunks[0]) - logging.debug('copy %s/%s to %s' % (source_folder, pattern, install_directory_chunk)) - - # create directory if not exists - utils.trymkdir(install_directory_chunk) - - p = pipeline.make_pipe() - # begin - if len(chunks) == 3: - p = pipeline.find(source_folder, 99)(p) - else: - p = pipeline.find(source_folder, 0)(p) - p = pipeline.grep_basename(pattern)(p) - p = pipeline.copy(source_folder, install_directory_chunk)(p) - p = pipeline.debug('copied ')(p) - # end - pipeline.end_pipe()(p) - logging.debug('end post-install') - - if parameters.fast: - logging.debug('skipping for because is in fast mode: "compilation"') - break - - # finish well - return True diff --git a/node_modules/npm-mas-mas/cmaki_generator/download_package.py b/node_modules/npm-mas-mas/cmaki_generator/download_package.py deleted file mode 100644 index 23fc656..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/download_package.py +++ /dev/null @@ -1,11 +0,0 @@ -from requests import get # to make GET request - -def download_from_url(url, file_name): - with open(file_name, "wb") as file: - response = get(url) - file.write(response.content) - -url = 'http://localhost:8080/cpp/download.php?file=json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz' - -print( download_from_url(url, "json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz") ) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/get_package.py b/node_modules/npm-mas-mas/cmaki_generator/get_package.py deleted file mode 100755 index e450ee0..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/get_package.py +++ /dev/null @@ -1,26 +0,0 @@ -import os -import sys -import logging -import argparse -import urllib -import csv -import utils - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--name', required=True, dest='name', help='name package', default=None) - parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) - parameters = parser.parse_args() - - depends_file = parameters.depends - if os.path.exists(depends_file): - data = utils.deserialize(depends_file) - # data = utils.deserialize_json(depends_file) - else: - data = {} - if parameters.name in data: - print (data[parameters.name]) - sys.exit(0) - else: - sys.exit(1) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py b/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py deleted file mode 100644 index c407dd7..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging - - -def set_state(rets, key, value): - if not key in rets: - rets[key] = value - else: - logging.warning('Received in pipeline multiples packages with same name and version: %s' % key) - set_state(rets, key + '_', value) - - -def get_return_code(parameters, rets): - def process(packages): - for node in packages: - try: - # process package - name = node.get_package_name() - version = node.get_version() - - if len(node.exceptions) > 0: - state = "EXCEPTION in %s" % node.fail_stage - elif node.interrupted: - state = "INTERRUPTED in %s" % node.fail_stage - elif (node.ret != 0): - state = "FAILED in %s" % node.fail_stage - else: - state = "OK" - - key = '%s - %s' % (name, version) - set_state(rets, key, state) - 
finally: - # send to next step - yield node - return process - diff --git a/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt deleted file mode 100644 index 2d06137..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt +++ /dev/null @@ -1,47 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -cmake_policy(SET CMP0011 NEW) - -include_directories(gwen/include) - -#ADD_DEFINITIONS(-DGWEN_COMPILE_STATIC -D_HAS_EXCEPTIONS=0 -D_STATIC_CPPLIB) -ADD_DEFINITIONS(-DGWEN_COMPILE_DLL) - -IF(WIN32) - -ELSE() - add_definitions(-std=c++11) -ENDIF() - -file(GLOB SOURCE_CODE1 gwen/src/*.cpp) -file(GLOB SOURCE_CODE2 gwen/src/Controls/*.cpp) -file(GLOB SOURCE_CODE3 gwen/src/Controls/Dialog/*.cpp) -file(GLOB SOURCE_CODE4 gwen/src/Platforms/*.cpp) - -add_library(${PACKAGE} SHARED ${SOURCE_CODE1} ${SOURCE_CODE2} ${SOURCE_CODE3} ${SOURCE_CODE4}) - -file(GLOB HEADER_CODE1 gwen/include/Gwen/*.h) -INSTALL( FILES ${HEADER_CODE1} - DESTINATION "include/${PACKAGE}") - -file(GLOB HEADER_CODE2 gwen/include/Gwen/Controls/*.h) -INSTALL( FILES ${HEADER_CODE2} - DESTINATION "include/${PACKAGE}/Controls") - -file(GLOB HEADER_CODE3 gwen/include/Gwen/Controls/Dialog/*.h) -INSTALL( FILES ${HEADER_CODE3} - DESTINATION "include/${PACKAGE}/Controls/Dialog") - -file(GLOB HEADER_CODE4 gwen/include/Gwen/Input/*.h) -INSTALL( FILES ${HEADER_CODE4} - DESTINATION "include/${PACKAGE}/Input") - -file(GLOB HEADER_CODE5 gwen/include/Gwen/Renderers/*.h) -INSTALL( FILES ${HEADER_CODE5} - DESTINATION "include/${PACKAGE}/Renderers") - -file(GLOB HEADER_CODE6 gwen/include/Gwen/Skins/*.h) -INSTALL( FILES ${HEADER_CODE6} - DESTINATION "include/${PACKAGE}/Skins") - -INSTALL( FILES gwen/bin/DefaultSkin.png - DESTINATION "bin") diff --git a/node_modules/npm-mas-mas/cmaki_generator/hash_version.py b/node_modules/npm-mas-mas/cmaki_generator/hash_version.py deleted file mode 100644 index f5e56cb..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/hash_version.py +++ /dev/null @@ -1,172 +0,0 @@ -import os -import contextlib -import utils -import time -from datetime import datetime -from utils import get_stdout -from email.utils import parsedate - - -def get_revision_svn(repo): - ''' - This command need svn in PATH - ''' - cmd = "svn info %s" % repo - for line in get_stdout(cmd): - if line.startswith('Last') or (line.startswith('Revisi') and (line.find('cambio') != -1)): - pos = line.rindex(':') - return int(line[pos+2:]) - return -1 - - -def get_timestamp_from_changeset(repo, changeset_searched): - ''' - generator of commits - ''' - with utils.working_directory(repo): - lines = [] - for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): - lines.append(line) - for line in reversed(lines): - chunks = line.split(";") - assert(len(chunks) == 2) - changeset = chunks[0] - timestamp = int(time.mktime(parsedate(chunks[1]))) - if changeset_searched == changeset: - return timestamp - raise Exception('Error in get timestamp from changeset {}'.format(changeset_searched)) - - -def git_log_gen(repo, number=1, extra=''): - ''' - generator of commits - ''' - with utils.working_directory(repo): - for line in get_stdout('git log -%d %s' % (number, extra)): - if line.startswith('commit'): - parts = line.split(' ') - assert(len(parts) == 2) - commit_name = parts[1] - yield commit_name - - -def get_changeset_git_from_position(repo, position = 0): - with utils.working_directory(repo): - i = 1 - lines = [] - for line in get_stdout('git log'): - lines.append(line) - for line 
in reversed(lines): - if line.startswith('commit'): - parts = line.split(' ') - assert(len(parts) == 2) - commit_name = parts[1] - if i == position: - return commit_name - else: - i += 1 - raise Exception('Error in get git hash from position {}'.format(position)) - - -def get_changeset_from_timestamp(repo, timestamp_searched): - with utils.working_directory(repo): - lines = [] - for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): - lines.append(line) - for line in reversed(lines): - chunks = line.split(";") - assert(len(chunks) == 2) - changeset = chunks[0] - timestamp = int(time.mktime(parsedate(chunks[1]))) - if timestamp_searched == timestamp: - return changeset - raise Exception('Error in get git hash from timestamp {}'.format(timestamp_searched)) - - -def get_position_git_from_changeset(repo, changeset): - with working_directory(repo): - i = 1 - lines = [] - for line in get_stdout('git log'): - lines.append(line) - for line in reversed(lines): - if line.startswith('commit'): - parts = line.split(' ') - if len(parts) == 2: - commit_name = parts[1] - if commit_name == changeset: - return i - else: - i += 1 - return -1 - - -def get_last_changeset(repo, short=False): - for changeset in git_log_gen(repo, number=1): - if short: - return changeset[:7] - else: - return changeset - return "" - - -def get_last_version(repo): - return to_cmaki_version(repo, get_last_changeset(repo)) - - -def rehash_simple(commit_name, position): - separator = '000' - return int(separator.join(list(str(ord(character)) for character in commit_name))) % position - - -@contextlib.contextmanager -def working_directory(path): - prev_cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(prev_cwd) - - -def to_cmaki_version(repo, changeset): - ''' - git hash ----> 0.0.x.x - ''' - position = get_timestamp_from_changeset(repo, changeset) - hash_simple = rehash_simple(changeset, position) - versions = [] - versions.append('0') - versions.append('0') - versions.append(str(position)) - versions.append(str(hash_simple)) - return '.'.join(versions) - - -def to_git_version(repo, version): - ''' - 0.0.x.x ----> git hash - ''' - version = version.split('.') - assert(len(version) == 4) - position = int(version[2]) - pseudohash = int(version[3]) - changeset = get_changeset_from_timestamp(repo, position) - hash_simple = rehash_simple(changeset, position) - assert( get_timestamp_from_changeset(repo, changeset) == position ) - assert( hash_simple == pseudohash ) - return changeset - - -if __name__ == '__main__': - - local_path = r'/home/ricardo/dev/fast-event-system' - - for commit_name in git_log_gen(local_path, 10): - cmaki_version = to_cmaki_version(local_path, commit_name) - print ("%s -> %s" % (commit_name, cmaki_version)) - commit_name2 = to_git_version(local_path, cmaki_version) - print ("%s -> %s" % (cmaki_version, commit_name2)) - print () - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl deleted file mode 100644 index 8ba21f4..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - BuildName: - BuildStamp: - Name: - Generator: - CompilerName: - OSName: - Hostname: - OSRelease: - OSVersion: - OSPlatform: - Is64Bits: - VendorString: - VendorID: - FamilyID: - ModelID: - ProcessorCacheSize: - NumberOfLogicalCPU: - NumberOfPhysicalCPU: - 
TotalVirtualMemory: - TotalPhysicalMemory: - LogicalProcessorsPerPhysical: - ProcessorClockFrequency: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/junit/README.md b/node_modules/npm-mas-mas/cmaki_generator/junit/README.md deleted file mode 100644 index 4f989c6..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/junit/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Source -https://bitbucket.org/shackra/ctest-jenkins/ - diff --git a/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt deleted file mode 100644 index bc1e512..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt +++ /dev/null @@ -1,687 +0,0 @@ -#=================================== -# Build script for libRocket ======= -#=================================== - -if(APPLE) - if(IOS_PLATFORM) - set(CMAKE_TOOLCHAIN_FILE cmake/Platform/iOS.cmake) - endif(IOS_PLATFORM) -endif(APPLE) - -# We use the new OSX_ARCHITECTURES property -# and GNUInstallDirs module -cmake_minimum_required(VERSION 2.8.5) - -if(COMMAND cmake_policy) - cmake_policy(SET CMP0015 NEW) -endif(COMMAND cmake_policy) - -project(libRocket C CXX) - -# paths -include(GNUInstallDirs) - -set(LIBROCKET_VERSION_MAJOR 1) -set(LIBROCKET_VERSION_MINOR 3) -set(LIBROCKET_VERSION_PATCH 0) -set(LIBROCKET_VERSION_TWEAK 0) -set(PROJECT_VERSION ${LIBROCKET_VERSION_MAJOR}.${LIBROCKET_VERSION_MINOR}.${LIBROCKET_VERSION_PATCH}.${LIBROCKET_VERSION_TWEAK}) - -# Search in the 'cmake' directory for additional CMake modules. -list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) - -# Old versions of CMake need some updated Modules, but we don't want -# to override newer versions of CMake which have working versions -if(CMAKE_MAJOR_VERSION LESS 3) - list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/v2fixes) -endif() - -#=================================== -# Environment tests ================ -#=================================== - -include(TestForANSIForScope) -include(TestForANSIStreamHeaders) -include(TestForSTDNamespace) - -#=================================== -# Provide hints as to where depends= -# might be found = -#=================================== - -if(NOT DEFINED ENV{FREETYPE_DIR}) - set(ENV{FREETYPE_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{Boost_DIR}) - set(ENV{Boost_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{LUA_DIR}) - set(ENV{LUA_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{SDLDIR}) - set(ENV{SDLDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{SDLIMAGEDIR}) - set(ENV{SDLIMAGEDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{SFML_ROOT}) - set(ENV{SFML_ROOT} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -#=================================== -# Plaform specific global hacks ==== -#=================================== - -if(APPLE) - # Disables naked builtins from AssertMacros.h which - # This prevents naming collisions such as those from the check() - # function macro with LuaType::check - add_definitions(-D__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=0) -endif(APPLE) - -#=================================== -# Build options ==================== -#=================================== - -if(NOT CMAKE_BUILD_TYPE) - set(CMAKE_BUILD_TYPE Release CACHE STRING - "Choose the type of build, options 
are: None Debug Release RelWithDebInfo MinSizeRel." - FORCE) -endif() - -if(NOT IOS) - option(BUILD_SHARED_LIBS "Build shared libraries" ON) -endif(NOT IOS) - -option(BUILD_PYTHON_BINDINGS "Build python bindings" OFF) -option(BUILD_LUA_BINDINGS "Build Lua bindings" OFF) -option(BUILD_SAMPLES "Build samples" OFF) -if(WIN32) - option(SKIP_DIRECTX_SAMPLES "Skip build of all DirectX related samples. Only applies if BUILD_SAMPLES is ON" OFF) - option(SKIP_DIRECTX9_SAMPLE "Skip build of DirectX 9 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) - option(SKIP_DIRECTX10_SAMPLE "Skip build of DirectX 10 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) -endif() - -if(IOS) - if(BUILD_SHARED_LIBS) - message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. iOS does not support shared libraries.") - endif(BUILD_SHARED_LIBS) -endif(IOS) - -if(IOS) - if(BUILD_SHARED_LIBS) - message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. iOS does not support shared libraries.") - endif(BUILD_SHARED_LIBS) -endif(IOS) - -if(NOT BUILD_SHARED_LIBS) - add_definitions(-DSTATIC_LIB) -endif() - -#on windows, check for VC10 and fix the multiple compile target issue. -IF(WIN32) - if(MSVC) - if(${MSVC_VERSION} STREQUAL 1600 OR ${MSVC_VERSION} STRGREATER 1600) - message("Visual Studio 2010 (${MSVC_VERSION}) build fix at play (/FORCE:MULTIPLE)") - set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /FORCE:MULTIPLE") - endif() - endif() -ENDIF(WIN32) - - -#=================================== -# Find dependencies ================ -#=================================== - -cmaki_find_package(dune-freetype) -include_directories(${DUNE-FREETYPE_INCLUDE_DIRS}) -list(APPEND CORE_LINK_LIBS ${DUNE-FREETYPE_LIBRARIES}) - -# # FreeType -# if(CMAKE_MAJOR_VERSION LESS 3) -# # Freetype changed the layout of its header files, we need to use -# # the FindFreetype module from cmake v3 at least, included here -# find_package(Freetype-v2fix REQUIRED) -# else() -# find_package(Freetype REQUIRED) -# endif() -# -# if(FREETYPE_FOUND) -# include_directories(${FREETYPE_INCLUDE_DIRS}) -# link_directories(${FREETYPE_LINK_DIRS}) -# list(APPEND CORE_LINK_LIBS ${FREETYPE_LIBRARY}) -# endif() -# mark_as_advanced(FREETYPE_INCLUDE_DIRS FREETYPE_LIBRARY FREETYPE_LINK_DIRECTORIES) - -# Boost and Python -if(BUILD_PYTHON_BINDINGS) - find_package(PythonInterp 2 REQUIRED) - find_package(PythonLibs 2 REQUIRED) - execute_process( - COMMAND ${PYTHON_EXECUTABLE} -c "from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,prefix=''))" - OUTPUT_VARIABLE PYTHON_INSTDIR - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - if(PYTHONLIBS_FOUND) - include_directories(${PYTHON_INCLUDE_DIR}) - endif() - - #set(Boost_USE_STATIC_LIBS OFF) - #set(Boost_USE_MULTITHREADED ON) - find_package(Boost 1.40.0 COMPONENTS python REQUIRED) - if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIR}) - list(APPEND PY_BINDINGS_LINK_LIBS ${PYTHON_LIBRARY} ${Boost_LIBRARIES}) - endif() - -endif() - -#Lua -if(BUILD_LUA_BINDINGS) - if(CMAKE_MAJOR_VERSION LESS 3) - find_package(Lua-v2fix) - else() - find_package(Lua) - endif() - if(LUA_FOUND) - include_directories(${LUA_INCLUDE_DIR}) - list(APPEND LUA_BINDINGS_LINK_LIBS ${LUA_LIBRARIES}) - endif() -endif() - - -#=================================== -# Setup paths ====================== -#=================================== - -set(PROJECT_SOURCE_DIR ${PROJECT_SOURCE_DIR}/..) 
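Note (editor's sketch, not part of the original Build/CMakeLists.txt): the set() above points PROJECT_SOURCE_DIR one directory up, so the Include/ and Samples/ paths used further down resolve from the libRocket root rather than from the Build/ subdirectory. A minimal alternative, assuming a hypothetical LIBROCKET_ROOT_DIR variable, keeps the CMake-managed PROJECT_SOURCE_DIR untouched and derives the root explicitly:

# Hypothetical sketch only: derive the repository root (one level above Build/)
# into a dedicated variable instead of overwriting PROJECT_SOURCE_DIR.
get_filename_component(LIBROCKET_ROOT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/.." ABSOLUTE)
include_directories("${LIBROCKET_ROOT_DIR}/Include")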
- -include_directories( - ${PROJECT_SOURCE_DIR}/Include -) - -# Include list of source files -include(FileList) - -#=================================== -# Build libraries ================== -#=================================== - -set(LIBRARIES Core Controls Debugger) - -foreach(library ${LIBRARIES}) - set(NAME Rocket${library}) - - add_library(${NAME} ${${library}_SRC_FILES} - ${${library}_HDR_FILES} - ${${library}_PUB_HDR_FILES} - ${MASTER_${library}_PUB_HDR_FILES} - ) - - set_target_properties(${NAME} PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${LIBROCKET_VERSION_MAJOR} - ) - - if(APPLE) - if(NOT IOS) - set_target_properties(${NAME} PROPERTIES - OSX_ARCHITECTURES "i386;x86_64;" - ) - endif(NOT IOS) - endif(APPLE) - - install(TARGETS ${NAME} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - ) -endforeach(library) - -# Build python bindings -if(BUILD_PYTHON_BINDINGS) - set(LIBRARIES core controls) - - foreach(library ${LIBRARIES}) - set(NAME _rocket${library}) - - add_library(${NAME} MODULE ${Py${library}_SRC_FILES} - ${Py${library}_HDR_FILES} - ${Py${library}_PUB_HDR_FILES} - ) - - if(APPLE) - if(NOT IOS) - set_target_properties(${NAME} PROPERTIES - OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" - ) - endif(NOT IOS) - endif(APPLE) - - set_target_properties(${NAME} PROPERTIES PREFIX "") - - install(TARGETS ${NAME} - LIBRARY DESTINATION ${PYTHON_INSTDIR} - ) - endforeach(library) -endif() - -# Build Lua bindings -if(BUILD_LUA_BINDINGS) - set(LIBRARIES Core Controls) - - foreach(library ${LIBRARIES}) - set(NAME Rocket${library}Lua) - - add_library(${NAME} ${Lua${library}_SRC_FILES} - ${Lua${library}_HDR_FILES} - ${Lua${library}_PUB_HDR_FILES} - ) - - set_target_properties(${NAME} PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${LIBROCKET_VERSION_MAJOR} - ) - - if(APPLE) - if(NOT IOS) - set_target_properties(${NAME} PROPERTIES - OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" - ) - endif(NOT IOS) - endif(APPLE) - - install(TARGETS ${NAME} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - ) - endforeach(library) -endif() - - -#=================================== -# Link libraries =================== -#=================================== - -target_link_libraries(RocketCore ${CORE_LINK_LIBS}) -target_link_libraries(RocketControls RocketCore) -target_link_libraries(RocketDebugger RocketCore) - -if(BUILD_PYTHON_BINDINGS) - target_link_libraries(_rocketcore RocketCore ${PY_BINDINGS_LINK_LIBS}) - target_link_libraries(_rocketcontrols RocketControls ${PY_BINDINGS_LINK_LIBS}) -endif() - -if(BUILD_LUA_BINDINGS) - target_link_libraries(RocketCoreLua RocketCore ${LUA_BINDINGS_LINK_LIBS}) - target_link_libraries(RocketControlsLua RocketControls RocketCoreLua ${LUA_BINDINGS_LINK_LIBS}) -endif() - - -#=================================== -# Build samples ==================== -#=================================== - -# Build and link the samples -macro(bl_sample NAME) - if (WIN32) - add_executable(${NAME} WIN32 ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) - elseif(APPLE) - add_executable(${NAME} MACOSX_BUNDLE ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) - else() - add_executable(${NAME} ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) - endif() - - if (APPLE) - # We only support i386 for the samples as it still uses Carbon - set_target_properties(${NAME} PROPERTIES OSX_ARCHITECTURES "i386;" ) - endif() - - 
target_link_libraries(${NAME} ${ARGN}) -endmacro() - -if(BUILD_SAMPLES) - include(SampleFileList) - - set(samples treeview customlog drag loaddocument) - set(tutorials template datagrid datagrid_tree tutorial_drag) - - set(sample_LIBRARIES - shell - RocketCore - RocketControls - RocketDebugger - ) - - # Find OpenGL - find_package(OpenGL REQUIRED) - - if(OPENGL_FOUND) - include_directories(${OPENGL_INCLUDE_DIR}) - list(APPEND sample_LIBRARIES ${OPENGL_LIBRARIES}) - endif() - - # Set up required system libraries - if(WIN32) - if(SKIP_DIRECTX_SAMPLES) - message("-- Skipping all DirectX samples") - set(SKIP_DIRECTX9_SAMPLE ON) - set(SKIP_DIRECTX10_SAMPLE ON) - else() - message("-- Determing if DirectX samples can be built") - include(FindDirectX) - find_package(DirectX) - if(DirectX_FOUND) - set(DIRECTX_SAMPLE_LIST) - set(DIRECTX_SKIPPED_SAMPLE_LIST) - - # We should be able to build DirectX 9 sample - message("-- Determing if DirectX samples can be built - Yes") - - if(SKIP_DIRECTX9_SAMPLE) - message("-- Skipping build of DirectX 9 sample: User disabled") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") - else() - if(DirectX_LIBRARY) - if(DirectX_D3DX9_LIBRARY) - list(APPEND DIRECTX_SAMPLE_LIST "DirectX9 ") - else() - set(SKIP_DIRECTX9_SAMPLE ON) - message("-- Skipping build of DirectX 9 sample: DirectX_D3DX9_LIBRARY not found") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") - endif() - else() - set(SKIP_DIRECTX9_SAMPLE ON) - message("-- Skipping build of DirectX 9 sample: DirectX_LIBRARY not found") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") - endif() - endif() - - if(SKIP_DIRECTX10_SAMPLE) - message("-- Skipping build of DirectX 10 sample: User disabled") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") - else() - if(DirectX_D3D10_FOUND) - list(APPEND DIRECTX_SAMPLE_LIST "DirectX10 ") - else() - set(SKIP_DIRECTX10_SAMPLE ON) - message("-- Skipping build of DirectX 10 sample: Missing DirectX_D3D10_INCLUDE_DIR, DirectX_D3D10_LIBRARY or DirectX_D3DX10_LIBRARY") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") - endif() - endif() - - - if(DIRECTX_SAMPLE_LIST) - message("-- Enabled DirectX samples: " ${DIRECTX_SAMPLE_LIST}) - endif() - if(DIRECTX_SKIPPED_SAMPLE_LIST) - message("-- Disabled DirectX samples: " ${DIRECTX_SKIPPED_SAMPLE_LIST}) - endif() - else() - message("-- Determing if DirectX samples can be built - No") - set(SKIP_DIRECTX9_SAMPLE ON) - set(SKIP_DIRECTX10_SAMPLE ON) - endif() - endif() - elseif(APPLE) - include(FindCarbon) - find_package(Carbon REQUIRED) - - if (Carbon_FOUND) - include_directories(${Carbon_INCLUDE_DIR}) - list(APPEND sample_LIBRARIES ${Carbon_LIBRARIES}) - endif() - else() - find_package(X11 REQUIRED) - if (X11_FOUND) - list(APPEND sample_LIBRARIES ${X11_LIBRARIES}) - # shell/src/x11/InputX11.cpp:InitialiseX11Keymap uses Xkb if - # possible instead of XGetKeyboardMapping for performance - if(X11_Xkb_FOUND) - FIND_PACKAGE_MESSAGE(X11 "Found X11 KBlib: ${X11_X11_LIB}" "[${X11_X11_LIB}][${X11_XkbINCLUDE_DIR}]") - add_definitions(-DHAS_X11XKBLIB) - endif() - endif() - endif() - - set(SAMPLES_DIR opt/Rocket/Samples CACHE PATH "path to samples dir") - - # The samples and tutorials use the shell library - include_directories(${PROJECT_SOURCE_DIR}/Samples/shell/include) - - # Build and install sample shell library - add_library(shell STATIC ${shell_SRC_FILES} ${shell_HDR_FILES}) - if (APPLE) - # We only support i386 for the samples as it still uses Carbon - set_target_properties(shell PROPERTIES OSX_ARCHITECTURES "i386;") - endif() 
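For readability, a rough illustration (an assumption added here, not part of the original listing) of what the bl_sample macro defined above amounts to: on a plain Linux build it reduces to an add_executable plus a target_link_libraries call, with the per-sample source lists coming from the SampleFileList include at the top of this block. For the drag sample from the samples list, for example:

# Sketch of bl_sample(drag ${sample_LIBRARIES}) expanded on Linux;
# drag_SRC_FILES and drag_HDR_FILES are defined by include(SampleFileList).
add_executable(drag ${drag_SRC_FILES} ${drag_HDR_FILES})
target_link_libraries(drag ${sample_LIBRARIES})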
- - # Build and install the basic samples - foreach(sample ${samples}) - bl_sample(${sample} ${sample_LIBRARIES}) - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/${sample}) - install(TARGETS ${sample} - RUNTIME DESTINATION ${SAMPLES_DIR}/${sample} - BUNDLE DESTINATION ${SAMPLES_DIR}) - endforeach() - - if(WIN32) - if(NOT SKIP_DIRECTX9_SAMPLE) - include_directories(${DirectX_INCLUDE_DIR}) - - bl_sample(directx ${sample_LIBRARIES} ${DirectX_LIBRARY} ${DirectX_D3DX9_LIBRARY}) - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx) - install(TARGETS directx - RUNTIME DESTINATION ${SAMPLES_DIR}/directx - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - if(NOT SKIP_DIRECTX10_SAMPLE) - include_directories(${DirectX_INCLUDE_DIR} ${DirectX_D3D10_INCLUDE_DIRS}) - - bl_sample(directx10 ${sample_LIBRARIES} ${DirectX_D3D10_LIBRARIES}) - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx10) - install(TARGETS directx10 - RUNTIME DESTINATION ${SAMPLES_DIR}/directx10 - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - endif() - - message("-- Can SDL2 sample be built") - find_package(SDL) - if(SDL_FOUND) - find_package(SDL_image) - if(SDL_IMAGE_FOUND) - find_package(GLEW) - if(GLEW_FOUND) - message("-- Can SDL2 sample be built - yes") - include_directories(${SDL_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) - - bl_sample(sdl2 ${sample_LIBRARIES} ${SDL_LIBRARY} ${SDL_IMAGE_LIBRARY} ${GLEW_LIBRARY}) - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sdl2) - install(TARGETS sdl2 - RUNTIME DESTINATION ${SAMPLES_DIR}/sdl2 - BUNDLE DESTINATION ${SAMPLES_DIR}) - else() - message("-- Can SDL2 sample be built - GLEW not found") - endif() - else() - message("-- Can SDL2 sample be built - SDL2_image not found") - endif() - else() - message("-- Can SDL2 sample be built - SDL2 not found") - endif() - - - message("-- Can SFML 1.x sample be built") - find_package(SFML 1 COMPONENTS graphics window system) - if(NOT SFML_FOUND) - message("-- Can SFML 1.x sample be built - no") - elseif(SFML_VERSION_MAJOR GREATER 1) - message("-- Can SFML 1.x sample be built - no: Version 2 detected") - else() - message("-- Can SFML 1.x sample be built - yes") - - include_directories(${SFML_INCLUDE_DIR}) - - bl_sample(sfml ${sample_LIBRARIES} ${SFML_LIBRARIES}) - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml) - install(TARGETS sfml - RUNTIME DESTINATION ${SAMPLES_DIR}/sfml - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - message("-- Can SFML 2.x sample be built") - find_package(SFML 2 COMPONENTS graphics window system) - if(NOT SFML_FOUND) - message("-- Can SFML 2.x sample be built - no") - else() - find_package(GLEW) - if(GLEW_FOUND) - message("-- Can SFML 2.x sample be built - yes: with GLEW") - include_directories(${SFML_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) - add_definitions( -DENABLE_GLEW ) - bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES} ${GLEW_LIBRARY}) - else() - message("-- Can SFML 2.x sample be built - yes: without GLEW") - include_directories(${SFML_INCLUDE_DIR}) - bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES}) - endif() - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml2) - install(TARGETS 
sfml2 - RUNTIME DESTINATION ${SAMPLES_DIR}/sfml2 - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - # Build and install the tutorials - foreach(tutorial ${tutorials}) - bl_sample(${tutorial} ${sample_LIBRARIES}) - - # The tutorials always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/tutorial/${tutorial}) - install(TARGETS ${tutorial} - RUNTIME DESTINATION ${SAMPLES_DIR}/${tutorial} - BUNDLE DESTINATION ${SAMPLES_DIR}) - endforeach() - - # Build and install invaders sample - bl_sample(invaders ${sample_LIBRARIES}) - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/invaders) - install(TARGETS invaders - RUNTIME DESTINATION ${SAMPLES_DIR}/invaders - BUNDLE DESTINATION ${SAMPLES_DIR}) - - if(BUILD_PYTHON_BINDINGS) - # Build and install pyinvaders sample - bl_sample(pyinvaders ${sample_LIBRARIES} ${PYTHON_LIBRARIES} ${PY_BINDINGS_LINK_LIBS}) - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/pyinvaders) - install(TARGETS pyinvaders - RUNTIME DESTINATION ${SAMPLES_DIR}/pyinvaders - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - if(BUILD_LUA_BINDINGS) - bl_sample(luainvaders RocketCoreLua RocketControlsLua ${sample_LIBRARIES} ${LUA_BINDINGS_LINK_LIBS}) - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/luainvaders) - install(TARGETS luainvaders - RUNTIME DESTINATION ${SAMPLES_DIR}/luainvaders - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() -endif() - - -#=================================== -# Installation ===================== -#=================================== - -if(BUILD_LUA_BINDINGS AND BUILD_PYTHON_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - ) -else() - if(NOT BUILD_LUA_BINDINGS AND NOT BUILD_PYTHON_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - PATTERN "Python" EXCLUDE - PATTERN "Lua" EXCLUDE - ) - else() - if(BUILD_PYTHON_BINDINGS) - install(FILES ${PROJECT_SOURCE_DIR}/bin/rocket.py - DESTINATION ${PYTHON_INSTDIR} - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - PATTERN "Lua" EXCLUDE - ) - else() - if(BUILD_LUA_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - PATTERN "Python" EXCLUDE - ) - else() - message(FATAL_ERROR "ASSERT: Unexpected option combination, this is a logical impossibility.") - endif() - endif() - endif() -endif() - -if(BUILD_SAMPLES) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/assets - DESTINATION ${SAMPLES_DIR} - ) - - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/template/data - DESTINATION ${SAMPLES_DIR}/tutorial/template - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid/data - DESTINATION ${SAMPLES_DIR}/tutorial/datagrid - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid_tree/data - DESTINATION ${SAMPLES_DIR}/tutorial/datagrid_tree - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/tutorial_drag/data - DESTINATION ${SAMPLES_DIR}/tutorial/tutorial_drag - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/treeview/data - DESTINATION ${SAMPLES_DIR}/basic/treeview - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/drag/data - DESTINATION ${SAMPLES_DIR}/basic/drag - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/invaders/data - DESTINATION ${SAMPLES_DIR}/invaders - ) - - if(BUILD_PYTHON_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/pyinvaders/data - DESTINATION ${SAMPLES_DIR}/pyinvaders - ) - endif() - - if(BUILD_LUA_BINDINGS) - install(DIRECTORY 
${PROJECT_SOURCE_DIR}/Samples/luainvaders/data - DESTINATION ${SAMPLES_DIR}/luainvaders - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/luainvaders/lua - DESTINATION ${SAMPLES_DIR}/luainvaders - ) - endif() -endif() diff --git a/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt deleted file mode 100644 index f4493c7..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt +++ /dev/null @@ -1,2 +0,0 @@ -add_subdirectory(Build) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt deleted file mode 100644 index 4ccb85d..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt +++ /dev/null @@ -1,26 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -cmake_policy(SET CMP0011 NEW) -project(noise CXX) - -# http://sourceforge.net/projects/libnoise - -file(GLOB SOURCE_CODE src/*.cpp src/*.h src/model/*.cpp src/model/*.h src/module/*.cpp src/module/*.h) -include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src) -include_directories(${CMAKE_CURRENT_SOURCE_DIR}/etc) -add_library(${PACKAGE} SHARED ${SOURCE_CODE}) - -#IF(MSVC) -# add_definitions(/nologo /c /D_CRT_SECURE_NO_DEPRECATE) -#ENDIF() - -file(GLOB HEADER_CODE src/*.h ) -INSTALL( FILES ${HEADER_CODE} - DESTINATION "include/${PACKAGE}") - -file(GLOB HEADER_CODE src/model/*.h ) -INSTALL( FILES ${HEADER_CODE} - DESTINATION "include/${PACKAGE}/model") - -file(GLOB HEADER_CODE src/module/*.h ) -INSTALL( FILES ${HEADER_CODE} - DESTINATION "include/${PACKAGE}/module") diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp deleted file mode 100644 index 08c2a9f..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp +++ /dev/null @@ -1,1147 +0,0 @@ -#include "OIS.h" - -#include -#include -#include -#include -#include -#include -#include - -using namespace std; - -////////////////////////////////////Needed Windows Headers//////////// -#if defined OIS_WIN32_PLATFORM -# define WIN32_LEAN_AND_MEAN -# include "windows.h" -# include "resource.h" - -////////////////////////////////////Needed Linux Headers////////////// -#elif defined OIS_LINUX_PLATFORM -# include -# include -#else -# error Sorry, not yet implemented on this platform. 
-#endif - - -using namespace OIS; - -#if defined OIS_WIN32_PLATFORM - -// The dialog proc we have to give to CreateDialog -LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) -{ - return FALSE; -} - -#endif - -//////////// Event handler class declaration //////////////////////////////////////////////// -class Application; -class JoystickManager; -class EffectManager; - -class EventHandler : public KeyListener, public JoyStickListener -{ - protected: - - Application* _pApplication; - JoystickManager* _pJoystickMgr; - EffectManager* _pEffectMgr; - - public: - - EventHandler(Application* pApp); - void initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr); - - bool keyPressed( const KeyEvent &arg ); - bool keyReleased( const KeyEvent &arg ); - - bool buttonPressed( const JoyStickEvent &arg, int button ); - bool buttonReleased( const JoyStickEvent &arg, int button ); - - bool axisMoved( const JoyStickEvent &arg, int axis ); - - bool povMoved( const JoyStickEvent &arg, int pov ); -}; - -//////////// Variable classes //////////////////////////////////////////////////////// - -class Variable -{ - protected: - - double _dInitValue; - double _dValue; - - public: - - Variable(double dInitValue) : _dInitValue(dInitValue) { reset(); } - - double getValue() const { return _dValue; } - - void reset() { _dValue = _dInitValue; } - - virtual void setValue(double dValue) { _dValue = dValue; } - - virtual string toString() const - { - ostringstream oss; - oss << _dValue; - return oss.str(); - } - - virtual void update() {}; -}; - -class Constant : public Variable -{ - public: - - Constant(double dInitValue) : Variable(dInitValue) {} - - virtual void setValue(double dValue) { } - -}; - -class LimitedVariable : public Variable -{ - protected: - - double _dMinValue; - double _dMaxValue; - - public: - - LimitedVariable(double dInitValue, double dMinValue, double dMaxValue) - : _dMinValue(dMinValue), _dMaxValue(dMaxValue), Variable(dInitValue) - {} - - virtual void setValue(double dValue) - { - _dValue = dValue; - if (_dValue > _dMaxValue) - _dValue = _dMaxValue; - else if (_dValue < _dMinValue) - _dValue = _dMinValue; - } - -/* virtual string toString() const - { - ostringstream oss; - oss << setiosflags(ios_base::right) << setw(4) - << (int)(200.0 * getValue()/(_dMaxValue - _dMinValue)); // [-100%, +100%] - return oss.str(); - }*/ -}; - -class TriangleVariable : public LimitedVariable -{ - protected: - - double _dDeltaValue; - - public: - - TriangleVariable(double dInitValue, double dDeltaValue, double dMinValue, double dMaxValue) - : LimitedVariable(dInitValue, dMinValue, dMaxValue), _dDeltaValue(dDeltaValue) {}; - - virtual void update() - { - double dValue = getValue() + _dDeltaValue; - if (dValue > _dMaxValue) - { - dValue = _dMaxValue; - _dDeltaValue = -_dDeltaValue; - //cout << "Decreasing variable towards " << _dMinValue << endl; - } - else if (dValue < _dMinValue) - { - dValue = _dMinValue; - _dDeltaValue = -_dDeltaValue; - //cout << "Increasing variable towards " << _dMaxValue << endl; - } - setValue(dValue); - //cout << "TriangleVariable::update : delta=" << _dDeltaValue << ", value=" << dValue << endl; - } -}; - -//////////// Variable effect class ////////////////////////////////////////////////////////// - -typedef map MapVariables; -typedef void (*EffectVariablesApplier)(MapVariables& mapVars, Effect* pEffect); - -class VariableEffect -{ - protected: - - // Effect description - const char* _pszDesc; - - // The associate OIS effect - Effect* _pEffect; - - // The 
effect variables. - MapVariables _mapVariables; - - // The effect variables applier function. - EffectVariablesApplier _pfApplyVariables; - - // True if the effect is currently being played. - bool _bActive; - - public: - - VariableEffect(const char* pszDesc, Effect* pEffect, - const MapVariables& mapVars, const EffectVariablesApplier pfApplyVars) - : _pszDesc(pszDesc), _pEffect(pEffect), - _mapVariables(mapVars), _pfApplyVariables(pfApplyVars), _bActive(false) - {} - - ~VariableEffect() - { - if (_pEffect) - delete _pEffect; - MapVariables::iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - if (iterVars->second) - delete iterVars->second; - - } - - void setActive(bool bActive = true) - { - reset(); - _bActive = bActive; - } - - bool isActive() - { - return _bActive; - } - - Effect* getFFEffect() - { - return _pEffect; - } - - const char* getDescription() const - { - return _pszDesc; - } - - void update() - { - if (isActive()) - { - // Update the variables. - MapVariables::iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - iterVars->second->update(); - - // Apply the updated variable values to the effect. - _pfApplyVariables(_mapVariables, _pEffect); - } - } - - void reset() - { - MapVariables::iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - iterVars->second->reset(); - _pfApplyVariables(_mapVariables, _pEffect); - } - - string toString() const - { - string str; - MapVariables::const_iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - str += iterVars->first + ":" + iterVars->second->toString() + " "; - return str; - } -}; - -//////////// Joystick manager class //////////////////////////////////////////////////////// - -class JoystickManager -{ - protected: - - // Input manager. - InputManager* _pInputMgr; - - // Vectors to hold joysticks and associated force feedback devices - vector _vecJoys; - vector _vecFFDev; - - // Selected joystick - int _nCurrJoyInd; - - // Force feedback detected ? - bool _bFFFound; - - // Selected joystick master gain. - float _dMasterGain; - - // Selected joystick auto-center mode. - bool _bAutoCenter; - - public: - - JoystickManager(InputManager* pInputMgr, EventHandler* pEventHdlr) - : _pInputMgr(pInputMgr), _nCurrJoyInd(-1), _dMasterGain(0.5), _bAutoCenter(true) - - { - _bFFFound = false; - for( int nJoyInd = 0; nJoyInd < pInputMgr->getNumberOfDevices(OISJoyStick); ++nJoyInd ) - { - //Create the stick - JoyStick* pJoy = (JoyStick*)pInputMgr->createInputObject( OISJoyStick, true ); - cout << endl << "Created buffered joystick #" << nJoyInd << " '" << pJoy->vendor() - << "' (Id=" << pJoy->getID() << ")"; - - // Check for FF, and if so, keep the joy and dump FF info - ForceFeedback* pFFDev = (ForceFeedback*)pJoy->queryInterface(Interface::ForceFeedback ); - if( pFFDev ) - { - _bFFFound = true; - - // Keep the joy to play with it. - pJoy->setEventCallback(pEventHdlr); - _vecJoys.push_back(pJoy); - - // Keep also the associated FF device - _vecFFDev.push_back(pFFDev); - - // Dump FF supported effects and other info. 
- cout << endl << " * Number of force feedback axes : " - << pFFDev->getFFAxesNumber() << endl; - const ForceFeedback::SupportedEffectList &lstFFEffects = - pFFDev->getSupportedEffects(); - if (lstFFEffects.size() > 0) - { - cout << " * Supported effects :"; - ForceFeedback::SupportedEffectList::const_iterator itFFEff; - for(itFFEff = lstFFEffects.begin(); itFFEff != lstFFEffects.end(); ++itFFEff) - cout << " " << Effect::getEffectTypeName(itFFEff->second); - cout << endl << endl; - } - else - cout << "Warning: no supported effect found !" << endl; - } - else - { - cout << " (no force feedback support detected) => ignored." << endl << endl; - _pInputMgr->destroyInputObject(pJoy); - } - } - } - - ~JoystickManager() - { - for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) - _pInputMgr->destroyInputObject( _vecJoys[nJoyInd] ); - } - - size_t getNumberOfJoysticks() const - { - return _vecJoys.size(); - } - - bool wasFFDetected() const - { - return _bFFFound; - } - - enum EWhichJoystick { ePrevious=-1, eNext=+1 }; - - void selectJoystick(EWhichJoystick eWhich) - { - // Note: Reset the master gain to half the maximum and autocenter mode to Off, - // when really selecting a new joystick. - if (_nCurrJoyInd < 0) - { - _nCurrJoyInd = 0; - _dMasterGain = 0.5; // Half the maximum. - changeMasterGain(0.0); - } - else - { - _nCurrJoyInd += eWhich; - if (_nCurrJoyInd < -1 || _nCurrJoyInd >= (int)_vecJoys.size()) - _nCurrJoyInd = -1; - if (_vecJoys.size() > 1 && _nCurrJoyInd >= 0) - { - _dMasterGain = 0.5; // Half the maximum. - changeMasterGain(0.0); - } - } - } - - ForceFeedback* getCurrentFFDevice() - { - return (_nCurrJoyInd >= 0) ? _vecFFDev[_nCurrJoyInd] : 0; - } - - void changeMasterGain(float dDeltaPercent) - { - if (_nCurrJoyInd >= 0) - { - _dMasterGain += dDeltaPercent / 100; - if (_dMasterGain > 1.0) - _dMasterGain = 1.0; - else if (_dMasterGain < 0.0) - _dMasterGain = 0.0; - - _vecFFDev[_nCurrJoyInd]->setMasterGain(_dMasterGain); - } - } - - enum EAutoCenterHow { eOff, eOn, eToggle }; - - void changeAutoCenter(EAutoCenterHow eHow = eToggle) - { - if (_nCurrJoyInd >= 0) - { - if (eHow == eToggle) - _bAutoCenter = !_bAutoCenter; - else - _bAutoCenter = (eHow == eOn ? true : false); - _vecFFDev[_nCurrJoyInd]->setAutoCenterMode(_bAutoCenter); - } - } - - void captureEvents() - { - // This fires off buffered events for each joystick we have - for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) - if( _vecJoys[nJoyInd] ) - _vecJoys[nJoyInd]->capture(); - } - - string toString() const - { - // Warning: Wrong result if more than 10 joysticks ... - ostringstream oss; - oss << "Joy:" << (_nCurrJoyInd >= 0 ? (char)('0' + _nCurrJoyInd) : '-'); - oss << " Gain:" << setiosflags(ios_base::right) << setw(3) << (int)(_dMasterGain*100); - oss << "% Center:" << (_bAutoCenter ? 
" On " : "Off"); - return oss.str(); - } -}; - -//////////// Effect variables applier functions ///////////////////////////////////////////// -// These functions apply the given Variables to the given OIS::Effect - -// Variable force "Force" + optional "AttackFactor" constant, on a OIS::ConstantEffect -void forceVariableApplier(MapVariables& mapVars, Effect* pEffect) -{ - double dForce = mapVars["Force"]->getValue(); - double dAttackFactor = 1.0; - if (mapVars.find("AttackFactor") != mapVars.end()) - dAttackFactor = mapVars["AttackFactor"]->getValue(); - - ConstantEffect* pConstForce = dynamic_cast(pEffect->getForceEffect()); - pConstForce->level = (int)dForce; - pConstForce->envelope.attackLevel = (unsigned short)fabs(dForce*dAttackFactor); - pConstForce->envelope.fadeLevel = (unsigned short)fabs(dForce); // Fade never reached, in fact. -} - -// Variable "Period" on an OIS::PeriodicEffect -void periodVariableApplier(MapVariables& mapVars, Effect* pEffect) -{ - double dPeriod = mapVars["Period"]->getValue(); - - PeriodicEffect* pPeriodForce = dynamic_cast(pEffect->getForceEffect()); - pPeriodForce->period = (unsigned int)dPeriod; -} - - -//////////// Effect manager class ////////////////////////////////////////////////////////// - -class EffectManager -{ - protected: - - // The joystick manager - JoystickManager* _pJoystickMgr; - - // Vector to hold variable effects - vector _vecEffects; - - // Selected effect - int _nCurrEffectInd; - - // Update frequency (Hz) - unsigned int _nUpdateFreq; - - // Indexes (in _vecEffects) of the variable effects that are playable by the selected joystick. - vector _vecPlayableEffectInd; - - - public: - - EffectManager(JoystickManager* pJoystickMgr, unsigned int nUpdateFreq) - : _pJoystickMgr(pJoystickMgr), _nUpdateFreq(nUpdateFreq), _nCurrEffectInd(-1) - { - Effect* pEffect; - MapVariables mapVars; - ConstantEffect* pConstForce; - PeriodicEffect* pPeriodForce; - - // Please don't modify or remove effects (unless there is some bug ...) : - // add new ones to enhance the test repository. - // And feel free to add any tested device, even when the test failed ! - // Tested devices capabilities : - // - Logitech G25 Racing wheel : - // * Only 1 axis => no directional 2D effect (only left and right) - // * Full support for constant force under WinXPSP2DX9 and Linux 2.6.22.9 - // * Full support for periodic forces under WinXPSP2DX9 - // (but poor rendering under 20ms period), and no support under Linux 2.6.22.9 - // * Full support reported (not tested) for all other forces under WinXPSP2DX9, - // and no support under Linux 2.6.22.9 - // - Logitech Rumble pad 2 : - // * Only 1 axis => no directional 2D effect (only left and right) - // * Forces amplitude is rendered through the inertia motors rotation frequency - // (stronger force => quicker rotation) - // * 2 inertia motors : 1 with small inertia, 1 with "heavy" one. - // => poor force feedback rendering ... - // * Support (poor) for all OIS forces under WinXPSP2DX9, - // and only for Triangle, Square and Sine periodic forces under Linux 2.6.22.9 - // (reported by enumeration, but does not seem to work actually) - // Master gain setting tests: - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK, Linux2.6.22.9=OK. - // Auto-center mode setting tests: - // - Logitech G25 Racing wheel : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. 
- // - Logitech Rumble pad 2 : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. - - // 1) Constant force on 1 axis with 20s-period triangle oscillations in [-10K, +10K]. - // Notes: Linux: replay_length: no way to get it to work if not 0 or Effect::OIS_INFINITE - // Tested devices : - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (but only light motor involved), - // Linux2.6.22.9=Not supported - pEffect = new Effect(Effect::ConstantForce, Effect::Constant); - pEffect->direction = Effect::North; - pEffect->trigger_button = 0; - pEffect->trigger_interval = 0; - pEffect->replay_length = Effect::OIS_INFINITE; // Linux/Win32: Same behaviour as 0. - pEffect->replay_delay = 0; - pEffect->setNumAxes(1); - pConstForce = dynamic_cast(pEffect->getForceEffect()); - pConstForce->level = 5000; //-10K to +10k - pConstForce->envelope.attackLength = 0; - pConstForce->envelope.attackLevel = (unsigned short)pConstForce->level; - pConstForce->envelope.fadeLength = 0; - pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; - - mapVars.clear(); - mapVars["Force"] = - new TriangleVariable(0.0, // F0 - 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle - -10000.0, // Fmin - 10000.0); // Fmax - mapVars["AttackFactor"] = new Constant(1.0); - - _vecEffects.push_back - (new VariableEffect - ("Constant force on 1 axis with 20s-period triangle oscillations " - "of its signed amplitude in [-10K, +10K]", - pEffect, mapVars, forceVariableApplier)); - - // 2) Constant force on 1 axis with noticeable attack - // with 20s-period triangle oscillations in [-10K, +10K]. - // Tested devices : - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (including attack, but only light motor involved), - // Linux2.6.22.9=Not supported. - pEffect = new Effect(Effect::ConstantForce, Effect::Constant); - pEffect->direction = Effect::North; - pEffect->trigger_button = 0; - pEffect->trigger_interval = 0; - pEffect->replay_length = Effect::OIS_INFINITE; //(unsigned int)(1000000.0/_nUpdateFreq); // Linux: Does not work. - pEffect->replay_delay = 0; - pEffect->setNumAxes(1); - pConstForce = dynamic_cast(pEffect->getForceEffect()); - pConstForce->level = 5000; //-10K to +10k - pConstForce->envelope.attackLength = (unsigned int)(1000000.0/_nUpdateFreq/2); - pConstForce->envelope.attackLevel = (unsigned short)(pConstForce->level*0.1); - pConstForce->envelope.fadeLength = 0; // Never reached, actually. - pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; // Idem - - mapVars.clear(); - mapVars["Force"] = - new TriangleVariable(0.0, // F0 - 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle - -10000.0, // Fmin - 10000.0); // Fmax - mapVars["AttackFactor"] = new Constant(0.1); - - _vecEffects.push_back - (new VariableEffect - ("Constant force on 1 axis with noticeable attack (app update period / 2)" - "and 20s-period triangle oscillations of its signed amplitude in [-10K, +10K]", - pEffect, mapVars, forceVariableApplier)); - - // 3) Triangle periodic force on 1 axis with 40s-period triangle oscillations - // of its period in [10, 400] ms, and constant amplitude - // Tested devices : - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK but only light motor involved, - // Linux2.6.22.9=Failed. 
- pEffect = new Effect(Effect::PeriodicForce, Effect::Triangle); - pEffect->direction = Effect::North; - pEffect->trigger_button = 0; - pEffect->trigger_interval = 0; - pEffect->replay_length = Effect::OIS_INFINITE; - pEffect->replay_delay = 0; - pEffect->setNumAxes(1); - pPeriodForce = dynamic_cast(pEffect->getForceEffect()); - pPeriodForce->magnitude = 10000; // 0 to +10k - pPeriodForce->offset = 0; - pPeriodForce->phase = 0; // 0 to 35599 - pPeriodForce->period = 10000; // Micro-seconds - pPeriodForce->envelope.attackLength = 0; - pPeriodForce->envelope.attackLevel = (unsigned short)pPeriodForce->magnitude; - pPeriodForce->envelope.fadeLength = 0; - pPeriodForce->envelope.fadeLevel = (unsigned short)pPeriodForce->magnitude; - - mapVars.clear(); - mapVars["Period"] = - new TriangleVariable(1*1000.0, // P0 - 4*(400-10)*1000.0/_nUpdateFreq / 40.0, // dP for a 40s-period triangle - 10*1000.0, // Pmin - 400*1000.0); // Pmax - _vecEffects.push_back - (new VariableEffect - ("Periodic force on 1 axis with 40s-period triangle oscillations " - "of its period in [10, 400] ms, and constant amplitude", - pEffect, mapVars, periodVariableApplier)); - - } - - ~EffectManager() - { - vector::iterator iterEffs; - for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++) - delete *iterEffs; - } - - void updateActiveEffects() - { - vector::iterator iterEffs; - for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++) - if ((*iterEffs)->isActive()) - { - (*iterEffs)->update(); - _pJoystickMgr->getCurrentFFDevice()->modify((*iterEffs)->getFFEffect()); - } - } - - void checkPlayableEffects() - { - // Nothing to do if no joystick currently selected - if (!_pJoystickMgr->getCurrentFFDevice()) - return; - - // Get the list of indexes of effects that the selected device can play - _vecPlayableEffectInd.clear(); - for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) - { - const Effect::EForce eForce = _vecEffects[nEffInd]->getFFEffect()->force; - const Effect::EType eType = _vecEffects[nEffInd]->getFFEffect()->type; - if (_pJoystickMgr->getCurrentFFDevice()->supportsEffect(eForce, eType)) - { - _vecPlayableEffectInd.push_back(nEffInd); - } - } - - // Print details about playable effects - if (_vecPlayableEffectInd.empty()) - { - cout << endl << endl << "The device can't play any effect of the test set" << endl; - } - else - { - cout << endl << endl << "Selected device can play the following effects :" << endl; - for (size_t nEffIndInd = 0; nEffIndInd < _vecPlayableEffectInd.size(); nEffIndInd++) - printEffect(_vecPlayableEffectInd[nEffIndInd]); - cout << endl; - } - } - - enum EWhichEffect { ePrevious=-1, eNone=0, eNext=+1 }; - - void selectEffect(EWhichEffect eWhich) - { - - // Nothing to do if no joystick currently selected - if (!_pJoystickMgr->getCurrentFFDevice()) - { - cout << "\nNo Joystick selected.\n"; - return; - } - - // Nothing to do if joystick cannot play any effect - if (_vecPlayableEffectInd.empty()) - { - cout << "\nNo playable effects.\n"; - return; - } - - // If no effect selected, and next or previous requested, select the first one. - if (eWhich != eNone && _nCurrEffectInd < 0) - _nCurrEffectInd = 0; - - // Otherwise, remove the current one from the device, - // and then select the requested one if any. 
- else if (_nCurrEffectInd >= 0) - { - _pJoystickMgr->getCurrentFFDevice() - ->remove(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); - _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(false); - _nCurrEffectInd += eWhich; - if (_nCurrEffectInd < -1 || _nCurrEffectInd >= (int)_vecPlayableEffectInd.size()) - _nCurrEffectInd = -1; - } - - // If no effect must be selected, reset the selection index - if (eWhich == eNone) - { - _nCurrEffectInd = -1; - } - - // Otherwise, upload the new selected effect to the device if any. - else if (_nCurrEffectInd >= 0) - { - _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(true); - _pJoystickMgr->getCurrentFFDevice() - ->upload(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); - } - } - - void printEffect(size_t nEffInd) - { - cout << "* #" << nEffInd << " : " << _vecEffects[nEffInd]->getDescription() << endl; - } - - void printEffects() - { - for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) - printEffect(nEffInd); - } - - string toString() const - { - ostringstream oss; - oss << "DevMem: " << setiosflags(ios_base::right) << setw(3); - - //This causes constant exceptions with my device. Not needed for anything other than debugging - //if (_pJoystickMgr->getCurrentFFDevice()) - // oss << _pJoystickMgr->getCurrentFFDevice()->getFFMemoryLoad() << "%"; - //else - // oss << "----"; - - oss << " Effect:" << setw(2); - if (_nCurrEffectInd >= 0) - oss << _vecPlayableEffectInd[_nCurrEffectInd] - << " " << _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->toString(); - else - oss << "--"; - return oss.str(); - } -}; - -//////////// Application class //////////////////////////////////////////////////////// - -class Application -{ - protected: - InputManager* _pInputMgr; - EventHandler* _pEventHdlr; - Keyboard* _pKeyboard; - JoystickManager* _pJoystickMgr; - EffectManager* _pEffectMgr; - -#if defined OIS_WIN32_PLATFORM - HWND _hWnd; -#elif defined OIS_LINUX_PLATFORM - Display* _pXDisp; - Window _xWin; -#endif - - bool _bMustStop; - bool _bIsInitialized; - - int _nStatus; - - // App. hart beat frequency. - static const unsigned int _nHartBeatFreq = 20; // Hz - - // Effects update frequency (Hz) : Needs to be quite lower than app. hart beat frequency, - // if we want to be able to calmly study effect changes ... 
- static const unsigned int _nEffectUpdateFreq = 1; // Hz - - public: - - Application(int argc, const char* argv[]) - { - _pInputMgr = 0; - _pEventHdlr = 0; - _pKeyboard = 0; - _pJoystickMgr = 0; - _pEffectMgr = 0; - -#if defined OIS_WIN32_PLATFORM - _hWnd = 0; -#elif defined OIS_LINUX_PLATFORM - _pXDisp = 0; - _xWin = 0; -#endif - - _bMustStop = false; - - _bIsInitialized = false; - _nStatus = 0; - } - - int initialize() - { - ostringstream wnd; - -#if defined OIS_WIN32_PLATFORM - - //Create a capture window for Input Grabbing - _hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); - if( _hWnd == NULL ) - OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); - - ShowWindow(_hWnd, SW_SHOW); - - wnd << (size_t)_hWnd; - -#elif defined OIS_LINUX_PLATFORM - - //Connects to default X window - if( !(_pXDisp = XOpenDisplay(0)) ) - OIS_EXCEPT(E_General, "Error opening X!"); - - //Create a window - _xWin = XCreateSimpleWindow(_pXDisp,DefaultRootWindow(_pXDisp), 0,0, 100,100, 0, 0, 0); - - //bind our connection to that window - XMapWindow(_pXDisp, _xWin); - - //Select what events we want to listen to locally - XSelectInput(_pXDisp, _xWin, StructureNotifyMask); - - //Wait for Window to show up - XEvent event; - do { XNextEvent(_pXDisp, &event); } while(event.type != MapNotify); - - wnd << _xWin; - -#endif - - // Create OIS input manager - ParamList pl; - pl.insert(make_pair(string("WINDOW"), wnd.str())); - _pInputMgr = InputManager::createInputSystem(pl); - cout << _pInputMgr->inputSystemName() << " created." << endl; - - // Create the event handler. - _pEventHdlr = new EventHandler(this); - - // Create a simple keyboard - _pKeyboard = (Keyboard*)_pInputMgr->createInputObject( OISKeyboard, true ); - _pKeyboard->setEventCallback( _pEventHdlr ); - - // Create the joystick manager. - _pJoystickMgr = new JoystickManager(_pInputMgr, _pEventHdlr); - if( !_pJoystickMgr->wasFFDetected() ) - { - cout << "No Force Feedback device detected." << endl; - _nStatus = 1; - return _nStatus; - } - - // Create force feedback effect manager. - _pEffectMgr = new EffectManager(_pJoystickMgr, _nEffectUpdateFreq); - - // Initialize the event handler. - _pEventHdlr->initialize(_pJoystickMgr, _pEffectMgr); - - _bIsInitialized = true; - - return _nStatus; - } - -#if defined OIS_LINUX_PLATFORM - - // This is just here to show that you still receive x11 events, - // as the lib only needs mouse/key events - void checkX11Events() - { - XEvent event; - - //Poll x11 for events - while( XPending(_pXDisp) > 0 ) - { - XNextEvent(_pXDisp, &event); - } - } -#endif - - int run() - { - const unsigned int nMaxEffectUpdateCnt = _nHartBeatFreq / _nEffectUpdateFreq; - unsigned int nEffectUpdateCnt = 0; - - // Initailize app. if not already done, and exit if something went wrong. - if (!_bIsInitialized) - initialize(); - - if (!_bIsInitialized) - return _nStatus; - - try - { - //Main polling loop - while(!_bMustStop) - { - // This fires off buffered events for keyboards - _pKeyboard->capture(); - - // This fires off buffered events for each joystick we have - _pJoystickMgr->captureEvents(); - - // Update currently selected effects if time has come to. - if (!nEffectUpdateCnt) - { - _pEffectMgr->updateActiveEffects(); - nEffectUpdateCnt = nMaxEffectUpdateCnt; - } - else - nEffectUpdateCnt--; - - // Update state line. 
- cout << "\r" << _pJoystickMgr->toString() << " " << _pEffectMgr->toString() - << " "; - - //Throttle down CPU usage & handle OS events -#if defined OIS_WIN32_PLATFORM - Sleep( (DWORD)(1000.0/_nHartBeatFreq) ); - MSG msg; - while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) - { - TranslateMessage( &msg ); - DispatchMessage( &msg ); - } -#elif defined OIS_LINUX_PLATFORM - checkX11Events(); - usleep(1000000.0/_nHartBeatFreq); -#endif - } - } - catch( const Exception &ex ) - { -#if defined OIS_WIN32_PLATFORM - MessageBox(0, ex.eText, "Exception Raised!", MB_OK); -#else - cout << endl << "OIS Exception Caught!" << endl - << "\t" << ex.eText << "[Line " << ex.eLine << " in " << ex.eFile << "]" << endl; -#endif - } - - terminate(); - - return _nStatus; - } - - void stop() - { - _bMustStop = true; - } - - void terminate() - { - if (_pInputMgr) - { - _pInputMgr->destroyInputObject( _pKeyboard ); - _pKeyboard = 0; - if (_pJoystickMgr) - { - delete _pJoystickMgr; - _pJoystickMgr = 0; - } - InputManager::destroyInputSystem(_pInputMgr); - _pInputMgr = 0; - } - if (_pEffectMgr) - { - delete _pEffectMgr; - _pEffectMgr = 0; - } - if (_pEventHdlr) - { - delete _pEventHdlr; - _pEventHdlr = 0; - } - -#if defined OIS_LINUX_PLATFORM - // Be nice to X and clean up the x window - XDestroyWindow(_pXDisp, _xWin); - XCloseDisplay(_pXDisp); -#endif - } - - JoystickManager* getJoystickManager() - { - return _pJoystickMgr; - } - - EffectManager* getEffectManager() - { - return _pEffectMgr; - } - - void printHelp() - { - cout << endl - << "Keyboard actions :" << endl - << "* Escape : Exit App" << endl - << "* H : This help menu" << endl - << "* Right/Left : Select next/previous joystick among the FF capable detected ones" << endl - << "* Up/Down : Select next/previous effect for the selected joystick" << endl - << "* PgUp/PgDn : Increase/decrease from 5% the master gain " - << "for all the joysticks" << endl - << "* Space : Toggle auto-centering on all the joysticks" << endl; - if (_bIsInitialized) - { - cout << endl << "Implemented effects :" << endl << endl; - _pEffectMgr->printEffects(); - cout << endl; - } - } -}; - -//////////// Event handler class definition //////////////////////////////////////////////// - -EventHandler::EventHandler(Application* pApp) -: _pApplication(pApp) -{} - -void EventHandler::initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr) -{ - _pJoystickMgr = pJoystickMgr; - _pEffectMgr = pEffectMgr; -} - -bool EventHandler::keyPressed( const KeyEvent &arg ) -{ - switch (arg.key) - { - // Quit. - case KC_ESCAPE: - _pApplication->stop(); - break; - - // Help. - case KC_H: - _pApplication->printHelp(); - break; - - // Change current joystick. - case KC_RIGHT: - _pEffectMgr->selectEffect(EffectManager::eNone); - _pJoystickMgr->selectJoystick(JoystickManager::eNext); - _pEffectMgr->checkPlayableEffects(); - break; - case KC_LEFT: - _pEffectMgr->selectEffect(EffectManager::eNone); - _pJoystickMgr->selectJoystick(JoystickManager::ePrevious); - _pEffectMgr->checkPlayableEffects(); - break; - - // Change current effect. - case KC_UP: - _pEffectMgr->selectEffect(EffectManager::eNext); - break; - case KC_DOWN: - _pEffectMgr->selectEffect(EffectManager::ePrevious); - break; - - // Change current master gain. - case KC_PGUP: - _pJoystickMgr->changeMasterGain(5.0); // Percent - break; - case KC_PGDOWN: - _pJoystickMgr->changeMasterGain(-5.0); // Percent - break; - - // Toggle auto-center mode. 
- case KC_SPACE: - _pJoystickMgr->changeAutoCenter(); - break; - - default: - cout << "Non mapped key: " << arg.key << endl; - } - return true; -} - -bool EventHandler::keyReleased( const KeyEvent &arg ) -{ - return true; -} - -bool EventHandler::buttonPressed( const JoyStickEvent &arg, int button ) -{ - return true; -} -bool EventHandler::buttonReleased( const JoyStickEvent &arg, int button ) -{ - return true; -} -bool EventHandler::axisMoved( const JoyStickEvent &arg, int axis ) -{ - return true; -} -bool EventHandler::povMoved( const JoyStickEvent &arg, int pov ) -{ - return true; -} - -//========================================================================================== -int main(int argc, const char* argv[]) -{ - - cout << endl - << "This is a simple command line Force Feedback testing demo ..." << endl - << "All connected joystick devices will be created and if FF Support is found," << endl - << "you'll be able to play some predefined variable effects on them." << endl << endl - << "Note: 1 effect can be played on 1 joystick at a time for the moment." << endl << endl; - - Application app(argc, argv); - - int status = app.initialize(); - - if (!status) - { - app.printHelp(); - - status = app.run(); - } - - cout << endl << endl << "Exiting ..." << endl << endl; - -#if defined OIS_WIN32_PLATFORM && _DEBUG - cout << "Click on this window and ..." << endl; - system("pause"); -#endif - - exit(status); -} diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am deleted file mode 100644 index 926f7f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am +++ /dev/null @@ -1,11 +0,0 @@ -INCLUDES = $(STLPORT_CFLAGS) -I$(top_srcdir)/includes $(CFLAGS) -I/usr/X11R6/include - -noinst_PROGRAMS = ConsoleApp FFConsoleTest - -ConsoleApp_SOURCES = OISConsole.cpp -ConsoleApp_LDFLAGS = -L$(top_builddir)/src -ConsoleApp_LDADD = -lOIS -lX11 -lXext - -FFConsoleTest_SOURCES = FFConsoleDemo.cpp -FFConsoleTest_LDFLAGS = -L$(top_builddir)/src -FFConsoleTest_LDADD = -lOIS -lX11 -lXext diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp deleted file mode 100644 index 0850004..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp +++ /dev/null @@ -1,459 +0,0 @@ -//////////////////////////////// OS Nuetral Headers //////////////// -#include "OISInputManager.h" -#include "OISException.h" -#include "OISKeyboard.h" -#include "OISMouse.h" -#include "OISJoyStick.h" -#include "OISEvents.h" - -//Advanced Usage -#include "OISForceFeedback.h" - -#include -#include -#include - -////////////////////////////////////Needed Windows Headers//////////// -#if defined OIS_WIN32_PLATFORM -# define WIN32_LEAN_AND_MEAN -# include "windows.h" -# ifdef min -# undef min -# endif -# include "resource.h" - LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ); -////////////////////////////////////////////////////////////////////// -////////////////////////////////////Needed Linux Headers////////////// -#elif defined OIS_LINUX_PLATFORM -# include -# include - void checkX11Events(); -////////////////////////////////////////////////////////////////////// -////////////////////////////////////Needed Mac Headers////////////// -#elif defined OIS_APPLE_PLATFORM -# include - void checkMacEvents(); -#endif -////////////////////////////////////////////////////////////////////// -using namespace OIS; - 
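The FFConsoleDemo run() loop above drives everything from a single 20 Hz heartbeat: it captures the keyboard and joysticks on every iteration, lets a countdown divider bring effect updates down to the much lower _nEffectUpdateFreq, and then sleeps for one heartbeat period. The following standalone sketch shows only that pattern; captureDevices() and updateActiveEffects() are placeholder stand-ins for the OIS capture calls and EffectManager::updateActiveEffects(), and a fixed 100-beat loop replaces the Escape-driven stop flag.

#include <chrono>
#include <iostream>
#include <thread>

// Placeholder: the demo calls _pKeyboard->capture() and _pJoystickMgr->captureEvents() here.
static void captureDevices() {}
// Placeholder: the demo calls _pEffectMgr->updateActiveEffects() here.
static void updateActiveEffects() { std::cout << "effect update\n"; }

int main()
{
    const unsigned int heartBeatFreq    = 20; // Hz, main polling rate
    const unsigned int effectUpdateFreq = 1;  // Hz, kept much lower so effect changes stay observable

    // Same divider scheme as run(): reload the counter after each update,
    // count it down once per heartbeat, update again when it reaches zero.
    const unsigned int maxEffectUpdateCnt = heartBeatFreq / effectUpdateFreq;
    unsigned int effectUpdateCnt = 0;

    for (int beat = 0; beat < 100; ++beat)
    {
        captureDevices();

        if (effectUpdateCnt == 0)
        {
            updateActiveEffects();
            effectUpdateCnt = maxEffectUpdateCnt;
        }
        else
        {
            --effectUpdateCnt;
        }

        // Throttle to the heartbeat period (run() uses Sleep() on Win32, usleep() on Linux).
        std::this_thread::sleep_for(std::chrono::milliseconds(1000 / heartBeatFreq));
    }
    return 0;
}

Because the counter is reloaded to heartBeatFreq / effectUpdateFreq and the update fires only when it has counted back to zero, updates actually land every (divider + 1) heartbeats; the demo accepts that slight drift in exchange for a very small loop body.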
-//-- Some local prototypes --// -void doStartup(); -void handleNonBufferedKeys(); -void handleNonBufferedMouse(); -void handleNonBufferedJoy( JoyStick* js ); - -//-- Easy access globals --// -bool appRunning = true; //Global Exit Flag - -const char *g_DeviceType[6] = {"OISUnknown", "OISKeyboard", "OISMouse", "OISJoyStick", - "OISTablet", "OISOther"}; - -InputManager *g_InputManager = 0; //Our Input System -Keyboard *g_kb = 0; //Keyboard Device -Mouse *g_m = 0; //Mouse Device -JoyStick* g_joys[4] = {0,0,0,0}; //This demo supports up to 4 controllers - -//-- OS Specific Globals --// -#if defined OIS_WIN32_PLATFORM - HWND hWnd = 0; -#elif defined OIS_LINUX_PLATFORM - Display *xDisp = 0; - Window xWin = 0; -#elif defined OIS_APPLE_PLATFORM - WindowRef mWin = 0; -#endif - -//////////// Common Event handler class //////// -class EventHandler : public KeyListener, public MouseListener, public JoyStickListener -{ -public: - EventHandler() {} - ~EventHandler() {} - bool keyPressed( const KeyEvent &arg ) { - std::cout << " KeyPressed {" << arg.key - << ", " << ((Keyboard*)(arg.device))->getAsString(arg.key) - << "} || Character (" << (char)arg.text << ")" << std::endl; - return true; - } - bool keyReleased( const KeyEvent &arg ) { - if( arg.key == KC_ESCAPE || arg.key == KC_Q ) - appRunning = false; - std::cout << "KeyReleased {" << ((Keyboard*)(arg.device))->getAsString(arg.key) << "}\n"; - return true; - } - bool mouseMoved( const MouseEvent &arg ) { - const OIS::MouseState& s = arg.state; - std::cout << "\nMouseMoved: Abs(" - << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" - << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; - return true; - } - bool mousePressed( const MouseEvent &arg, MouseButtonID id ) { - const OIS::MouseState& s = arg.state; - std::cout << "\nMouse button #" << id << " pressed. Abs(" - << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" - << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; - return true; - } - bool mouseReleased( const MouseEvent &arg, MouseButtonID id ) { - const OIS::MouseState& s = arg.state; - std::cout << "\nMouse button #" << id << " released. Abs(" - << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" - << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; - return true; - } - bool buttonPressed( const JoyStickEvent &arg, int button ) { - std::cout << std::endl << arg.device->vendor() << ". Button Pressed # " << button; - return true; - } - bool buttonReleased( const JoyStickEvent &arg, int button ) { - std::cout << std::endl << arg.device->vendor() << ". Button Released # " << button; - return true; - } - bool axisMoved( const JoyStickEvent &arg, int axis ) - { - //Provide a little dead zone - if( arg.state.mAxes[axis].abs > 2500 || arg.state.mAxes[axis].abs < -2500 ) - std::cout << std::endl << arg.device->vendor() << ". Axis # " << axis << " Value: " << arg.state.mAxes[axis].abs; - return true; - } - bool povMoved( const JoyStickEvent &arg, int pov ) - { - std::cout << std::endl << arg.device->vendor() << ". 
POV" << pov << " "; - - if( arg.state.mPOV[pov].direction & Pov::North ) //Going up - std::cout << "North"; - else if( arg.state.mPOV[pov].direction & Pov::South ) //Going down - std::cout << "South"; - - if( arg.state.mPOV[pov].direction & Pov::East ) //Going right - std::cout << "East"; - else if( arg.state.mPOV[pov].direction & Pov::West ) //Going left - std::cout << "West"; - - if( arg.state.mPOV[pov].direction == Pov::Centered ) //stopped/centered out - std::cout << "Centered"; - return true; - } - - bool vector3Moved( const JoyStickEvent &arg, int index) - { - std::cout.precision(2); - std::cout.flags(std::ios::fixed | std::ios::right); - std::cout << std::endl << arg.device->vendor() << ". Orientation # " << index - << " X Value: " << arg.state.mVectors[index].x - << " Y Value: " << arg.state.mVectors[index].y - << " Z Value: " << arg.state.mVectors[index].z; - std::cout.precision(); - std::cout.flags(); - return true; - } -}; - -//Create a global instance -EventHandler handler; - -int main() -{ - std::cout << "\n\n*** OIS Console Demo App is starting up... *** \n"; - try - { - doStartup(); - std::cout << "\nStartup done... Hit 'q' or ESC to exit.\n\n"; - - while(appRunning) - { - //Throttle down CPU usage - #if defined OIS_WIN32_PLATFORM - Sleep(90); - MSG msg; - while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) - { - TranslateMessage( &msg ); - DispatchMessage( &msg ); - } - #elif defined OIS_LINUX_PLATFORM - checkX11Events(); - usleep( 500 ); - #elif defined OIS_APPLE_PLATFORM - checkMacEvents(); - usleep( 500 ); - #endif - - if( g_kb ) - { - g_kb->capture(); - if( !g_kb->buffered() ) - handleNonBufferedKeys(); - } - - if( g_m ) - { - g_m->capture(); - if( !g_m->buffered() ) - handleNonBufferedMouse(); - } - - for( int i = 0; i < 4 ; ++i ) - { - if( g_joys[i] ) - { - g_joys[i]->capture(); - if( !g_joys[i]->buffered() ) - handleNonBufferedJoy( g_joys[i] ); - } - } - } - } - catch( const Exception &ex ) - { - #if defined OIS_WIN32_PLATFORM - MessageBox( NULL, ex.eText, "An exception has occurred!", MB_OK | - MB_ICONERROR | MB_TASKMODAL); - #else - std::cout << "\nOIS Exception Caught!\n" << "\t" << ex.eText << "[Line " - << ex.eLine << " in " << ex.eFile << "]\nExiting App"; - #endif - } - catch(std::exception &ex) - { - std::cout << "Caught std::exception: what = " << ex.what() << std::endl; - } - - //Destroying the manager will cleanup unfreed devices - if( g_InputManager ) - InputManager::destroyInputSystem(g_InputManager); - -#if defined OIS_LINUX_PLATFORM - // Be nice to X and clean up the x window - XDestroyWindow(xDisp, xWin); - XCloseDisplay(xDisp); -#endif - - std::cout << "\n\nGoodbye\n\n"; - return 0; -} - -void doStartup() -{ - ParamList pl; - -#if defined OIS_WIN32_PLATFORM - //Create a capture window for Input Grabbing - hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); - if( hWnd == NULL ) - OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); - - ShowWindow(hWnd, SW_SHOW); - - std::ostringstream wnd; - wnd << (size_t)hWnd; - - pl.insert(std::make_pair( std::string("WINDOW"), wnd.str() )); - - //Default mode is foreground exclusive..but, we want to show mouse - so nonexclusive -// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_FOREGROUND" ))); -// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_NONEXCLUSIVE"))); -#elif defined OIS_LINUX_PLATFORM - //Connects to default X window - if( !(xDisp = XOpenDisplay(0)) ) - OIS_EXCEPT(E_General, "Error opening X!"); - //Create a window - xWin 
= XCreateSimpleWindow(xDisp,DefaultRootWindow(xDisp), 0,0, 100,100, 0, 0, 0); - //bind our connection to that window - XMapWindow(xDisp, xWin); - //Select what events we want to listen to locally - XSelectInput(xDisp, xWin, StructureNotifyMask); - XEvent evtent; - do - { - XNextEvent(xDisp, &evtent); - } while(evtent.type != MapNotify); - - std::ostringstream wnd; - wnd << xWin; - - pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); - - //For this demo, show mouse and do not grab (confine to window) -// pl.insert(std::make_pair(std::string("x11_mouse_grab"), std::string("false"))); -// pl.insert(std::make_pair(std::string("x11_mouse_hide"), std::string("false"))); -#elif defined OIS_APPLE_PLATFORM - // create the window rect in global coords - ::Rect windowRect; - windowRect.left = 0; - windowRect.top = 0; - windowRect.right = 300; - windowRect.bottom = 300; - - // set the default attributes for the window - WindowAttributes windowAttrs = kWindowStandardDocumentAttributes - | kWindowStandardHandlerAttribute - | kWindowInWindowMenuAttribute - | kWindowHideOnFullScreenAttribute; - - // Create the window - CreateNewWindow(kDocumentWindowClass, windowAttrs, &windowRect, &mWin); - - // Color the window background black - SetThemeWindowBackground (mWin, kThemeBrushBlack, true); - - // Set the title of our window - CFStringRef titleRef = CFStringCreateWithCString( kCFAllocatorDefault, "OIS Input", kCFStringEncodingASCII ); - SetWindowTitleWithCFString( mWin, titleRef ); - - // Center our window on the screen - RepositionWindow( mWin, NULL, kWindowCenterOnMainScreen ); - - // Install the event handler for the window - InstallStandardEventHandler(GetWindowEventTarget(mWin)); - - // This will give our window focus, and not lock it to the terminal - ProcessSerialNumber psn = { 0, kCurrentProcess }; - TransformProcessType( &psn, kProcessTransformToForegroundApplication ); - SetFrontProcess(&psn); - - // Display and select our window - ShowWindow(mWin); - SelectWindow(mWin); - - std::ostringstream wnd; - wnd << (unsigned int)mWin; //cast to int so it gets encoded correctly (else it gets stored as a hex string) - std::cout << "WindowRef: " << mWin << " WindowRef as int: " << wnd.str() << "\n"; - pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); -#endif - - //This never returns null.. it will raise an exception on errors - g_InputManager = InputManager::createInputSystem(pl); - - //Lets enable all addons that were compiled in: - g_InputManager->enableAddOnFactory(InputManager::AddOn_All); - - //Print debugging information - unsigned int v = g_InputManager->getVersionNumber(); - std::cout << "OIS Version: " << (v>>16 ) << "." << ((v>>8) & 0x000000FF) << "." 
<< (v & 0x000000FF) - << "\nRelease Name: " << g_InputManager->getVersionName() - << "\nManager: " << g_InputManager->inputSystemName() - << "\nTotal Keyboards: " << g_InputManager->getNumberOfDevices(OISKeyboard) - << "\nTotal Mice: " << g_InputManager->getNumberOfDevices(OISMouse) - << "\nTotal JoySticks: " << g_InputManager->getNumberOfDevices(OISJoyStick); - - //List all devices - DeviceList list = g_InputManager->listFreeDevices(); - for( DeviceList::iterator i = list.begin(); i != list.end(); ++i ) - std::cout << "\n\tDevice: " << g_DeviceType[i->first] << " Vendor: " << i->second; - - g_kb = (Keyboard*)g_InputManager->createInputObject( OISKeyboard, true ); - g_kb->setEventCallback( &handler ); - - g_m = (Mouse*)g_InputManager->createInputObject( OISMouse, true ); - g_m->setEventCallback( &handler ); - const MouseState &ms = g_m->getMouseState(); - ms.width = 100; - ms.height = 100; - - try - { - //This demo uses at most 4 joysticks - use old way to create (i.e. disregard vendor) - int numSticks = std::min(g_InputManager->getNumberOfDevices(OISJoyStick), 4); - for( int i = 0; i < numSticks; ++i ) - { - g_joys[i] = (JoyStick*)g_InputManager->createInputObject( OISJoyStick, true ); - g_joys[i]->setEventCallback( &handler ); - std::cout << "\n\nCreating Joystick " << (i + 1) - << "\n\tAxes: " << g_joys[i]->getNumberOfComponents(OIS_Axis) - << "\n\tSliders: " << g_joys[i]->getNumberOfComponents(OIS_Slider) - << "\n\tPOV/HATs: " << g_joys[i]->getNumberOfComponents(OIS_POV) - << "\n\tButtons: " << g_joys[i]->getNumberOfComponents(OIS_Button) - << "\n\tVector3: " << g_joys[i]->getNumberOfComponents(OIS_Vector3); - } - } - catch(OIS::Exception &ex) - { - std::cout << "\nException raised on joystick creation: " << ex.eText << std::endl; - } -} - -void handleNonBufferedKeys() -{ - if( g_kb->isKeyDown( KC_ESCAPE ) || g_kb->isKeyDown( KC_Q ) ) - appRunning = false; - - if( g_kb->isModifierDown(Keyboard::Shift) ) - std::cout << "Shift is down..\n"; - if( g_kb->isModifierDown(Keyboard::Alt) ) - std::cout << "Alt is down..\n"; - if( g_kb->isModifierDown(Keyboard::Ctrl) ) - std::cout << "Ctrl is down..\n"; -} - -void handleNonBufferedMouse() -{ - //Just dump the current mouse state - const MouseState &ms = g_m->getMouseState(); - std::cout << "\nMouse: Abs(" << ms.X.abs << " " << ms.Y.abs << " " << ms.Z.abs - << ") B: " << ms.buttons << " Rel(" << ms.X.rel << " " << ms.Y.rel << " " << ms.Z.rel << ")"; -} - -void handleNonBufferedJoy( JoyStick* js ) -{ - //Just dump the current joy state - const JoyStickState &joy = js->getJoyStickState(); - for( unsigned int i = 0; i < joy.mAxes.size(); ++i ) - std::cout << "\nAxis " << i << " X: " << joy.mAxes[i].abs; -} - -#if defined OIS_WIN32_PLATFORM -LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) -{ - return FALSE; -} -#endif - -#if defined OIS_LINUX_PLATFORM -//This is just here to show that you still recieve x11 events, as the lib only needs mouse/key events -void checkX11Events() -{ - XEvent event; - - //Poll x11 for events (keyboard and mouse events are caught here) - while( XPending(xDisp) > 0 ) - { - XNextEvent(xDisp, &event); - //Handle Resize events - if( event.type == ConfigureNotify ) - { - if( g_m ) - { - const MouseState &ms = g_m->getMouseState(); - ms.width = event.xconfigure.width; - ms.height = event.xconfigure.height; - } - } - else if( event.type == DestroyNotify ) - { - std::cout << "Exiting...\n"; - appRunning = false; - } - else - std::cout << "\nUnknown X Event: " << event.type << std::endl; - } -} -#endif - -#if 
defined OIS_APPLE_PLATFORM -void checkMacEvents() -{ - //TODO - Check for window resize events, and then adjust the members of mousestate - EventRef event = NULL; - EventTargetRef targetWindow = GetEventDispatcherTarget(); - - if( ReceiveNextEvent( 0, NULL, kEventDurationNoWait, true, &event ) == noErr ) - { - SendEventToEventTarget(event, targetWindow); - std::cout << "Event : " << GetEventKind(event) << "\n"; - ReleaseEvent(event); - } -} -#endif diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp deleted file mode 100644 index 6e70213..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp +++ /dev/null @@ -1,563 +0,0 @@ -/* -The zlib/libpng License - -Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) - -This software is provided 'as-is', without any express or implied warranty. In no event will -the authors be held liable for any damages arising from the use of this software. - -Permission is granted to anyone to use this software for any purpose, including commercial -applications, and to alter it and redistribute it freely, subject to the following -restrictions: - - 1. The origin of this software must not be misrepresented; you must not claim that - you wrote the original software. If you use this software in a product, - an acknowledgment in the product documentation would be appreciated but is - not required. - - 2. Altered source versions must be plainly marked as such, and must not be - misrepresented as being the original software. - - 3. This notice may not be removed or altered from any source distribution. -*/ -#include "linux/LinuxForceFeedback.h" -#include "OISException.h" - -#include -#include -#include - -#ifdef HAVE_UNISTD_H -#include -#endif - -using namespace OIS; - -// 0 = No trace; 1 = Important traces; 2 = Debug traces -#define OIS_LINUX_JOYFF_DEBUG 1 - -#ifdef OIS_LINUX_JOYFF_DEBUG -# include - using namespace std; -#endif - -//--------------------------------------------------------------// -LinuxForceFeedback::LinuxForceFeedback(int deviceID) : - ForceFeedback(), mJoyStick(deviceID) -{ -} - -//--------------------------------------------------------------// -LinuxForceFeedback::~LinuxForceFeedback() -{ - // Unload all effects. - for(EffectList::iterator i = mEffectList.begin(); i != mEffectList.end(); ++i ) - { - struct ff_effect *linEffect = i->second; - if( linEffect ) - _unload(linEffect->id); - } - - mEffectList.clear(); -} - -//--------------------------------------------------------------// -unsigned short LinuxForceFeedback::getFFMemoryLoad() -{ - int nEffects = -1; - if (ioctl(mJoyStick, EVIOCGEFFECTS, &nEffects) == -1) - OIS_EXCEPT(E_General, "Unknown error reading max number of uploaded effects."); -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << "LinuxForceFeedback("<< mJoyStick - << ") : Read device max number of uploaded effects : " << nEffects << endl; -#endif - - return (unsigned short int)(nEffects > 0 ? 
100.0*mEffectList.size()/nEffects : 100); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::setMasterGain(float value) -{ - if (!mSetGainSupport) - { -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain " - << "is not supported by the device" << endl; -#endif - return; - } - - struct input_event event; - - memset(&event, 0, sizeof(event)); - event.type = EV_FF; - event.code = FF_GAIN; - if (value < 0.0) - value = 0.0; - else if (value > 1.0) - value = 1.0; - event.value = (__s32)(value * 0xFFFFUL); - -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain to " - << value << " => " << event.value << endl; -#endif - - if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { - OIS_EXCEPT(E_General, "Unknown error changing master gain."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::setAutoCenterMode(bool enabled) -{ - if (!mSetAutoCenterSupport) - { -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting auto-center mode " - << "is not supported by the device" << endl; -#endif - return; - } - - struct input_event event; - - memset(&event, 0, sizeof(event)); - event.type = EV_FF; - event.code = FF_AUTOCENTER; - event.value = (__s32)(enabled*0xFFFFFFFFUL); - -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Toggling auto-center to " - << enabled << " => 0x" << hex << event.value << dec << endl; -#endif - - if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { - OIS_EXCEPT(E_General, "Unknown error toggling auto-center."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::upload( const Effect* effect ) -{ - switch( effect->force ) - { - case OIS::Effect::ConstantForce: - _updateConstantEffect(effect); - break; - case OIS::Effect::ConditionalForce: - _updateConditionalEffect(effect); - break; - case OIS::Effect::PeriodicForce: - _updatePeriodicEffect(effect); - break; - case OIS::Effect::RampForce: - _updateRampEffect(effect); - break; - case OIS::Effect::CustomForce: - //_updateCustomEffect(effect); - //break; - default: - OIS_EXCEPT(E_NotImplemented, "Requested force not implemented yet, sorry!"); - break; - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::modify( const Effect* effect ) -{ - upload(effect); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::remove( const Effect* effect ) -{ - //Get the effect - if it exists - EffectList::iterator i = mEffectList.find(effect->_handle); - if( i != mEffectList.end() ) - { - struct ff_effect *linEffect = i->second; - if( linEffect ) - { - _stop(effect->_handle); - - _unload(effect->_handle); - - free(linEffect); - - mEffectList.erase(i); - } - else - mEffectList.erase(i); - } -} - -//--------------------------------------------------------------// -// To Signed16/Unsigned15 safe conversions -#define MaxUnsigned15Value 0x7FFF -#define toUnsigned15(value) \ - (__u16)((value) < 0 ? 0 : ((value) > MaxUnsigned15Value ? MaxUnsigned15Value : (value))) - -#define MaxSigned16Value 0x7FFF -#define MinSigned16Value -0x7FFF -#define toSigned16(value) \ - (__s16)((value) < MinSigned16Value ? MinSigned16Value : ((value) > MaxSigned16Value ? 
MaxSigned16Value : (value))) - -// OIS to Linux duration -#define LinuxInfiniteDuration 0xFFFF -#define OISDurationUnitMS 1000 // OIS duration unit (microseconds), expressed in milliseconds (theLinux duration unit) - -// linux/input.h : All duration values are expressed in ms. Values above 32767 ms (0x7fff) -// should not be used and have unspecified results. -#define LinuxDuration(oisDuration) ((oisDuration) == Effect::OIS_INFINITE ? LinuxInfiniteDuration \ - : toUnsigned15((oisDuration)/OISDurationUnitMS)) - - -// OIS to Linux levels -#define OISMaxLevel 10000 -#define LinuxMaxLevel 0x7FFF - -// linux/input.h : Valid range for the attack and fade levels is 0x0000 - 0x7fff -#define LinuxPositiveLevel(oisLevel) toUnsigned15(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) - -#define LinuxSignedLevel(oisLevel) toSigned16(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) - - -//--------------------------------------------------------------// -void LinuxForceFeedback::_setCommonProperties(struct ff_effect *event, - struct ff_envelope *ffenvelope, - const Effect* effect, const Envelope *envelope ) -{ - memset(event, 0, sizeof(struct ff_effect)); - - if (envelope && ffenvelope && envelope->isUsed()) { - ffenvelope->attack_length = LinuxDuration(envelope->attackLength); - ffenvelope->attack_level = LinuxPositiveLevel(envelope->attackLevel); - ffenvelope->fade_length = LinuxDuration(envelope->fadeLength); - ffenvelope->fade_level = LinuxPositiveLevel(envelope->fadeLevel); - } - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl; - if (envelope && ffenvelope) - { - cout << " Enveloppe :" << endl - << " AttackLen : " << envelope->attackLength - << " => " << ffenvelope->attack_length << endl - << " AttackLvl : " << envelope->attackLevel - << " => " << ffenvelope->attack_level << endl - << " FadeLen : " << envelope->fadeLength - << " => " << ffenvelope->fade_length << endl - << " FadeLvl : " << envelope->fadeLevel - << " => " << ffenvelope->fade_level << endl; - } -#endif - - event->direction = (__u16)(1 + (effect->direction*45.0+135.0)*0xFFFFUL/360.0); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Direction : " << Effect::getDirectionName(effect->direction) - << " => 0x" << hex << event->direction << dec << endl; -#endif - - // TODO trigger_button 0 vs. -1 - event->trigger.button = effect->trigger_button; // < 0 ? 
0 : effect->trigger_button; - event->trigger.interval = LinuxDuration(effect->trigger_interval); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Trigger :" << endl - << " Button : " << effect->trigger_button - << " => " << event->trigger.button << endl - << " Interval : " << effect->trigger_interval - << " => " << event->trigger.interval << endl; -#endif - - event->replay.length = LinuxDuration(effect->replay_length); - event->replay.delay = LinuxDuration(effect->replay_delay); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Replay :" << endl - << " Length : " << effect->replay_length - << " => " << event->replay.length << endl - << " Delay : " << effect->replay_delay - << " => " << event->replay.delay << endl; -#endif -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updateConstantEffect( const Effect* eff ) -{ - struct ff_effect event; - - ConstantEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); - - event.type = FF_CONSTANT; - event.id = -1; - - event.u.constant.level = LinuxSignedLevel(effect->level); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Level : " << effect->level - << " => " << event.u.constant.level << endl; -#endif - - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updateRampEffect( const Effect* eff ) -{ - struct ff_effect event; - - RampEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); - - event.type = FF_RAMP; - event.id = -1; - - event.u.ramp.start_level = LinuxSignedLevel(effect->startLevel); - event.u.ramp.end_level = LinuxSignedLevel(effect->endLevel); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " StartLevel : " << effect->startLevel - << " => " << event.u.ramp.start_level << endl - << " EndLevel : " << effect->endLevel - << " => " << event.u.ramp.end_level << endl; -#endif - - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updatePeriodicEffect( const Effect* eff ) -{ - struct ff_effect event; - - PeriodicEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, &event.u.periodic.envelope, eff, &effect->envelope); - - event.type = FF_PERIODIC; - event.id = -1; - - switch( eff->type ) - { - case OIS::Effect::Square: - event.u.periodic.waveform = FF_SQUARE; - break; - case OIS::Effect::Triangle: - event.u.periodic.waveform = FF_TRIANGLE; - break; - case OIS::Effect::Sine: - event.u.periodic.waveform = FF_SINE; - break; - case OIS::Effect::SawToothUp: - event.u.periodic.waveform = FF_SAW_UP; - break; - case OIS::Effect::SawToothDown: - event.u.periodic.waveform = FF_SAW_DOWN; - break; - // Note: No support for Custom periodic force effect for the moment - //case OIS::Effect::Custom: - //event.u.periodic.waveform = FF_CUSTOM; - //break; - default: - OIS_EXCEPT(E_General, "No such available effect for Periodic force!"); - break; - } - - event.u.periodic.period = LinuxDuration(effect->period); - event.u.periodic.magnitude = LinuxPositiveLevel(effect->magnitude); - event.u.periodic.offset = LinuxPositiveLevel(effect->offset); - event.u.periodic.phase = (__u16)(effect->phase*event.u.periodic.period/36000.0); // ????? 
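The conversion macros above pack the OIS-to-Linux unit changes into single expressions. Restated as plain functions they are easier to check against a few sample values; this is a sketch only, with Effect::OIS_INFINITE assumed to be 0xFFFFFFFF and the 0..10000 / 0x7FFF ranges copied from the macro definitions.

#include <cstdint>
#include <iostream>

static const unsigned int OIS_INFINITE_US = 0xffffffffu;  // stand-in for Effect::OIS_INFINITE

// OIS durations are in microseconds; the kernel wants milliseconds,
// capped at 0x7fff, with 0xffff meaning "infinite" (LinuxDuration above).
static uint16_t linuxDuration(unsigned int oisMicroSec)
{
    if (oisMicroSec == OIS_INFINITE_US)
        return 0xffff;
    unsigned int ms = oisMicroSec / 1000;
    return (uint16_t)(ms > 0x7fff ? 0x7fff : ms);
}

// OIS signed levels are -10000..10000; Linux wants -0x7fff..0x7fff (LinuxSignedLevel above).
static int16_t linuxSignedLevel(int oisLevel)
{
    long v = 0x7fffL * oisLevel / 10000;
    if (v >  0x7fff) v =  0x7fff;
    if (v < -0x7fff) v = -0x7fff;
    return (int16_t)v;
}

int main()
{
    std::cout << "level 5000          -> " << linuxSignedLevel(5000) << "\n"        // 16383 (0x3fff)
              << "period 10000 us     -> " << linuxDuration(10000)   << " ms\n"     // 10
              << "replay OIS_INFINITE -> 0x" << std::hex << linuxDuration(OIS_INFINITE_US) << "\n"; // ffff
    return 0;
}

The phase conversion is deliberately left out of the sketch: the division by 36000.0 above is the one spot the original author flags as uncertain with "// ?????".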
- - // Note: No support for Custom periodic force effect for the moment - event.u.periodic.custom_len = 0; - event.u.periodic.custom_data = 0; - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Magnitude : " << effect->magnitude - << " => " << event.u.periodic.magnitude << endl - << " Period : " << effect->period - << " => " << event.u.periodic.period << endl - << " Offset : " << effect->offset - << " => " << event.u.periodic.offset << endl - << " Phase : " << effect->phase - << " => " << event.u.periodic.phase << endl; -#endif - - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updateConditionalEffect( const Effect* eff ) -{ - struct ff_effect event; - - ConditionalEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, NULL, eff, NULL); - - switch( eff->type ) - { - case OIS::Effect::Friction: - event.type = FF_FRICTION; - break; - case OIS::Effect::Damper: - event.type = FF_DAMPER; - break; - case OIS::Effect::Inertia: - event.type = FF_INERTIA; - break; - case OIS::Effect::Spring: - event.type = FF_SPRING; - break; - default: - OIS_EXCEPT(E_General, "No such available effect for Conditional force!"); - break; - } - - event.id = -1; - - event.u.condition[0].right_saturation = LinuxSignedLevel(effect->rightSaturation); - event.u.condition[0].left_saturation = LinuxSignedLevel(effect->leftSaturation); - event.u.condition[0].right_coeff = LinuxSignedLevel(effect->rightCoeff); - event.u.condition[0].left_coeff = LinuxSignedLevel(effect->leftCoeff); - event.u.condition[0].deadband = LinuxPositiveLevel(effect->deadband);// Unit ?? - event.u.condition[0].center = LinuxSignedLevel(effect->center); // Unit ?? TODO ? - - // TODO support for second condition - event.u.condition[1] = event.u.condition[0]; - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Condition[0] : " << endl - << " RightSaturation : " << effect->rightSaturation - << " => " << event.u.condition[0].right_saturation << endl - << " LeftSaturation : " << effect->leftSaturation - << " => " << event.u.condition[0]. 
left_saturation << endl - << " RightCoefficient : " << effect->rightCoeff - << " => " << event.u.condition[0].right_coeff << endl - << " LeftCoefficient : " << effect->leftCoeff - << " => " << event.u.condition[0].left_coeff << endl - << " DeadBand : " << effect->deadband - << " => " << event.u.condition[0].deadband << endl - << " Center : " << effect->center - << " => " << event.u.condition[0].center << endl; - cout << " Condition[1] : Not implemented" << endl; -#endif - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_upload( struct ff_effect* ffeffect, const Effect* effect) -{ - struct ff_effect *linEffect = 0; - - //Get the effect - if it exists - EffectList::iterator i = mEffectList.find(effect->_handle); - //It has been created already - if( i != mEffectList.end() ) - linEffect = i->second; - - if( linEffect == 0 ) - { -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Adding new effect : " - << Effect::getEffectTypeName(effect->type) << endl; -#endif - - //This effect has not yet been created, so create it in the device - if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { - // TODO device full check - // OIS_EXCEPT(E_DeviceFull, "Remove an effect before adding more!"); - OIS_EXCEPT(E_General, "Unknown error creating effect (may be the device is full)->.."); - } - - // Save returned effect handle - effect->_handle = ffeffect->id; - - // Save a copy of the uploaded effect for later simple modifications - linEffect = (struct ff_effect *)calloc(1, sizeof(struct ff_effect)); - memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); - - mEffectList[effect->_handle] = linEffect; - - // Start playing the effect. - _start(effect->_handle); - } - else - { -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Replacing effect : " - << Effect::getEffectTypeName(effect->type) << endl; -#endif - - // Keep same id/handle, as this is just an update in the device. - ffeffect->id = effect->_handle; - - // Update effect in the device. - if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { - OIS_EXCEPT(E_General, "Unknown error updating an effect->.."); - } - - // Update local linEffect for next time. - memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); - } - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << "LinuxForceFeedback("<< mJoyStick - << ") : Effect handle : " << effect->_handle << endl; -#endif -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_stop( int handle) { - struct input_event stop; - - stop.type = EV_FF; - stop.code = handle; - stop.value = 0; - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick - << ") : Stopping effect with handle " << handle << endl; -#endif - - if (write(mJoyStick, &stop, sizeof(stop)) != sizeof(stop)) { - OIS_EXCEPT(E_General, "Unknown error stopping effect->.."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_start( int handle) { - struct input_event play; - - play.type = EV_FF; - play.code = handle; - play.value = 1; // Play once. 
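_upload(), _start(), _stop() and _unload() above are thin wrappers over the kernel force-feedback interface: EVIOCSFF with id == -1 uploads an effect and lets the kernel assign the handle, writing an EV_FF input_event whose code is that handle plays it (value = repeat count) or stops it (value = 0), and EVIOCRMFF frees the slot; setMasterGain() and setAutoCenterMode() write EV_FF events with the FF_GAIN / FF_AUTOCENTER codes in the same way. A hedged standalone sketch of that lifecycle follows; the /dev/input/event0 path and the constant-force parameters are placeholders, while the real code uses the descriptor owned by LinuxJoyStick.

#include <fcntl.h>
#include <linux/input.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <cstdio>
#include <cstring>

int main()
{
    int fd = open("/dev/input/event0", O_RDWR);   // placeholder device path
    if (fd < 0) { perror("open"); return 1; }

    // 1) Upload: id = -1 asks the kernel to pick a slot; EVIOCSFF fills it in,
    //    which is what _upload() saves into effect->_handle.
    struct ff_effect fx;
    memset(&fx, 0, sizeof(fx));
    fx.type = FF_CONSTANT;
    fx.id = -1;
    fx.u.constant.level = 0x3fff;   // about half strength
    fx.replay.length = 1000;        // ms
    if (ioctl(fd, EVIOCSFF, &fx) == -1) { perror("EVIOCSFF"); close(fd); return 1; }

    // 2) Play / stop: write an EV_FF event, code = effect id, value = repeat count,
    //    exactly as _start() (value 1) and _stop() (value 0) do.
    struct input_event ev;
    memset(&ev, 0, sizeof(ev));
    ev.type  = EV_FF;
    ev.code  = (unsigned short)fx.id;
    ev.value = 1;
    if (write(fd, &ev, sizeof(ev)) != sizeof(ev)) perror("play");

    usleep(1000 * 1000);            // let the effect run for a second

    ev.value = 0;
    if (write(fd, &ev, sizeof(ev)) != sizeof(ev)) perror("stop");

    // 3) Remove: frees the device slot, as _unload() does from remove() and the destructor.
    if (ioctl(fd, EVIOCRMFF, fx.id) == -1) perror("EVIOCRMFF");

    close(fd);
    return 0;
}

Error handling is reduced to perror() so the upload / play / stop / remove flow stays visible.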
- -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick - << ") : Starting effect with handle " << handle << endl; -#endif - - if (write(mJoyStick, &play, sizeof(play)) != sizeof(play)) { - OIS_EXCEPT(E_General, "Unknown error playing effect->.."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_unload( int handle) -{ -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick - << ") : Removing effect with handle " << handle << endl; -#endif - - if (ioctl(mJoyStick, EVIOCRMFF, handle) == -1) { - OIS_EXCEPT(E_General, "Unknown error removing effect->.."); - } -} diff --git a/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp b/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp deleted file mode 100644 index 87dd977..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp +++ /dev/null @@ -1,308 +0,0 @@ -/* -The zlib/libpng License - -Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) - -This software is provided 'as-is', without any express or implied warranty. In no event will -the authors be held liable for any damages arising from the use of this software. - -Permission is granted to anyone to use this software for any purpose, including commercial -applications, and to alter it and redistribute it freely, subject to the following -restrictions: - - 1. The origin of this software must not be misrepresented; you must not claim that - you wrote the original software. If you use this software in a product, - an acknowledgment in the product documentation would be appreciated but is - not required. - - 2. Altered source versions must be plainly marked as such, and must not be - misrepresented as being the original software. - - 3. This notice may not be removed or altered from any source distribution. 
-*/ -#include "OISConfig.h" - -#include "linux/LinuxJoyStickEvents.h" -#include "linux/LinuxInputManager.h" -#include "linux/LinuxForceFeedback.h" -#include "linux/EventHelpers.h" - -#include "OISEvents.h" -#include "OISException.h" - -#include //Needed to Open a file descriptor -#ifdef HAVE_UNISTD_H -#include -#endif -#include -#include - - -#include -# include -using namespace std; - -using namespace OIS; - -//#define OIS_LINUX_JOY_DEBUG - -//-------------------------------------------------------------------// -LinuxJoyStick::LinuxJoyStick(InputManager* creator, bool buffered, const JoyStickInfo& js) - : JoyStick(js.vendor, buffered, js.devId, creator) -{ - mJoyStick = js.joyFileD; - - mState.mAxes.clear(); - mState.mAxes.resize(js.axes); - mState.mButtons.clear(); - mState.mButtons.resize(js.buttons); - - mPOVs = js.hats; - - mButtonMap = js.button_map; - mAxisMap = js.axis_map; - mRanges = js.axis_range; - - ff_effect = 0; -} - -//-------------------------------------------------------------------// -LinuxJoyStick::~LinuxJoyStick() -{ - EventUtils::removeForceFeedback( &ff_effect ); -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::_initialize() -{ - //Clear old joy state - mState.mAxes.resize(mAxisMap.size()); - mState.clear(); - - //This will create and new us a force feedback structure if it exists - EventUtils::enumerateForceFeedback( mJoyStick, &ff_effect ); - - if( mJoyStick == -1 ) - OIS_EXCEPT(E_InputDeviceNonExistant, "LinuxJoyStick::_initialize() >> JoyStick Not Found!"); -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::capture() -{ - static const short POV_MASK[8] = {0,0,1,1,2,2,3,3}; - - //Used to determine if an axis has been changed and needs an event - bool axisMoved[32] = {false, false, false, false, false, false, false, false, false, false, false, false, false, - false, false, false, false, false, false, false, false, false, false, false, false, false, - false, false, false, false, false, false}; - - //We are in non blocking mode - we just read once, and try to fill up buffer - input_event js[JOY_BUFFERSIZE]; - while(true) - { - int ret = read(mJoyStick, &js, sizeof(struct input_event) * JOY_BUFFERSIZE); - if( ret < 0 ) - break; - - //Determine how many whole events re read up - ret /= sizeof(struct input_event); - for(int i = 0; i < ret; ++i) - { - switch(js[i].type) - { - case EV_KEY: //Button - { - int button = mButtonMap[js[i].code]; - - #ifdef OIS_LINUX_JOY_DEBUG - cout << "\nButton Code: " << js[i].code << ", OIS Value: " << button << endl; - #endif - - //Check to see whether push or released event... - if(js[i].value) - { - mState.mButtons[button] = true; - if( mBuffered && mListener ) - if(!mListener->buttonPressed(JoyStickEvent(this,mState), button)) return; - } - else - { - mState.mButtons[button] = false; - if( mBuffered && mListener ) - if(!mListener->buttonReleased(JoyStickEvent(this,mState), button)) return; - } - break; - } - - case EV_ABS: //Absolute Axis - { - //A Stick (BrakeDefine is the highest possible Axis) - if( js[i].code <= ABS_BRAKE ) - { - int axis = mAxisMap[js[i].code]; - assert( axis < 32 && "Too many axes (Max supported is 32). Report this to OIS forums!" 
); - - axisMoved[axis] = true; - - //check for rescaling: - if( mRanges[axis].min == JoyStick::MIN_AXIS && mRanges[axis].max != JoyStick::MAX_AXIS ) - { //Scale is perfect - mState.mAxes[axis].abs = js[i].value; - } - else - { //Rescale - float proportion = (float)(js[i].value-mRanges[axis].max)/(float)(mRanges[axis].min-mRanges[axis].max); - mState.mAxes[axis].abs = (int)(32767.0f - (65535.0f * proportion)); - } - } - else if( js[i].code <= ABS_HAT3Y ) //A POV - Max four POVs allowed - { - //Normalise the POV to between 0-7 - //Even is X Axis, Odd is Y Axis - unsigned char LinuxPovNumber = js[i].code - 16; - short OIS_POVIndex = POV_MASK[LinuxPovNumber]; - - //Handle X Axis first (Even) (left right) - if((LinuxPovNumber & 0x0001) == 0) - { - //Why do this? Because, we use a bit field, and when this axis is east, - //it can't possibly be west too. So clear out the two X axes, then refil - //it in with the new direction bit. - //Clear the East/West Bit Flags first - mState.mPOV[OIS_POVIndex].direction &= 0x11110011; - if( js[i].value == -1 ) //Left - mState.mPOV[OIS_POVIndex].direction |= Pov::West; - else if( js[i].value == 1 ) //Right - mState.mPOV[OIS_POVIndex].direction |= Pov::East; - } - //Handle Y Axis (Odd) (up down) - else - { - //Clear the North/South Bit Flags first - mState.mPOV[OIS_POVIndex].direction &= 0x11111100; - if( js[i].value == -1 ) //Up - mState.mPOV[OIS_POVIndex].direction |= Pov::North; - else if( js[i].value == 1 ) //Down - mState.mPOV[OIS_POVIndex].direction |= Pov::South; - } - - if( mBuffered && mListener ) - if( mListener->povMoved( JoyStickEvent(this,mState), OIS_POVIndex) == false ) - return; - } - break; - } - - - case EV_REL: //Relative Axes (Do any joystick actually have a relative axis?) - #ifdef OIS_LINUX_JOY_DEBUG - cout << "\nWarning: Relatives axes not supported yet" << endl; - #endif - break; - default: break; - } - } - } - - //All axes and POVs are combined into one movement per pair per captured frame - if( mBuffered && mListener ) - { - for( int i = 0; i < 32; ++i ) - if( axisMoved[i] ) - if( mListener->axisMoved( JoyStickEvent(this,mState), i) == false ) - return; - } -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::setBuffered(bool buffered) -{ - if( buffered != mBuffered ) - { - mBuffered = buffered; - _initialize(); - } -} - -//-------------------------------------------------------------------// -JoyStickInfo LinuxJoyStick::_getJoyInfo() -{ - JoyStickInfo js; - - js.devId = mDevID; - js.joyFileD = mJoyStick; - js.vendor = mVendor; - js.axes = (int)mState.mAxes.size(); - js.buttons = (int)mState.mButtons.size(); - js.hats = mPOVs; - js.button_map = mButtonMap; - js.axis_map = mAxisMap; - js.axis_range = mRanges; - - return js; -} - -//-------------------------------------------------------------------// -JoyStickInfoList LinuxJoyStick::_scanJoys() -{ - JoyStickInfoList joys; - - //Search through all of the event devices.. and identify which ones are joysticks - //xxx move this to InputManager, as it can also scan all other events - for(int i = 0; i < 64; ++i ) - { - stringstream s; - s << "/dev/input/event" << i; - int fd = open( s.str().c_str(), O_RDWR |O_NONBLOCK ); - if(fd == -1) - continue; - - #ifdef OIS_LINUX_JOY_DEBUG - cout << "Opening " << s.str() << "..." << endl; - #endif - try - { - JoyStickInfo js; - if( EventUtils::isJoyStick(fd, js) ) - { - joys.push_back(js); - #ifdef OIS_LINUX_JOY_DEBUG - cout << "=> Joystick added to list." 
<< endl; - #endif - } - else - { - #ifdef OIS_LINUX_JOY_DEBUG - cout << "=> Not a joystick." << endl; - #endif - close(fd); - } - } - catch(...) - { - #ifdef OIS_LINUX_JOY_DEBUG - cout << "Exception caught!!" << endl; - #endif - close(fd); - } - } - - return joys; -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::_clearJoys(JoyStickInfoList &joys) -{ - for(JoyStickInfoList::iterator i = joys.begin(); i != joys.end(); ++i) - close(i->joyFileD); - joys.clear(); -} - -//-------------------------------------------------------------------// -Interface* LinuxJoyStick::queryInterface(Interface::IType type) -{ - if( ff_effect && type == Interface::ForceFeedback ) - return ff_effect; - - return 0; -} diff --git a/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt deleted file mode 100644 index 65ae11f..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt +++ /dev/null @@ -1,546 +0,0 @@ -# cmake_minimum_required (VERSION 2.6) -# project (OXYGINE) -# -# include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") -# cmaki_find_package(sdl2 REQUIRED) -# cmaki_find_package(freeimage REQUIRED) -# cmaki_find_package(dune-zlib REQUIRED) -# cmaki_find_package(haxx-libcurl REQUIRED) -# -# include_directories(${CMAKI_INCLUDE_DIRS}) -# set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) -# -# if (EMSCRIPTEN) -# #don't need SDL2 -# elseif (WIN32) -# #hardcoded path to SDL2 on windows -# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# else(WIN32) -# # find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) -# # message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) -# # -# # if (SDL2_INCLUDE_DIRS) -# # set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) -# # message(STATUS "found") -# # else() -# # message(STATUS "SDL notfound") -# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# # endif() -# -# find_package(CURL) -# endif(EMSCRIPTEN) -# -# -# set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) -# set(OXYGINE_SRC ${OXYGINE_ROOT}/src) -# -# set(FOLDERS src src/closure src/minizip src/core -# src/core/gl src/dev_tools src/minizip -# src/math src/pugixml src/json src/res -# src/text_utils src/utils src/winnie_alloc) -# -# -# if (EMSCRIPTEN) -# set(PLATFORM emscripten) -# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") -# set(PLATFORM linux) -# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") -# set(PLATFORM ios) -# elseif(MSVC) -# set(PLATFORM win32) -# elseif(MINGW) -# set(PLATFORM win32_mingw) -# endif() -# -# -# set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) -# -# -# -# if (EMSCRIPTEN) -# set(OX_HAVE_LIBPNG 1) -# set(OX_HAVE_HTTP 1) -# set(OX_USE_SDL2 0) -# -# set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) -# -# -# file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) -# -# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") -# -# set(OX_HAVE_LIBJPEG 1) -# set(OX_HAVE_LIBPNG 1) -# -# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") -# elseif(MSVC) -# -# set(OX_HAVE_LIBJPEG 1) -# set(OX_HAVE_LIBPNG 1) -# set(OX_HAVE_LIBCURL 1) -# set(OX_HAVE_HTTP 1) -# -# set(libprefix lib) -# -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) -# -# elseif(MINGW) -# -# set(libprefix lib) -# -# set(OX_HAVE_LIBPNG 1) -# set(OX_HAVE_LIBCURL 1) -# set(OX_HAVE_HTTP 1) -# -# endif() -# -# if (OX_HAVE_LIBCURL) -# set(FOLDERS ${FOLDERS} src/core/curl) -# include_directories(${THIRD_PARTY}/curl/) -# set(OX_DEFINITIONS 
${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) -# endif(OX_HAVE_LIBCURL) -# -# -# if (NOT OX_HAVE_HTTP) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) -# endif(NOT OX_HAVE_HTTP) -# -# -# -# foreach(ITEM ${FOLDERS}) -# file(GLOB FLS -# ${OXYGINE_ROOT}/${ITEM}/*.cpp -# ${OXYGINE_ROOT}/${ITEM}/*.c -# ${OXYGINE_ROOT}/${ITEM}/*.h) -# set(SOURCES ${SOURCES} ${FLS}) -# string(REPLACE / \\ SGROUP ${ITEM}) -# source_group(${SGROUP} FILES ${FLS}) -# endforeach(ITEM) -# -# -# set(OXYGINE_INCLUDE_DIRS -# ${OXYGINE_SRC} -# ${THIRD_PARTY}/pthreads/include/ -# ${THIRD_PARTY}/zlib) -# -# -# set(OXYGINE_LIBRARY_DIRS -# ${OXYGINE_LIBRARY_DIRS} -# ${OXYGINE_SOURCE_DIR}/libs -# ${THIRD_PARTY}/libraries) -# -# -# if (FORCE_GLES) -# set(OPENGL_LIBRARIES libGLESv2.lib) -# endif(FORCE_GLES) -# -# -# if (MINGW) -# set(CORE_LIBS ${CORE_LIBS} mingw32) -# endif(MINGW) -# -# -# set(CORE_LIBS -# ${CORE_LIBS} -# oxygine-framework -# ${OPENGL_LIBRARIES} -# ) -# -# -# if (OX_USE_SDL2) -# set(CORE_LIBS ${CORE_LIBS} -# SDL2main SDL2) -# set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) -# endif(OX_USE_SDL2) -# -# -# if (WIN32) -# set(CORE_LIBS ${CORE_LIBS} -# pthreadVCE2 -# libcurl_imp -# ws2_32) -# elseif(EMSCRIPTEN) -# else(WIN32) -# set(CORE_LIBS ${CORE_LIBS} pthread) -# endif(WIN32) -# -# -# -# if (OX_HAVE_LIBPNG) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) -# include_directories(${THIRD_PARTY}/libpng) -# set(LIBPNG ${libprefix}png) -# -# if (MSVC) -# if (MSVC_VERSION EQUAL "1900") -# set(LIBPNG ${LIBPNG}-2015) -# endif() -# elseif(EMSCRIPTEN) -# set(LIBPNG libz libpng16) -# endif() -# -# set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) -# endif(OX_HAVE_LIBPNG) -# -# -# if (OX_HAVE_LIBJPEG) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) -# include_directories(${THIRD_PARTY}/libjpeg) -# set(LIBJPEG ${libprefix}jpeg) -# -# if (MSVC) -# if (MSVC_VERSION EQUAL "1900") -# set(LIBJPEG ${LIBJPEG}-2015) -# endif() -# endif() -# -# set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) -# endif(OX_HAVE_LIBJPEG) -# -# -# if (NOT EMSCRIPTEN) -# set(CORE_LIBS ${CORE_LIBS} -# ${libprefix}z${libprefix}) -# endif(NOT EMSCRIPTEN) -# -# -# if (NOT MSVC) -# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ") -# endif(NOT MSVC) -# -# -# add_definitions(${OX_DEFINITIONS}) -# include_directories(${OXYGINE_INCLUDE_DIRS}) -# add_library(oxygine-framework STATIC ${SOURCES}) -# -# -# set(OXYGINE_LIBRARY_DIRS -# ${OXYGINE_LIBRARY_DIRS} -# PARENT_SCOPE) -# -# set(OXYGINE_CORE_LIBS -# ${CORE_LIBS} -# PARENT_SCOPE) -# -# set(OXYGINE_DEFINITIONS -# ${OX_DEFINITIONS} -# PARENT_SCOPE) -# -# set(OXYGINE_INCLUDE_DIRS -# ${OXYGINE_INCLUDE_DIRS} -# PARENT_SCOPE) -# -# set(OXYGINE_JS_LIBRARIES -# ${OXYGINE_JS_LIBRARIES} -# PARENT_SCOPE) -# -# message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") -# message(STATUS "Libs: ${CORE_LIBS}") -# message(STATUS "Platform: ${PLATFORM}") -# -# set(CMAKE_INSTALL_PREFIX ../libs) -# install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) -# install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) - - - - - - - - - - - - - - - - - - - - - - - - - - -cmake_minimum_required (VERSION 2.6) -project (OXYGINE) - -include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") -cmaki_find_package(sdl2 REQUIRED) -cmaki_find_package(freeimage REQUIRED) -cmaki_find_package(dune-zlib REQUIRED) -cmaki_find_package(haxx-libcurl REQUIRED) - -include_directories(${CMAKI_INCLUDE_DIRS}) -set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) - -# find_package(OpenGL) -# -# if (EMSCRIPTEN) -# 
#don't need SDL2 -# elseif (WIN32) -# #hardcoded path to SDL2 on windows -# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# else(WIN32) -# find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) -# message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) -# -# if (SDL2_INCLUDE_DIRS) -# set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) -# message(STATUS "found") -# else() -# message(STATUS "SDL not found") -# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# endif() -# -# find_package(CURL) -# endif(EMSCRIPTEN) - - -set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) -set(OXYGINE_SRC ${OXYGINE_ROOT}/src) - -set(FOLDERS src src/closure src/minizip src/core - src/core/gl src/dev_tools src/minizip - src/math src/pugixml src/json src/res - src/text_utils src/utils src/winnie_alloc) - - -if (EMSCRIPTEN) - set(PLATFORM emscripten) -elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - set(PLATFORM linux) -elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") - set(PLATFORM ios) -elseif(MSVC) - set(PLATFORM win32) -elseif(MINGW) - set(PLATFORM win32_mingw) -endif() - - -set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) - - - -if (EMSCRIPTEN) - set(OX_HAVE_LIBPNG 1) - set(OX_HAVE_HTTP 1) - set(OX_USE_SDL2 1) - - set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) - - - file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) - - set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -s USE_SDL=2 -s USE_LIBPNG=1 -s USE_ZLIB=1 -s FULL_ES2=1 ") - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -s USE_ZLIB=1")#for minizip.c - -elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - - set(OX_HAVE_LIBJPEG 1) - set(OX_HAVE_LIBPNG 1) - -elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") -elseif(MSVC) - - set(OX_HAVE_LIBJPEG 1) - set(OX_HAVE_LIBPNG 1) - set(OX_HAVE_LIBCURL 1) - set(OX_HAVE_HTTP 1) - - set(libprefix lib) - - set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) - -elseif(MINGW) - - set(libprefix lib) - - set(OX_HAVE_LIBPNG 1) - set(OX_HAVE_LIBCURL 1) - set(OX_HAVE_HTTP 1) - -endif() - -if (OX_HAVE_LIBCURL) - set(FOLDERS ${FOLDERS} src/core/curl) - include_directories(${THIRD_PARTY}/curl/) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) -endif(OX_HAVE_LIBCURL) - - - -if (NOT OX_HAVE_HTTP) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) -endif(NOT OX_HAVE_HTTP) - -if (EMSCRIPTEN) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_MT) -endif(EMSCRIPTEN) - -foreach(ITEM ${FOLDERS}) - file(GLOB FLS - ${OXYGINE_ROOT}/${ITEM}/*.cpp - ${OXYGINE_ROOT}/${ITEM}/*.c - ${OXYGINE_ROOT}/${ITEM}/*.h) - set(SOURCES ${SOURCES} ${FLS}) - string(REPLACE / \\ SGROUP ${ITEM}) - source_group(${SGROUP} FILES ${FLS}) -endforeach(ITEM) - - -set(OXYGINE_INCLUDE_DIRS - ${OXYGINE_SRC} - ${THIRD_PARTY}/pthreads/include/ - ${THIRD_PARTY}/zlib) - - -set(OXYGINE_LIBRARY_DIRS - ${OXYGINE_LIBRARY_DIRS} - ${OXYGINE_SOURCE_DIR}/libs - ${THIRD_PARTY}/libraries) - - -if (FORCE_GLES) - set(OPENGL_LIBRARIES libGLESv2.lib) -endif(FORCE_GLES) - - -if (MINGW) - set(CORE_LIBS ${CORE_LIBS} mingw32) -endif(MINGW) - - -set(CORE_LIBS - ${CORE_LIBS} - oxygine-framework - ${OPENGL_LIBRARIES} -) - - -if (OX_USE_SDL2) - set(CORE_LIBS ${CORE_LIBS} - SDL2main SDL2) - set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) -endif(OX_USE_SDL2) - - -if (WIN32) - set(CORE_LIBS ${CORE_LIBS} - pthreadVCE2 - libcurl_imp - ws2_32) -elseif(EMSCRIPTEN) -else(WIN32) - set(CORE_LIBS ${CORE_LIBS} pthread) -endif(WIN32) - - - -if (OX_HAVE_LIBPNG) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) - - if 
(EMSCRIPTEN) - - else(EMSCRIPTEN) - - include_directories(${THIRD_PARTY}/libpng) - set(LIBPNG ${libprefix}png) - - if (MSVC) - if(NOT (MSVC_VERSION LESS 1900)) - set(LIBPNG ${LIBPNG}-2015) - endif() - endif() - - set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) - - endif(EMSCRIPTEN) - -endif(OX_HAVE_LIBPNG) - - -if (OX_HAVE_LIBJPEG) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) - include_directories(${THIRD_PARTY}/libjpeg) - set(LIBJPEG ${libprefix}jpeg) - - if (MSVC) - if(NOT (MSVC_VERSION LESS 1900)) - set(LIBJPEG ${LIBJPEG}-2015) - endif() - endif() - - set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) -endif(OX_HAVE_LIBJPEG) - - -if (NOT EMSCRIPTEN) - set(CORE_LIBS ${CORE_LIBS} - ${libprefix}z${libprefix}) -endif(NOT EMSCRIPTEN) - - -if (NOT MSVC) - set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -std=c++11 ") -endif(NOT MSVC) - -set(CMAKE_CXX_FLAGS ${OXYGINE_CXX_FLAGS}) - -add_definitions(${OX_DEFINITIONS}) -include_directories(${OXYGINE_INCLUDE_DIRS}) -add_library(oxygine-framework STATIC ${SOURCES}) - - -set(OXYGINE_LIBRARY_DIRS - ${OXYGINE_LIBRARY_DIRS} - PARENT_SCOPE) - -set(OXYGINE_CORE_LIBS - ${CORE_LIBS} - PARENT_SCOPE) - -set(OXYGINE_DEFINITIONS - ${OX_DEFINITIONS} - PARENT_SCOPE) - -set(OXYGINE_INCLUDE_DIRS - ${OXYGINE_INCLUDE_DIRS} - PARENT_SCOPE) - -set(OXYGINE_JS_LIBRARIES - ${OXYGINE_JS_LIBRARIES} - PARENT_SCOPE) - -set(OXYGINE_CXX_FLAGS - ${OXYGINE_CXX_FLAGS} - PARENT_SCOPE) - - - -message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") -message(STATUS "Libs: ${CORE_LIBS}") -message(STATUS "Platform: ${PLATFORM}") - -set(CMAKE_INSTALL_PREFIX ../libs) -install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) -install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) - - - - - - - - - - - - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml deleted file mode 100644 index bbdc966..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml +++ /dev/null @@ -1,13 +0,0 @@ -- assimp: - <<: *thirdparty_defaults - version: 3.1.1.0 - mode: dr - source: http://downloads.sourceforge.net/project/assimp/assimp-3.1/assimp-3.1.1.zip - uncompress_strip: assimp-3.1.1 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - targets: - - assimp: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml deleted file mode 100644 index e2fe3a4..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml +++ /dev/null @@ -1,23 +0,0 @@ -- box2d: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - cmake_target: null - cmake_prefix: ./Box2D/CMakeLists.txt - cmake_definitions: - - BOX2D_BUILD_EXAMPLES=OFF - - BUILD_SHARED_LIBS=ON - - BOX2D_BUILD_SHARED=ON - - BOX2D_BUILD_STATIC=OFF - # - CMAKE_POSITION_INDEPENDENT_CODE=ON - post_install: - - ./Box2D/Box2D/*.h include/Box2D/ RECURSIVE - - ./Box2D/libBox2D.a lib/ - mode: dr - source: https://github.com/erincatto/Box2D.git - branch: -b v2.3.1 - targets: - - Box2D: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml deleted file mode 100644 index a33a569..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml +++ /dev/null @@ -1,54 +0,0 @@ -- bullet2: - <<: *thirdparty_defaults - version: 2.83.6.0 - source: 
https://github.com/bulletphysics/bullet3/archive/2.83.6.tar.gz - uncompress_strip: bullet3-2.83.6 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - references: &bullet2_common_extra - default: - include: - - include/bullet - targets: - - LinearMath: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - BulletCollision: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - BulletDynamics: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - BulletSoftBody: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - # optional targets - - BulletFileLoader: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - ConvexDecomposition: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - GIMPACTUtils: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - HACD: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml deleted file mode 100644 index 59a451e..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml +++ /dev/null @@ -1,70 +0,0 @@ -- cryptopp: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - version_manager: git - post_install: - - ./*.h include - - ./*.a lib - cmake_target: null - mode: dr - source: https://github.com/weidai11/cryptopp.git - branch: -b CRYPTOPP_5_6_5 - unittest: - | - // https://www.cryptopp.com/wiki/ChannelSwitch - #include - #include - #include - #include - #include - #include - - int main(int argc, char *argv[]) - { - std::string message = "Now is the time for all good men to come to the aide of their country"; - - // Allow user to override default message from command line arg. 
- if(argc == 2 && argv[1] != NULL) - message = std::string(argv[1]); - - // Set hash variables - std::string s1, s2, s3, s4; - CryptoPP::SHA1 sha1; CryptoPP::SHA224 sha224; CryptoPP::SHA256 sha256; CryptoPP::SHA512 sha512; - - // Run hash functions - CryptoPP::HashFilter f1(sha1, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s1))); - CryptoPP::HashFilter f2(sha224, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s2))); - CryptoPP::HashFilter f3(sha256, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s3))); - CryptoPP::HashFilter f4(sha512, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s4))); - - // Set route to default - CryptoPP::ChannelSwitch cs; - cs.AddDefaultRoute(f1); - cs.AddDefaultRoute(f2); - cs.AddDefaultRoute(f3); - cs.AddDefaultRoute(f4); - - CryptoPP::StringSource ss(message, true /*pumpAll*/, new CryptoPP::Redirector(cs)); - - std::cout << "Message: " << message << std::endl; - std::cout << "SHA-1: " << s1 << std::endl; - std::cout << "SHA-224: " << s2 << std::endl; - std::cout << "SHA-256: " << s3 << std::endl; - std::cout << "SHA-512: " << s4 << std::endl; - } - cmake_definitions: - - BUILD_SHARED=OFF - - BUILD_SHARED_LIBS=OFF - - BUILD_STATIC=ON - - BUILD_TESTING=OFF - targets: - - cryptopp: - info: - <<: *library_static_exact - extra: - default: - definitions: - - -DCRYPTOPP_INIT_PRIORITY=1 - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml deleted file mode 100644 index 9ebf7cf..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml +++ /dev/null @@ -1,28 +0,0 @@ -- dune-freetype: - <<: *thirdparty_defaults - version: 1.0.0.0 - mode: dr - source: http://download.savannah.gnu.org/releases/freetype/freetype-2.6.tar.bz2 - uncompress_strip: freetype-2.6 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - unittest: - | - #include - #include FT_FREETYPE_H - int main() - { - FT_Library library; - FT_Init_FreeType( &library ); - return 0; - } - targets: - - freetype: - info: - <<: *library_dynamic_exact - extra: - default: - include: - - include/freetype2 - - $PLATFORM/include/freetype2 - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml deleted file mode 100644 index ccb589b..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml +++ /dev/null @@ -1,29 +0,0 @@ -- dune-glew: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - cmake_target: null - mode: dr - source: https://github.com/nigels-com/glew.git - cmake_definitions: - - BUILD_SHARED_LIBS=ON - post_install: - - ./lib/* lib/ RECURSIVE - - ./include/* include/ RECURSIVE - build: - | - #!/bin/bash - pushd auto - make - popd - make -j $CORES - targets: - - GLEW: - info: - <<: *library_dynamic_exact - extra: - macos_64-clang_*-*: null - default: - system_depends: - - GL - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml deleted file mode 100644 index 04246cb..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml +++ /dev/null @@ -1,38 +0,0 @@ -- dune-zlib: - <<: *thirdparty_defaults - version: 1.2.11.0 - mask: w - source: https://zlib.net/zlib-1.2.11.tar.gz - uncompress_strip: zlib-1.2.11 - unittest: - | - #include - int main() - { - z_stream infstream; - return 0; - } - targets: - - zlib: - info: - <<: *library_dynamic_exact - -- dune-zlib: 
- <<: *thirdparty_defaults - version: 1.2.11.0 - mask: mls - source: https://zlib.net/zlib-1.2.11.tar.gz - uncompress_strip: zlib-1.2.11 - unittest: - | - #include - int main() - { - z_stream infstream; - return 0; - } - targets: - - z: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml deleted file mode 100644 index 1dc4f97..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml +++ /dev/null @@ -1,20 +0,0 @@ -- fmod: - <<: *thirdparty_defaults - version: 1.0.1.0 - source: $NPP_SERVER/sources/fmodstudioapi11000linux.tar.gz - uncompress_strip: fmodstudioapi11000linux/api/lowlevel - post_install: - - ./lib/x86_64/* lib/ - - ./inc/*.h* include/ - build: - | - #!/bin/bash - echo installing fmod - targets: - - fmod: - info: - <<: *library_dynamic_exact - - fmodL: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml deleted file mode 100644 index 856f116..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml +++ /dev/null @@ -1,36 +0,0 @@ -- freeimage: - <<: *thirdparty_defaults - version: 3.1.7.0 - source: https://github.com/Kanma/FreeImage - cmake_target: null - post_install: - - ./lib/*.a lib/ - targets: - - freeimage: - info: - <<: *library_static_exact - - jpeg: - info: - <<: *library_static_exact - - mng: - info: - <<: *library_static_exact - - openexr: - info: - <<: *library_static_exact - - openjpeg: - info: - <<: *library_static_exact - - png: - info: - <<: *library_static_exact - - rawlite: - info: - <<: *library_static_exact - - tiff: - info: - <<: *library_static_exact - - zlib: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml deleted file mode 100644 index c9352be..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml +++ /dev/null @@ -1,40 +0,0 @@ -- freeimage: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - version_manager: git - cmake_target: null - post_install: - - ./*.h include RECURSIVE - - ./lib/*.a lib - mode: dr - source: https://github.com/Kanma/FreeImage.git - targets: - - freeimage: - info: - <<: *library_static_exact - # - zlib: - # info: - # <<: *library_static_exact - - tiff: - info: - <<: *library_static_exact - - rawlite: - info: - <<: *library_static_exact - - png: - info: - <<: *library_static_exact - - openjpeg: - info: - <<: *library_static_exact - - openexr: - info: - <<: *library_static_exact - - mng: - info: - <<: *library_static_exact - - jpeg: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml deleted file mode 100644 index cf94535..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml +++ /dev/null @@ -1,61 +0,0 @@ -- google-gmock: - <<: *thirdparty_defaults - mask: w - source: https://github.com/google/googletest.git - branch: -b release-1.8.0 - post_install: - - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE - - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE - cmake_definitions: - - GTEST_LINKED_AS_SHARED_LIBRARY=1 - - BUILD_SHARED_LIBS=ON - - BUILD_GTEST=ON - - BUILD_GMOCK=ON - - gtest_build_samples=OFF 
- - gtest_build_tests=OFF - - gtest_disable_pthreads=OFF - - gmock_build_tests=OFF - - INSTALL_GTEST=ON - - INSTALL_GMOCK=ON - targets: - - gtest: - info: - <<: *library_dynamic_exact - - gmock: - info: - <<: *library_dynamic_exact - - gmock_main: - info: - <<: *library_dynamic_exact - - -- google-gmock: - <<: *thirdparty_defaults - mask: mls - source: https://github.com/google/googletest.git - branch: -b release-1.8.0 - post_install: - - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE - - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE - cmake_definitions: - - BUILD_SHARED_LIBS=OFF - - BUILD_GTEST=ON - - BUILD_GMOCK=ON - - gtest_build_samples=OFF - - gtest_build_tests=OFF - - gtest_disable_pthreads=OFF - - gmock_build_tests=OFF - - INSTALL_GTEST=ON - - INSTALL_GMOCK=ON - targets: - - gtest: - info: - <<: *library_static_exact - - gmock: - info: - <<: *library_static_exact - - gmock_main: - info: - <<: *library_static_exact - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml deleted file mode 100644 index ffd8870..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml +++ /dev/null @@ -1,11 +0,0 @@ -- gwen: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - mode: dr - source: https://github.com/garrynewman/GWEN.git - targets: - - gwen: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml deleted file mode 100644 index 8c14ec5..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml +++ /dev/null @@ -1,71 +0,0 @@ -- haxx-libcurl: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - source: https://github.com/curl/curl.git - branch: -b curl-7_59_0 - depends: - - openssl - cmake_definitions: - - CMAKE_USE_OPENSSL=ON - unittest: - | - #include - #include - int main() - { - CURL* curl = curl_easy_init(); - return 0; - } - references: - library_dynamic: &library_dynamic_curl - common: &library_dynamic_common_curl - include: - - $PLATFORM/include - - include - windows: &library_dynamic_windows_curl - <<: *library_dynamic_common_curl - dynamic: - debug: - dll: - lib$TARGET.dll - lib: - lib$TARGET_imp.lib - pdb: - lib$TARGET.pdb - relwithdebinfo: - dll: - lib$TARGET.dll - lib: - lib$TARGET_imp.lib - pdb: - lib$TARGET.pdb - release: - dll: - lib$TARGET.dll - lib: - lib$TARGET_imp.lib - pdb: - null - - unix: &library_dynamic_unix_curl - <<: *library_dynamic_common_curl - dynamic: - debug: - so: - lib/lib$TARGET-d.so - relwithdebinfo: - so: - lib/lib$TARGET.so - release: - so: - lib/lib$TARGET.so - windows_*-msvc_*-*: - <<: *library_dynamic_windows_curl - default: - <<: *library_dynamic_unix_curl - targets: - - curl: - info: - <<: *library_dynamic_curl - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml deleted file mode 100644 index e8920b1..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml +++ /dev/null @@ -1,26 +0,0 @@ -- json: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - version_manager: git - post_install: - - ./src/*.h* include - cmake_target: null - source: https://github.com/nlohmann/json.git - branch: -b v3.0.1 - cmake_definitions: - - JSON_BuildTests=OFF - unittest: - | - #include - using json = nlohmann::json; - int main() - { - json j1; - return 0; - } - 
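# Editor's sketch, not part of the original json.yml: the `unittest:` literal
# block above is a small C++ translation unit that cmaki_generator appears to
# compile and link against the freshly built package as a smoke test. A
# hypothetical entry for another header-only package would follow the same
# shape (the package name and header below are invented for illustration):
#
#   unittest:
#     |
#       #include <foo/foo.hpp>
#       int main() { return 0; }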
targets: - - dummy: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml deleted file mode 100644 index 05d54dd..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml +++ /dev/null @@ -1,24 +0,0 @@ -- librocket: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - cmake_target: null - post_install: - - ./Include/Rocket/*.h include/Rocket/ recursive - - ./Include/Rocket/*.inl include/Rocket/ recursive - version_manager: git - source: https://github.com/libRocket/libRocket.git - branch: -b stable - depends: - - dune-freetype - targets: - - RocketCore: - info: - <<: *library_dynamic_exact - - RocketDebugger: - info: - <<: *library_dynamic_exact - - RocketControls: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml deleted file mode 100644 index 7d76144..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml +++ /dev/null @@ -1,10 +0,0 @@ -- msgpack: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - source: https://github.com/msgpack/msgpack-c.git - targets: - - msgpackc: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml deleted file mode 100644 index 4cbfa70..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml +++ /dev/null @@ -1,11 +0,0 @@ -- noise: - <<: *thirdparty_defaults - version: 1.0.0.0 - mode: dr - source: http://downloads.sourceforge.net/project/libnoise/libnoise%20sources/1.0.0/libnoisesrc-1.0.0.zip - uncompress_strip: noise - targets: - - noise: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml deleted file mode 100644 index 06bada0..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml +++ /dev/null @@ -1,19 +0,0 @@ -- ois: - <<: *thirdparty_defaults - version: 1.3.0.0 - mode: dr - source: http://downloads.sourceforge.net/project/wgois/Source%20Release/1.3/ois_v1-3.tar.gz - uncompress_strip: ois-v1-3 - build: - | - #!/bin/bash - # depends: libxaw7-dev - source find.script - chmod +x bootstrap - ./bootstrap - ./configure --prefix=$ois_HOME && make -j $CORES && make -j $CORES install - exit $? 
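# Editor's sketch, not part of the original ois.yml: a minimal, hypothetical
# cmaki_generator package entry collecting the fields that recur throughout
# packages/ -- inherited thirdparty defaults, a source archive, CMake
# definitions and the exported targets. The name "example-lib" and its URL
# are invented for illustration only.
#
#   - example-lib:
#       <<: *thirdparty_defaults
#       version: 1.0.0.0
#       mode: dr
#       source: https://example.org/example-lib-1.0.0.tar.gz
#       uncompress_strip: example-lib-1.0.0
#       cmake_definitions:
#         - BUILD_SHARED_LIBS=ON
#       targets:
#         - example-lib:
#             info:
#               <<: *library_dynamic_exact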
- targets: - - OIS: - info: - <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml deleted file mode 100644 index 4011d09..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml +++ /dev/null @@ -1,24 +0,0 @@ -- openssl: - <<: *thirdparty_defaults - source: https://github.com/pol51/OpenSSL-CMake.git - branch: -b OpenSSL_1_1_0 - build: - | - #!/bin/bash - # if [[ $BUILD_MODE == 'Debug' ]] - # then - # ./Configure --openssldir=$SELFHOME debug-linux-x86_64 - # else - # ./Configure --openssldir=$SELFHOME linux-x86_64 - # fi - ./config --prefix=$SELFHOME - make - make install - targets: - - ssl: - info: - <<: *library_static_exact - - crypto: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml deleted file mode 100644 index eb53ab4..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml +++ /dev/null @@ -1,25 +0,0 @@ -- oxygine: - <<: *thirdparty_defaults - cmake_target: null - cmake_definitions: - - BUILD_SHARED_LIBS=OFF - - CMAKE_POSITION_INDEPENDENT_CODE=ON - - OX_HAVE_LIBJPEG=1 - - OX_HAVE_LIBPNG=1 - - OX_HAVE_LIBCURL=1 - - OX_HAVE_HTTP=0 - - OX_USE_SDL2=1 - depends: - - sdl2 - - freeimage - - haxx-libcurl - source: https://github.com/oxygine/oxygine-framework.git - targets: - - oxygine-framework: - info: - <<: *library_static_exact - extra: - default: - definitions: - - -DOXYGINE_SDL=1 - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml deleted file mode 100644 index 0d9c5f9..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml +++ /dev/null @@ -1,22 +0,0 @@ -- paho-mqtt3: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - mode: dr - post_install: - - ./src/*.h include - source: https://github.com/eclipse/paho.mqtt.c.git - branch: -b develop - cmake_definitions: - - BUILD_SHARED_LIBS=ON - - BUILD_TESTING=OFF - - BUILD_STATIC=OFF - - BUILD_SHARED=ON - targets: - - paho-mqtt3c: - info: - <<: *library_dynamic_exact - - paho-mqtt3a: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml deleted file mode 100644 index 5d52565..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml +++ /dev/null @@ -1,21 +0,0 @@ -- paho-mqttpp3: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - mode: dr - depends: - - paho-mqtt3 - post_install: - - ./src/mqtt/*.h include/mqtt - mode: dr - source: https://github.com/eclipse/paho.mqtt.cpp.git - cmake_definitions: - - BUILD_SHARED_LIBS=ON - - BUILD_TESTING=OFF - - BUILD_STATIC=OFF - - BUILD_SHARED=ON - - PAHO_WITH_SSL=OFF - targets: - - paho-mqttpp3: - info: - <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml deleted file mode 100644 index df8c388..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml +++ /dev/null @@ -1,11 +0,0 @@ -- pugixml: - <<: *thirdparty_defaults - source: http://github.com/zeux/pugixml/releases/download/v1.8/pugixml-1.8.tar.gz - uncompress_strip: pugixml-1.8 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - targets: - 
- pugixml: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml deleted file mode 100644 index bc7cb10..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml +++ /dev/null @@ -1,21 +0,0 @@ -- python: - <<: *thirdparty_defaults - source: https://github.com/python-cmake-buildsystem/python-cmake-buildsystem.git - cmake_definitions: - - BUILD_SHARED=FALSE - - BUILD_STATIC=TRUE - targets: - - python3.5m: - info: - <<: *library_static_exact - extra: - default: - include: - - include/python3.5m - system_depends: - - dl - - util - - python: - info: - <<: *executable_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml deleted file mode 100644 index 643b0c7..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml +++ /dev/null @@ -1,11 +0,0 @@ -- raknet: - <<: *thirdparty_defaults - cmake_target: null - source: https://github.com/facebookarchive/RakNet.git - post_install: - - ./Source/*.h* include/raknet/ - targets: - - RakNetDLL: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml deleted file mode 100644 index 5707070..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml +++ /dev/null @@ -1,17 +0,0 @@ -- restclient-cpp: - <<: *thirdparty_defaults - source: https://github.com/mrtazz/restclient-cpp - depends: - - haxx-libcurl - build: - | - #!/bin/bash - source $(pwd)/../haxx-libcurl/find.script - ./autogen.sh - CXXFLAGS=-I$haxx_libcurl_HOME/include ./configure --prefix=$SELFHOME - make install - targets: - - restclient-cpp: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml deleted file mode 100644 index 13d07b4..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml +++ /dev/null @@ -1,38 +0,0 @@ -- sdl2: - <<: *thirdparty_defaults - mask: wl - version: 2.0.8.0 - source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz - uncompress_strip: SDL2-2.0.8 - depends: - - dune-glew - mode: dr - targets: - - SDL2-2.0: - info: - <<: *library_dynamic_exact - extra: - default: - include: - - include/SDL2 - - -- sdl2: - <<: *thirdparty_defaults - mask: m - version: 2.0.8.0 - source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz - uncompress_strip: SDL2-2.0.8 - depends: - - dune-glew - mode: dr - targets: - - SDL2: - info: - <<: *library_dynamic_exact - extra: - default: - include: - - include/SDL2 - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml deleted file mode 100644 index 29c143d..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml +++ /dev/null @@ -1,14 +0,0 @@ -- spdlog: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - source: https://github.com/gabime/spdlog.git - branch: -b v0.16.3 - post_install: - - ./include/*.h* include/ RECURSIVE - - ./include/*.cc* include/ RECURSIVE - targets: - - dummy: - info: - <<: *library_dynamic - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml deleted file mode 100644 index d01d5e7..0000000 --- 
a/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml +++ /dev/null @@ -1,49 +0,0 @@ -- intel-tbb: - <<: *thirdparty_defaults - version: 4.4.0.0 - source: https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20150728oss_src.tgz - uncompress_strip: tbb44_20150728oss - build: - | - #!/bin/bash - source find.script - make info > info_.txt - tail -n +2 info_.txt > info.txt - source info.txt - make - code=$? - # install - cp -Rf include/ $intel_tbb_HOME - if [[ $BUILD_MODE == 'Debug' ]] - then - cp -Rf build/${tbb_build_prefix}_debug/*.so* $intel_tbb_HOME - else - cp -Rf build/${tbb_build_prefix}_release/*.so* $intel_tbb_HOME - fi - for i in $(find $intel_tbb_HOME -name "*.so"); do - name=$(basename $i) - echo rm $i - echo ln -sf $name.2 $i - rm $i - ln -sf $name.2 $i - done - exit $code - - targets: - - tbb: - info: - <<: *library_dynamic_exact - extra: - \*-debug: - definitions: - - -DTBB_USE_DEBUG=1 - default: - definitions: - - -DTBB_USE_DEBUG=0 - - tbbmalloc: - info: - <<: *library_dynamic_exact - - tbbmalloc_proxy: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml b/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml deleted file mode 100644 index 34d5cc9..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml +++ /dev/null @@ -1,16 +0,0 @@ -- yamlcpp: - <<: *thirdparty_defaults - mode: dr - version: 0.0.0.0 - version_manager: git - cmake_target: null - cmake_definitions: - - BUILD_SHARED_LIBS=ON - post_install: - - ./include/yaml-cpp/*.h include/yaml-cpp RECURSIVE - source: https://github.com/jbeder/yaml-cpp.git - targets: - - yaml-cpp: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_generator/packing.py b/node_modules/npm-mas-mas/cmaki_generator/packing.py deleted file mode 100644 index fcb2872..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/packing.py +++ /dev/null @@ -1,139 +0,0 @@ -import os -import sys -import utils -import logging -import hash_version -from itertools import product -from third_party import platforms -from third_party import get_identifier - - -def print_folder(folder): - for root, dirs, files in os.walk(folder): - path = root.split(os.sep) - logging.info((len(path) - 1) * '... ' + '%s/' % os.path.basename(root)) - for file in files: - logging.info(len(path) * '... 
' + '%s' % file) - - -def packing(node, parameters, compiler_replace_maps): - - package = node.get_package_name() - version_git = node.get_version() - packing = node.is_packing() - if not packing: - logging.warning('Skiping package: %s' % package) - return 0 - - manager = node.get_version_manager() - if manager == "git": - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) - revision_git = hash_version.get_last_changeset(build_directory, short=False) - version_old = node.get_version() - version_git = hash_version.to_cmaki_version(build_directory, revision_git) - logging.info('[git] Renamed version from %s to %s' % (version_old, version_git)) - - current_workspace = node.get_binary_workspace(plat) - current_base = node.get_base_folder() - oldversion = node.get_version() - try: - node.set_version(version_git) - updated_workspace = node.get_binary_workspace(plat) - updated_base = node.get_base_folder() - - current_base2 = os.path.join(current_workspace, current_base) - updated_base2 = os.path.join(current_workspace, updated_base) - logging.debug("from: %s" % current_base2) - logging.debug("to: %s" % updated_base2) - if current_base != updated_base: - utils.move_folder_recursive(current_base2, updated_base2) - logging.debug('-- copy from: {}, {}'.format(current_workspace, os.path.exists(current_workspace))) - logging.debug('-- copy to: {}, {}'.format(updated_workspace, os.path.exists(updated_workspace))) - utils.move_folder_recursive(current_workspace, updated_workspace) - finally: - node.set_version(oldversion) - - node.set_version(version_git) - version = node.get_version() - - # regenerate autoscripts with new version - node.generate_scripts_headers(compiler_replace_maps) - - # # generate versions.cmake - node.generate_3rdpartyversion(parameters.prefix) - - precmd = '' - if utils.is_windows(): - precmd = 'cmake -E ' - - folder_3rdparty = parameters.third_party_dir - output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) - utils.trymkdir(output_3rdparty) - - folder_mark = os.path.join(parameters.prefix, node.get_base_folder()) - utils.trymkdir(folder_mark) - - utils.superverbose(parameters, '*** [%s] Generation cmakefiles *** %s' % (package, output_3rdparty)) - errors = node.generate_cmakefiles(platforms, output_3rdparty, compiler_replace_maps) - logging.debug('errors generating cmakefiles: %d' % errors) - node.ret += abs(errors) - - for plat in platforms: - utils.superverbose(parameters, '*** [%s (%s)] Generating package .tar.gz (%s) ***' % (package, version, plat)) - workspace = node.get_workspace(plat) - current_workspace = node.get_binary_workspace(plat) - utils.trymkdir(current_workspace) - with utils.working_directory(current_workspace): - - logging.info('working directory: {}'.format(current_workspace)) - - if utils.is_windows(): - utils.safe_system('del /s *.ilk') - utils.safe_system('del /s *.exp') - - current_base = node.get_base_folder() - prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) - prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) - - logging.info('generating package %s from source %s' % (prefix_package, os.path.join(os.getcwd(), current_base))) - logging.info('generating md5file %s' % prefix_package_md5) - print_folder(current_base) - - # packing install - gen_targz = "%star zcvf %s %s" % (precmd, prefix_package, current_base) - - node.ret += abs( node.safe_system(gen_targz, 
compiler_replace_maps) ) - if not os.path.exists(prefix_package): - logging.error('No such file: {}'.format(prefix_package)) - return False - - # calculate md5 file - package_md5 = utils.md5sum(prefix_package) - logging.debug("new package {}, with md5sum {}".format(prefix_package, package_md5)) - with open(prefix_package_md5, 'wt') as f: - f.write('%s\n' % package_md5) - - # packing cmakefiles (more easy distribution) - if not parameters.no_packing_cmakefiles: - for plat in platforms: - current_base = node.get_base_folder() - prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (current_base, plat)) - with utils.working_directory(folder_3rdparty): - - logging.info('working directory: {}'.format(folder_3rdparty)) - - logging.debug('working dir: %s' % folder_3rdparty) - logging.info('generating package cmake %s' % prefix_package_cmake) - print_folder(current_base) - - gen_targz_cmake = '{}tar zcvf {} {}'.format(precmd, prefix_package_cmake, current_base) - node.ret += abs( node.safe_system(gen_targz_cmake, compiler_replace_maps) ) - if not os.path.exists(prefix_package_cmake): - logging.error('No such file: {}'.format(prefix_package_cmake)) - return False - - # finish well - return True - diff --git a/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt deleted file mode 100644 index dcb2251..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt +++ /dev/null @@ -1,75 +0,0 @@ - -#******************************************************************************* -# Copyright (c) 2016 -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Eclipse Public License v1.0 -# and Eclipse Distribution License v1.0 which accompany this distribution. -# -# The Eclipse Public License is available at -# http://www.eclipse.org/legal/epl-v10.html -# and the Eclipse Distribution License is available at -# http://www.eclipse.org/org/documents/edl-v10.php. -# -# Contributors: -# Guilherme Maciel Ferreira - initial version -#*******************************************************************************/ - -## Note: on OS X you should install XCode and the associated command-line tools - -## cmake flags -cmake_minimum_required(VERSION 3.1 FATAL_ERROR) - -## project name -project("paho-mqtt-cpp" LANGUAGES CXX) - -include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) -set(PAHO_MQTT_C_PATH "${paho_mqtt3_LIBDIR}" CACHE PATH "Add a path to paho.mqtt.c library and headers") - -## library name -set(PAHO_MQTT_CPP paho-mqttpp3) - -## build settings -set(PAHO_VERSION_MAJOR 0) -set(PAHO_VERSION_MINOR 9) -set(PAHO_VERSION_PATCH 0) - -set(CLIENT_VERSION ${PAHO_VERSION_MAJOR}.${PAHO_VERSION_MINOR}.${PAHO_VERSION_PATCH}) -set(CPACK_PACKAGE_VERSION_MAJOR ${PAHO_VERSION_MAJOR}) -set(CPACK_PACKAGE_VERSION_MINOR ${PAHO_VERSION_MINOR}) -set(CPACK_PACKAGE_VERSION_PATCH ${PAHO_VERSION_PATCH}) - -## build options -set(PAHO_BUILD_STATIC FALSE CACHE BOOL "Build static library") -set(PAHO_BUILD_SAMPLES FALSE CACHE BOOL "Build sample programs") -set(PAHO_BUILD_DOCUMENTATION FALSE CACHE BOOL "Create and install the HTML based API documentation (requires Doxygen)") -set(PAHO_MQTT_C paho-mqtt3a) -SET(PAHO_WITH_SSL TRUE CACHE BOOL "Flag that defines whether to build ssl-enabled binaries too. 
") - -## build flags -set(CMAKE_CXX_STANDARD 11) -set(CMAKE_CXX_STANDARD_REQUIRED ON) -set(CMAKE_CXX_EXTENSIONS OFF) - -## build directories - -add_subdirectory(src) -add_subdirectory(src/mqtt) - -if(PAHO_BUILD_SAMPLES) - add_subdirectory(src/samples) -endif() - -if(PAHO_BUILD_DOCUMENTATION) - add_subdirectory(doc) -endif() - -## packaging settings -if(WIN32) - set(CPACK_GENERATOR "ZIP") -elseif(UNIX) - set(CPACK_GENERATOR "TGZ") -endif() - -include(CPack) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt deleted file mode 100644 index d35ab8b..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt +++ /dev/null @@ -1,161 +0,0 @@ -#******************************************************************************* -# Copyright (c) 2016 -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Eclipse Public License v1.0 -# and Eclipse Distribution License v1.0 which accompany this distribution. -# -# The Eclipse Public License is available at -# http://www.eclipse.org/legal/epl-v10.html -# and the Eclipse Distribution License is available at -# http://www.eclipse.org/org/documents/edl-v10.php. -# -# Contributors: -# Guilherme Maciel Ferreira - initial version -#*******************************************************************************/ - -## Note: on OS X you should install XCode and the associated command-line tools - -include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) -set(paho_mqtt3_LIBRARIES paho-mqtt3c paho-mqtt3a) -link_directories("${paho_mqtt3_LIBDIR}") -include_directories("${paho_mqtt3_INCLUDE}") -# TODO: use find_package -# find_package(paho-mqtt3 REQUIRED) - -## include directories -include_directories(${CMAKE_CURRENT_SOURCE_DIR}) - - -## libraries -if(WIN32) - set(LIBS_SYSTEM - ws2_32) -elseif(UNIX) - if(CMAKE_SYSTEM_NAME MATCHES "Linux") - set(LIB_DL dl) - endif() - set(LIBS_SYSTEM - ${LIB_DL} - c - stdc++ - pthread) -endif() - -## use Object Library to optimize compilation -set(COMMON_SRC - async_client.cpp - client.cpp - disconnect_options.cpp - iclient_persistence.cpp - message.cpp - response_options.cpp - ssl_options.cpp - string_collection.cpp - token.cpp - topic.cpp - connect_options.cpp - will_options.cpp) - -if(PAHO_WITH_SSL) - add_definitions(-DOPENSSL) -endif() - -add_library(common_obj OBJECT - ${COMMON_SRC}) - -## set position independent flag (-fPIC on Unix) -set_property(TARGET common_obj - PROPERTY POSITION_INDEPENDENT_CODE ON) - -## create the shared library -add_library(${PAHO_MQTT_CPP} SHARED - $) - -## add dependencies to the shared library -target_link_libraries(${PAHO_MQTT_CPP} - ${LIBS_SYSTEM}) - -## set the shared library soname -set_target_properties(${PAHO_MQTT_CPP} PROPERTIES - VERSION ${CLIENT_VERSION} - SOVERSION ${PAHO_VERSION_MAJOR}) - -## install the shared library -install(TARGETS ${PAHO_MQTT_CPP} - ARCHIVE DESTINATION lib - LIBRARY DESTINATION lib - RUNTIME DESTINATION bin) - -## build static version of the Paho MQTT C++ library -if(PAHO_BUILD_STATIC) - ## create the static library - add_library(${PAHO_MQTT_CPP}-static STATIC - $) - - ## add dependencies to the static library - target_link_libraries(${PAHO_MQTT_CPP}-static - ${LIBS_SYSTEM}) - - ## install the static library - install(TARGETS ${PAHO_MQTT_CPP}-static - ARCHIVE DESTINATION lib - LIBRARY DESTINATION lib) -endif() - -## extract Paho MQTT C include directory 
-get_filename_component(PAHO_MQTT_C_DEV_INC_DIR ${PAHO_MQTT_C_PATH}/src ABSOLUTE) -get_filename_component(PAHO_MQTT_C_STD_INC_DIR ${PAHO_MQTT_C_PATH}/include ABSOLUTE) -set(PAHO_MQTT_C_INC_DIR - ${PAHO_MQTT_C_DEV_INC_DIR} - ${PAHO_MQTT_C_STD_INC_DIR}) - -## extract Paho MQTT C library directory -get_filename_component(PAHO_MQTT_C_DEV_LIB_DIR ${PAHO_MQTT_C_PATH}/build/output ABSOLUTE) -get_filename_component(PAHO_MQTT_C_STD_LIB_DIR ${PAHO_MQTT_C_PATH}/lib ABSOLUTE) -get_filename_component(PAHO_MQTT_C_STD64_LIB_DIR ${PAHO_MQTT_C_PATH}/lib64 ABSOLUTE) -set(PAHO_MQTT_C_LIB_DIR - ${PAHO_MQTT_C_DEV_LIB_DIR} - ${PAHO_MQTT_C_STD_LIB_DIR} - ${PAHO_MQTT_C_STD64_LIB_DIR}) - -## extract Paho MQTT C binary directory (Windows may place libraries there) -get_filename_component(PAHO_MQTT_C_BIN_DIR ${PAHO_MQTT_C_PATH}/bin ABSOLUTE) - -## add library suffixes so Windows can find Paho DLLs -set(CMAKE_FIND_LIBRARY_PREFIXES ${CMAKE_FIND_LIBRARY_PREFIXES} "") -set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES} ".dll" ".lib") - -if(PAHO_WITH_SSL) - ## find the Paho MQTT C SSL library - find_library(PAHO_MQTT_C_LIB - NAMES paho-mqtt3as - mqtt3as - PATHS ${PAHO_MQTT_C_LIB_DIR} - ${PAHO_MQTT_C_BIN_DIR}) - - find_package(OpenSSL REQUIRED) -else() - ## find the Paho MQTT C library - find_library(PAHO_MQTT_C_LIB - NAMES paho-mqtt3a - mqtt - paho-mqtt - mqtt3 - paho-mqtt3 - mqtt3a - PATHS ${PAHO_MQTT_C_LIB_DIR} - ${PAHO_MQTT_C_BIN_DIR}) -endif() - -## use the Paho MQTT C library if found. Otherwise terminate the compilation -if(${PAHO_MQTT_C_LIB} STREQUAL "PAHO_MQTT_C_LIB-NOTFOUND") - message(FATAL_ERROR "Could not find Paho MQTT C library") -else() - include_directories(${PAHO_MQTT_C_INC_DIR}) - link_directories(${PAHO_MQTT_C_LIB_DIR}) - target_link_libraries(${PAHO_MQTT_CPP} - ${PAHO_MQTT_C_LIB} - ${paho_mqtt3_LIBRARIES}) -endif() - diff --git a/node_modules/npm-mas-mas/cmaki_generator/pipeline.py b/node_modules/npm-mas-mas/cmaki_generator/pipeline.py deleted file mode 100644 index d0c44ed..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/pipeline.py +++ /dev/null @@ -1,287 +0,0 @@ -import os -import sys -import logging -import contextlib -import utils -import shutil -from third_party import exceptions_fail_group -from third_party import exceptions_fail_program -from third_party import FailThirdParty - - -def make_pipe(): - def process(): - pass - return process - - -def end_pipe(): - def process(p): - _ = list(p) - return process - - -def _create(): - b = make_pipe() - e = yield b - end_pipe()(e) - yield - - -@contextlib.contextmanager -def create(): - c = _create() - p = next(c) - yield (p, c) - - -def feed(packages): - def process(_): - for node in packages: - yield node - return process - - -def do(function, force, *args, **kwargs): - ''' - skeleton gtc stage - ''' - def process(packages): - def _process(): - for node in packages: - try: - package = node.get_package_name() - version = node.get_version() - - if not force: - # skip process if package came with error - if node.ret != 0: - logging.info('%s %s error detected: skiping' % (function.__name__, package)) - continue - - # skip process if package came interrupted - if node.interrupted: - logging.info('%s %s error detected: skiping' % (function.__name__, package)) - continue - - if function.__name__ != 'purge': - logger_function = logging.info - else: - logger_function = logging.debug - - logger_function('--------- begin@%s: %s (%s) --------' % (function.__name__, package, version)) - - # process package - ret = function(node, *args, 
**kwargs) - logging.debug('%s: return %s' % (function.__name__, ret)) - if isinstance(ret, bool): - if not ret: - node.ret += 1 - elif isinstance(ret, int): - # aggregation result - node.ret += abs(ret) - else: - logging.error('%s %s error invalid return: %s' % (function.__name__, package, ret)) - node.ret += 1 - - logger_function('--------- end@%s: %s (%s) --------' % (function.__name__, package, version)) - - if node.ret != 0: - node.fail_stage = function.__name__ - raise FailThirdParty('[exception] %s fail in stage: %s' % (package, function.__name__)) - - except FailThirdParty: - logging.error('fatal exception in package %s (%s)' % (package, version)) - node.ret += 1 - node.fail_stage = function.__name__ - raise - except exceptions_fail_group: - logging.error('fatal exception in package %s (%s)' % (package, version)) - node.ret += 1 - # add exception for show postponed - node.exceptions.append(sys.exc_info()) - node.fail_stage = function.__name__ - raise - except exceptions_fail_program: - logging.error('interruption in package %s (%s)' % (package, version)) - node.ret += 1 - node.fail_stage = function.__name__ - node.interrupted = True - raise - except: - # excepciones por fallos de programacion - logging.error('Postponed exception in package %s (%s)' % (package, version)) - node.ret += 1 - node.exceptions.append(sys.exc_info()) - node.fail_stage = function.__name__ - finally: - # send to next step - yield node - - for node in _process(): - yield node - return process - -####################### PIPELINE PROOF CONCEPT (UNDER CODE IS NOT USED) ############### - - -def echo(line): - def process(_): - yield line - return process - - -def cat(): - def process(p): - for line in p: - if(os.path.exists(line)): - with open(line, 'rt') as f: - for line2 in f: - yield line2 - else: - logging.warning(' filename %s not exists' % line) - return process - - -def find(folder, level=999): - def process(_): - for root, dirs, files in utils.walklevel(folder, level): - for name in files: - yield os.path.join(root, name) - return process - - -def grep(pattern): - def process(p): - for line in p: - if line.find(pattern) != -1: - yield line - return process - - -def grep_basename(pattern): - def process(p): - p0 = pattern[:1] - pL = pattern[-1:] - fixed_pattern = pattern.replace('*', '') - for line in p: - if(p0 == '*' and pL != '*'): - if os.path.basename(line).endswith(fixed_pattern): - yield line.replace('\\', '/') - elif(p0 != '*' and pL == '*'): - if os.path.basename(line).startswith(fixed_pattern): - yield line.replace('\\', '/') - else: - if os.path.basename(line).find(fixed_pattern) != -1: - yield line.replace('\\', '/') - return process - - -def grep_v(pattern): - def process(p): - for line in p: - if line.find(pattern) == -1: - yield line - return process - - -def endswith(pattern): - def process(p): - for line in p: - if line.endswith(pattern): - yield line - return process - - -def copy(rootdir, folder): - def process(p): - for line in p: - relfilename = os.path.relpath(line, rootdir) - destiny = os.path.join(folder, relfilename) - destiny_dir = os.path.dirname(destiny) - utils.trymkdir(destiny_dir) - shutil.copyfile(line, destiny) - if not os.path.exists(destiny): - raise Exception("Not exists %s" % destiny) - yield destiny - return process - - -def startswith(pattern): - def process(p): - for line in p: - if line.startswith(pattern): - yield line - return process - - -def printf(prefix = ''): - def process(p): - for line in p: - print("%s%s" % (prefix, line.rstrip())) - yield line - return 
process - - -def info(prefix = ''): - def process(p): - for line in p: - logging.info("%s%s" % (prefix, line.rstrip())) - yield line - return process - - -def debug(prefix = ''): - def process(p): - for line in p: - logging.debug("%s%s" % (prefix, line.rstrip())) - yield line - return process - - -def write_file(filename, mode='wt'): - def process(p): - content = [] - for line in p: - content.append(line) - with open(filename, mode) as f: - for line in content: - f.write('%s\n' % line.rstrip()) - for line in content: - yield line - return process - - -def tee(filename): - def process(p): - p = printf()(p) - p = write_file(filename)(p) - for line in p: - yield line - return process - - -def example_context(): - # using context - with create() as (p, finisher): - p = find('.')(p) - p = endswith('.cpp')(p) - p = cat()(p) - p = tee('result.txt')(p) - # send last part - finisher.send(p) - - -def example_simple(): - # not using context - p = make_pipe() - # begin - p = find('.', 2)(p) - p = endswith('.yml')(p) - p = grep_v('.build_')(p) - p = tee('result.txt')(p) - # end - end_pipe()(p) - -if __name__ == '__main__': - example_simple() diff --git a/node_modules/npm-mas-mas/cmaki_generator/prepare.py b/node_modules/npm-mas-mas/cmaki_generator/prepare.py deleted file mode 100644 index d15de46..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/prepare.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -import sys -import utils -import logging -import shutil -from third_party import platforms -from third_party import build_unittests_foldername -from itertools import product -from third_party import prefered - - -def prepare(node, parameters, compiler_replace_maps): - - package = node.get_package_name() - - # source folder - source_dir = os.path.join(os.getcwd(), package) - utils.trymkdir(source_dir) - - # generate .build.sh / .build.cmd if is defined in yaml - node.get_generate_custom_script(source_dir) - - # generate find.script / find.cmd - node.generate_scripts_headers(compiler_replace_maps) - - # read root CMakeLists.txt - with open('CMakeLists.txt', 'rt') as f: - content_cmakelists = f.read() - - # OJO: dejar de borrar cuando reciclemos binarios - node.remove_packages() - - # run_tests or packing - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - logging.info('Preparing mode %s - %s' % (plat, build_mode)) - build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) - utils.trymkdir(build_directory) - - # download source and prepare in build_directory - node.prepare_third_party(build_directory, compiler_replace_maps) - - # copy source files to build - logging.debug('Copy sources to build: %s -> %s' % (source_dir, build_directory)) - utils.copy_folder_recursive(source_dir, build_directory) - - # before copy files - with utils.working_directory(build_directory): - for bc in node.get_before_copy(): - chunks = [x.strip() for x in bc.split(' ') if x] - if len(chunks) != 2: - raise Exception('Invalid value in before_copy: %s' % bc) - logging.debug('Copy "%s" to "%s"' % (chunks[0], chunks[1])) - shutil.copy2(chunks[0], chunks[1]) - - # if have cmakelists, insert root cmakelists header - cmake_prefix = node.get_cmake_prefix() - build_cmakelist = os.path.join(build_directory, cmake_prefix, 'CMakeLists.txt') - if os.path.exists(build_cmakelist) and (not node.has_custom_script(source_dir)): - with open(build_cmakelist, 'rt') as f: - content_cmakelists_package = f.read() - with open(build_cmakelist, 'wt') as f: - f.write('%s\n' % 
content_cmakelists) - f.write('%s\n' % content_cmakelists_package) - - if parameters.fast: - logging.debug('skipping for because is in fast mode: "prepare"') - break - - # finish well - return True - diff --git a/node_modules/npm-mas-mas/cmaki_generator/purge.py b/node_modules/npm-mas-mas/cmaki_generator/purge.py deleted file mode 100644 index 2349465..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/purge.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import utils -import logging -from third_party import platforms - -def purge(node, parameters): - - package = node.get_package_name() - - logging.debug("Cleaning headers and cmakefiles %s" % package) - node.remove_scripts_headers() - node.remove_cmakefiles() - - logging.debug("Cleaning download %s" % package) - uncompress_directory = node.get_download_directory() - utils.tryremove_dir(uncompress_directory) - - original_directory = node.get_original_directory() - utils.tryremove_dir(original_directory) - - for plat in platforms: - - if not node.get_exclude_from_clean(): - logging.debug("Cleaning install %s" % package) - utils.tryremove_dir(node.get_install_directory(plat)) - - build_modes = node.get_build_modes() - for build_mode in build_modes: - - logging.debug("Cleaning build %s" % package) - build_directory = node.get_build_directory(plat, build_mode) - utils.tryremove_dir(build_directory) - - # finish well - return True - diff --git a/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt deleted file mode 100644 index 618b3f8..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt +++ /dev/null @@ -1,34 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(RakNetLibStatic) - -FILE(GLOB ALL_HEADER_SRCS ${RakNet_SOURCE_DIR}/Source/*.h) -FILE(GLOB ALL_CPP_SRCS ${RakNet_SOURCE_DIR}/Source/*.cpp) - -include_directories( ${RAKNET_INTERNAL_INCLUDE_DIRS} ) - -add_library(RakNetLibStatic STATIC ${ALL_CPP_SRCS} ${ALL_HEADER_SRCS} readme.txt) - -IF(WIN32 AND NOT UNIX) - SET( CMAKE_CXX_FLAGS "/D WIN32 /D _RAKNET_LIB /D _CRT_NONSTDC_NO_DEPRECATE /D _CRT_SECURE_NO_DEPRECATE /GS- /GR- ") -ENDIF(WIN32 AND NOT UNIX) - -IF(WIN32 AND NOT UNIX) - target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) - - IF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") - - IF( MSVC10 OR MSVC11 OR MSVC12 OR MSVC14 ) - set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:\"LIBCD.lib LIBCMTD.lib MSVCRT.lib\"" ) - ELSE() - set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:"LIBCD.lib LIBCMTD.lib MSVCRT.lib"" ) - ENDIF() - - ENDIF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") - -ELSE(WIN32 AND NOT UNIX) - target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) - INSTALL(TARGETS RakNetLibStatic DESTINATION ${RakNet_SOURCE_DIR}/Lib/RakNetLibStatic) - INSTALL(FILES ${ALL_HEADER_SRCS} DESTINATION ${RakNet_SOURCE_DIR}/include/raknet) -ENDIF(WIN32 AND NOT UNIX) - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp b/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp deleted file mode 100644 index 8f20dfa..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Copyright (c) 2014, Oculus VR, Inc. - * All rights reserved. 
- * - * This source code is licensed under the BSD-style license found in the - * LICENSE file in the root directory of this source tree. An additional grant - * of patent rights can be found in the PATENTS file in the same directory. - * - */ - -#include "CCRakNetSlidingWindow.h" - -#if USE_SLIDING_WINDOW_CONGESTION_CONTROL==1 - -static const double UNSET_TIME_US=-1; - -#if CC_TIME_TYPE_BYTES==4 -static const CCTimeType SYN=10; -#else -static const CCTimeType SYN=10000; -#endif - -#include "MTUSize.h" -#include -#include -#include -#include "RakAssert.h" -#include "RakAlloca.h" - -using namespace RakNet; - -// ****************************************************** PUBLIC METHODS ****************************************************** - -CCRakNetSlidingWindow::CCRakNetSlidingWindow() -{ -} -// ---------------------------------------------------------------------------------------------------------------------------- -CCRakNetSlidingWindow::~CCRakNetSlidingWindow() -{ - -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::Init(CCTimeType curTime, uint32_t maxDatagramPayload) -{ - (void) curTime; - - lastRtt=estimatedRTT=deviationRtt=UNSET_TIME_US; - RakAssert(maxDatagramPayload <= MAXIMUM_MTU_SIZE); - MAXIMUM_MTU_INCLUDING_UDP_HEADER=maxDatagramPayload; - cwnd=maxDatagramPayload; - ssThresh=0.0; - oldestUnsentAck=0; - nextDatagramSequenceNumber=0; - nextCongestionControlBlock=0; - backoffThisBlock=speedUpThisBlock=false; - expectedNextSequenceNumber=0; - _isContinuousSend=false; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::Update(CCTimeType curTime, bool hasDataToSendOrResend) -{ - (void) curTime; - (void) hasDataToSendOrResend; -} -// ---------------------------------------------------------------------------------------------------------------------------- -int CCRakNetSlidingWindow::GetRetransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) -{ - (void) curTime; - (void) isContinuousSend; - (void) timeSinceLastTick; - - return unacknowledgedBytes; -} -// ---------------------------------------------------------------------------------------------------------------------------- -int CCRakNetSlidingWindow::GetTransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) -{ - (void) curTime; - (void) timeSinceLastTick; - - _isContinuousSend=isContinuousSend; - - if (unacknowledgedBytes<=cwnd) - return (int) (cwnd-unacknowledgedBytes); - else - return 0; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::ShouldSendACKs(CCTimeType curTime, CCTimeType estimatedTimeToNextTick) -{ - CCTimeType rto = GetSenderRTOForACK(); - (void) estimatedTimeToNextTick; - - // iphone crashes on comparison between double and int64 http://www.jenkinssoftware.com/forum/index.php?topic=2717.0 - if (rto==(CCTimeType) UNSET_TIME_US) - { - // Unknown how long until the remote system will retransmit, so better send right away - return true; - } - - return curTime >= oldestUnsentAck + SYN; -} -// ---------------------------------------------------------------------------------------------------------------------------- -DatagramSequenceNumberType 
CCRakNetSlidingWindow::GetNextDatagramSequenceNumber(void) -{ - return nextDatagramSequenceNumber; -} -// ---------------------------------------------------------------------------------------------------------------------------- -DatagramSequenceNumberType CCRakNetSlidingWindow::GetAndIncrementNextDatagramSequenceNumber(void) -{ - DatagramSequenceNumberType dsnt=nextDatagramSequenceNumber; - nextDatagramSequenceNumber++; - return dsnt; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendBytes(CCTimeType curTime, uint32_t numBytes) -{ - (void) curTime; - (void) numBytes; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnGotPacketPair(DatagramSequenceNumberType datagramSequenceNumber, uint32_t sizeInBytes, CCTimeType curTime) -{ - (void) curTime; - (void) sizeInBytes; - (void) datagramSequenceNumber; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::OnGotPacket(DatagramSequenceNumberType datagramSequenceNumber, bool isContinuousSend, CCTimeType curTime, uint32_t sizeInBytes, uint32_t *skippedMessageCount) -{ - (void) curTime; - (void) sizeInBytes; - (void) isContinuousSend; - - if (oldestUnsentAck==0) - oldestUnsentAck=curTime; - - if (datagramSequenceNumber==expectedNextSequenceNumber) - { - *skippedMessageCount=0; - expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; - } - else if (GreaterThan(datagramSequenceNumber, expectedNextSequenceNumber)) - { - *skippedMessageCount=datagramSequenceNumber-expectedNextSequenceNumber; - // Sanity check, just use timeout resend if this was really valid - if (*skippedMessageCount>1000) - { - // During testing, the nat punchthrough server got 51200 on the first packet. 
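For reference, the hole-detection step in OnGotPacket above reduces to the standalone sketch below. The clamp values mirror the deleted code; the struct and function names (HoleDetector, OnGotDatagram) are illustrative stand-ins, not RakNet API, and plain `>` is used in place of the wraparound-aware GreaterThan() for brevity.

    // Illustrative sketch of the skipped-datagram accounting above.
    #include <cstdint>
    #include <cstdio>

    struct HoleDetector {
        uint32_t expectedNext = 0;

        // Returns false if the gap is so large the datagram is treated as invalid,
        // mirroring the ">50000" sanity check; otherwise reports the hole size,
        // clamped to 1000 as in the deleted code.
        bool OnGotDatagram(uint32_t seq, uint32_t &skipped) {
            if (seq == expectedNext) {
                skipped = 0;
                expectedNext = seq + 1;
            } else if (seq > expectedNext) {        // arrived ahead: a hole
                skipped = seq - expectedNext;
                if (skipped > 1000) {
                    if (skipped > 50000)
                        return false;               // almost certainly bogus
                    skipped = 1000;                 // clamp, rely on timeout resend
                }
                expectedNext = seq + 1;
            } else {
                skipped = 0;                        // duplicate / old datagram
            }
            return true;
        }
    };

    int main() {
        HoleDetector d;
        uint32_t skipped = 0;
        d.OnGotDatagram(0, skipped);
        d.OnGotDatagram(3, skipped);                // skipped == 2
        printf("skipped=%u\n", skipped);
    }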
I have no idea where this comes from, but has happened twice - if (*skippedMessageCount>(uint32_t)50000) - return false; - *skippedMessageCount=1000; - } - expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; - } - else - { - *skippedMessageCount=0; - } - - return true; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnResend(CCTimeType curTime, RakNet::TimeUS nextActionTime) -{ - (void) curTime; - (void) nextActionTime; - - if (_isContinuousSend && backoffThisBlock==false && cwnd>MAXIMUM_MTU_INCLUDING_UDP_HEADER*2) - { - // Spec says 1/2 cwnd, but it never recovers because cwnd increases too slowly - //ssThresh=cwnd-8.0 * (MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd); - ssThresh=cwnd/2; - if (ssThresh ssThresh && ssThresh!=0) - cwnd = ssThresh + MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; - - // CC PRINTF - // printf("++ %.0f Slow start increase.\n", cwnd); - - } - else if (isNewCongestionControlPeriod) - { - cwnd+=MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; - - // CC PRINTF - // printf("+ %.0f Congestion avoidance increase.\n", cwnd); - } -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnDuplicateAck( CCTimeType curTime, DatagramSequenceNumberType sequenceNumber ) -{ - (void) curTime; - (void) sequenceNumber; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendAckGetBAndAS(CCTimeType curTime, bool *hasBAndAS, BytesPerMicrosecond *_B, BytesPerMicrosecond *_AS) -{ - (void) curTime; - (void) _B; - (void) _AS; - - *hasBAndAS=false; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendAck(CCTimeType curTime, uint32_t numBytes) -{ - (void) curTime; - (void) numBytes; - - oldestUnsentAck=0; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendNACK(CCTimeType curTime, uint32_t numBytes) -{ - (void) curTime; - (void) numBytes; - -} -// ---------------------------------------------------------------------------------------------------------------------------- -CCTimeType CCRakNetSlidingWindow::GetRTOForRetransmission(unsigned char timesSent) const -{ - (void) timesSent; - -#if CC_TIME_TYPE_BYTES==4 - const CCTimeType maxThreshold=2000; - //const CCTimeType minThreshold=100; - const CCTimeType additionalVariance=30; -#else - const CCTimeType maxThreshold=2000000; - //const CCTimeType minThreshold=100000; - const CCTimeType additionalVariance=30000; -#endif - - - if (estimatedRTT==UNSET_TIME_US) - return maxThreshold; - - //double u=1.0f; - double u=2.0f; - double q=4.0f; - - CCTimeType threshhold = (CCTimeType) (u * estimatedRTT + q * deviationRtt) + additionalVariance; - if (threshhold > maxThreshold) - return maxThreshold; - return threshhold; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::SetMTU(uint32_t bytes) -{ - RakAssert(bytes < MAXIMUM_MTU_SIZE); - MAXIMUM_MTU_INCLUDING_UDP_HEADER=bytes; -} -// 
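The retransmission timeout deleted above is the familiar SRTT/RTTVAR bound with an added slack term and a hard ceiling. A minimal sketch, assuming microsecond units as in the CC_TIME_TYPE_BYTES != 4 branch; the type and function names here are illustrative, not RakNet API.

    // RTO = u * SRTT + q * RTTVAR + additionalVariance, clamped to a ceiling.
    #include <cstdint>

    using TimeUS = uint64_t;

    struct RttEstimate {
        double estimatedRTT;   // smoothed RTT, microseconds
        double deviationRtt;   // RTT deviation, microseconds
        bool   valid;          // false until the first RTT sample arrives
    };

    TimeUS RetransmissionTimeout(const RttEstimate &rtt) {
        const TimeUS maxThreshold = 2000000;      // 2 s ceiling
        const TimeUS additionalVariance = 30000;  // 30 ms slack
        if (!rtt.valid)
            return maxThreshold;                  // no sample yet: be conservative
        const double u = 2.0, q = 4.0;
        TimeUS threshold =
            (TimeUS)(u * rtt.estimatedRTT + q * rtt.deviationRtt) + additionalVariance;
        return threshold > maxThreshold ? maxThreshold : threshold;
    }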
---------------------------------------------------------------------------------------------------------------------------- -uint32_t CCRakNetSlidingWindow::GetMTU(void) const -{ - return MAXIMUM_MTU_INCLUDING_UDP_HEADER; -} -// ---------------------------------------------------------------------------------------------------------------------------- -BytesPerMicrosecond CCRakNetSlidingWindow::GetLocalReceiveRate(CCTimeType currentTime) const -{ - (void) currentTime; - - return 0; // TODO -} -// ---------------------------------------------------------------------------------------------------------------------------- -double CCRakNetSlidingWindow::GetRTT(void) const -{ - if (lastRtt==UNSET_TIME_US) - return 0.0; - return lastRtt; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::GreaterThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) -{ - // a > b? - const DatagramSequenceNumberType halfSpan =(DatagramSequenceNumberType) (((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2); - return b!=a && b-a>halfSpan; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::LessThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) -{ - // a < b? - const DatagramSequenceNumberType halfSpan = ((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2; - return b!=a && b-aGetNetworkID() < data->replica->GetNetworkID()) - return -1; - if (replica3->GetNetworkID() > data->replica->GetNetworkID()) - return 1; - */ - - // 7/28/2013 - If GetNetworkID chagned during runtime, the list would be out of order and lookup would always fail or go out of bounds - // I remember before that I could not directly compare - if (replica3->referenceIndex < data->replica->referenceIndex) - return -1; - if (replica3->referenceIndex > data->replica->referenceIndex) - return 1; - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -LastSerializationResult::LastSerializationResult() -{ - replica=0; - lastSerializationResultBS=0; - whenLastSerialized = RakNet::GetTime(); -} -LastSerializationResult::~LastSerializationResult() -{ - if (lastSerializationResultBS) - RakNet::OP_DELETE(lastSerializationResultBS,_FILE_AND_LINE_); -} -void LastSerializationResult::AllocBS(void) -{ - if (lastSerializationResultBS==0) - { - lastSerializationResultBS=RakNet::OP_NEW(_FILE_AND_LINE_); - } -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -ReplicaManager3::ReplicaManager3() -{ - defaultSendParameters.orderingChannel=0; - defaultSendParameters.priority=HIGH_PRIORITY; - defaultSendParameters.reliability=RELIABLE_ORDERED; - defaultSendParameters.sendReceipt=0; - autoSerializeInterval=30; - lastAutoSerializeOccurance=0; - autoCreateConnections=true; - autoDestroyConnections=true; - currentlyDeallocatingReplica=0; - - for (unsigned int i=0; i < 255; i++) - worldsArray[i]=0; - - AddWorld(0); -} - -// 
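GreaterThan()/LessThan() above implement serial-number comparison: with an unsigned sequence type, whether the forward distance exceeds half the value space decides ordering, which stays correct across wraparound. A self-contained sketch with a 32-bit sequence type (illustrative names, same predicate shape as the deleted code):

    #include <cstdint>
    #include <cassert>

    using Seq = uint32_t;

    // "Is a newer than b?" in the presence of wraparound.
    bool SeqGreaterThan(Seq a, Seq b) {
        const Seq halfSpan = ((Seq)-1) / 2;
        return b != a && (Seq)(b - a) > halfSpan;
    }

    // "Is a older than b?" in the presence of wraparound.
    bool SeqLessThan(Seq a, Seq b) {
        const Seq halfSpan = ((Seq)-1) / 2;
        return b != a && (Seq)(b - a) < halfSpan;
    }

    int main() {
        assert(SeqGreaterThan(1, 0xFFFFFFFFu));   // 1 is "after" the wrap point
        assert(SeqLessThan(0xFFFFFFFFu, 1));
        assert(!SeqGreaterThan(5, 5));            // equal is neither greater nor less
    }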
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -ReplicaManager3::~ReplicaManager3() -{ - if (autoDestroyConnections) - { - for (unsigned int i=0; i < worldsList.Size(); i++) - { - RakAssert(worldsList[i]->connectionList.Size()==0); - } - } - Clear(true); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetAutoManageConnections(bool autoCreate, bool autoDestroy) -{ - autoCreateConnections=autoCreate; - autoDestroyConnections=autoDestroy; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::GetAutoCreateConnections(void) const -{ - return autoCreateConnections; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::GetAutoDestroyConnections(void) const -{ - return autoDestroyConnections; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::AutoCreateConnectionList( - DataStructures::List &participantListIn, - DataStructures::List &participantListOut, - WorldId worldId) -{ - for (unsigned int index=0; index < participantListIn.Size(); index++) - { - if (GetConnectionByGUID(participantListIn[index], worldId)) - { - Connection_RM3 *connection = AllocConnection(rakPeerInterface->GetSystemAddressFromGuid(participantListIn[index]), participantListIn[index]); - if (connection) - { - PushConnection(connection); - participantListOut.Push(connection, _FILE_AND_LINE_); - } - } - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::PushConnection(RakNet::Connection_RM3 *newConnection, WorldId worldId) -{ - if (newConnection==0) - return false; - if (GetConnectionByGUID(newConnection->GetRakNetGUID(), worldId)) - return false; - // Was this intended? 
- RakAssert(newConnection->GetRakNetGUID()!=rakPeerInterface->GetMyGUID()); - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index = world->connectionList.GetIndexOf(newConnection); - if (index==(unsigned int)-1) - { - world->connectionList.Push(newConnection,_FILE_AND_LINE_); - - // Send message to validate the connection - newConnection->SendValidation(rakPeerInterface, worldId); - - Connection_RM3::ConstructionMode constructionMode = newConnection->QueryConstructionMode(); - if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int pushIdx; - for (pushIdx=0; pushIdx < world->userReplicaList.Size(); pushIdx++) - newConnection->OnLocalReference(world->userReplicaList[pushIdx], this); - } - } - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::DeallocReplicaNoBroadcastDestruction(RakNet::Connection_RM3 *connection, RakNet::Replica3 *replica3) -{ - currentlyDeallocatingReplica=replica3; - replica3->DeallocReplica(connection); - currentlyDeallocatingReplica=0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RakNet::Connection_RM3 * ReplicaManager3::PopConnection(unsigned int index, WorldId worldId) -{ - DataStructures::List replicaList; - DataStructures::List destructionList; - DataStructures::List broadcastList; - RakNet::Connection_RM3 *connection; - unsigned int index2; - RM3ActionOnPopConnection action; - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - connection=world->connectionList[index]; - - // Clear out downloadGroup - connection->ClearDownloadGroup(rakPeerInterface); - - RakNetGUID guid = connection->GetRakNetGUID(); - // This might be wrong, I am relying on the variable creatingSystemGuid which is transmitted - // automatically from the first system to reference the object. However, if an object changes - // owners then it is not going to be returned here, and therefore QueryActionOnPopConnection() - // will not be called for the new owner. 
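The connection-registration flow in PushConnection above is: reject duplicates by GUID, register, send a validation message, then reference every already-known replica so construction queries can begin. A minimal sketch of that shape, using illustrative stand-in types rather than RakNet API:

    #include <cstdint>
    #include <vector>
    #include <algorithm>

    struct Replica { /* application object state */ };

    struct Connection {
        uint64_t guid;
        void SendValidation() { /* handshake message to confirm the link */ }
        void OnLocalReference(Replica *) { /* queue a construction query */ }
    };

    struct World {
        std::vector<Connection *> connections;
        std::vector<Replica *>    replicas;

        // Returns false if a connection with this GUID is already registered.
        bool PushConnection(Connection *c) {
            auto it = std::find_if(connections.begin(), connections.end(),
                                   [&](Connection *x) { return x->guid == c->guid; });
            if (it != connections.end())
                return false;                   // already known: no-op
            connections.push_back(c);
            c->SendValidation();                // confirm the link is bidirectional
            for (Replica *r : replicas)         // tell it about existing objects
                c->OnLocalReference(r);
            return true;
        }
    };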
- GetReplicasCreatedByGuid(guid, replicaList); - - for (index2=0; index2 < replicaList.Size(); index2++) - { - action = replicaList[index2]->QueryActionOnPopConnection(connection); - replicaList[index2]->OnPoppedConnection(connection); - if (action==RM3AOPC_DELETE_REPLICA) - { - if (replicaList[index2]->GetNetworkIDManager()) - destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); - } - else if (action==RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION) - { - if (replicaList[index2]->GetNetworkIDManager()) - destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); - - broadcastList.Push( replicaList[index2], _FILE_AND_LINE_ ); - } - else if (action==RM3AOPC_DO_NOTHING) - { - for (unsigned int index3 = 0; index3 < connection->queryToSerializeReplicaList.Size(); index3++) - { - LastSerializationResult *lsr = connection->queryToSerializeReplicaList[index3]; - lsr->whenLastSerialized=0; - if (lsr->lastSerializationResultBS) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - lsr->lastSerializationResultBS->bitStream[z].Reset(); - } - } - } - } - - BroadcastDestructionList(broadcastList, connection->GetSystemAddress()); - for (index2=0; index2 < destructionList.Size(); index2++) - { - // Do lookup in case DeallocReplica destroyed one of of the later Replica3 instances in the list - Replica3* replicaToDestroy = world->networkIDManager->GET_OBJECT_FROM_ID(destructionList[index2]); - if (replicaToDestroy) - { - replicaToDestroy->PreDestruction(connection); - replicaToDestroy->DeallocReplica(connection); - } - } - - world->connectionList.RemoveAtIndex(index); - return connection; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RakNet::Connection_RM3 * ReplicaManager3::PopConnection(RakNetGUID guid, WorldId worldId) -{ - unsigned int index; - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetRakNetGUID()==guid) - { - return PopConnection(index, worldId); - } - } - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::Reference(RakNet::Replica3 *replica3, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index = ReferenceInternal(replica3, worldId); - - if (index!=(unsigned int)-1) - { - unsigned int pushIdx; - for (pushIdx=0; pushIdx < world->connectionList.Size(); pushIdx++) - { - Connection_RM3::ConstructionMode constructionMode = world->connectionList[pushIdx]->QueryConstructionMode(); - if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - world->connectionList[pushIdx]->OnLocalReference(replica3, this); - } - } - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -unsigned int ReplicaManager3::ReferenceInternal(RakNet::Replica3 *replica3, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && 
"World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - index = world->userReplicaList.GetIndexOf(replica3); - if (index==(unsigned int)-1) - { - RakAssert(world->networkIDManager); - replica3->SetNetworkIDManager(world->networkIDManager); - // If it crashes on rakPeerInterface==0 then you didn't call RakPeerInterface::AttachPlugin() - if (replica3->creatingSystemGUID==UNASSIGNED_RAKNET_GUID) - replica3->creatingSystemGUID=rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - replica3->replicaManager=this; - if (replica3->referenceIndex==(uint32_t)-1) - { - replica3->referenceIndex=nextReferenceIndex++; - } - world->userReplicaList.Push(replica3,_FILE_AND_LINE_); - return world->userReplicaList.Size()-1; - } - return (unsigned int) -1; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::Dereference(RakNet::Replica3 *replica3, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index, index2; - for (index=0; index < world->userReplicaList.Size(); index++) - { - if (world->userReplicaList[index]==replica3) - { - world->userReplicaList.RemoveAtIndex(index); - break; - } - } - - // Remove from all connections - for (index2=0; index2 < world->connectionList.Size(); index2++) - { - world->connectionList[index2]->OnDereference(replica3, this); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::DereferenceList(DataStructures::List &replicaListIn, WorldId worldId) -{ - unsigned int index; - for (index=0; index < replicaListIn.Size(); index++) - Dereference(replicaListIn[index], worldId); -} - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetReplicasCreatedByMe(DataStructures::List &replicaListOut, WorldId worldId) -{ - //RakNetGUID myGuid = rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - GetReplicasCreatedByGuid(rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS), replicaListOut, worldId); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetReferencedReplicaList(DataStructures::List &replicaListOut, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - replicaListOut=world->userReplicaList; -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetReplicasCreatedByGuid(RakNetGUID guid, DataStructures::List &replicaListOut, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - replicaListOut.Clear(false,_FILE_AND_LINE_); - unsigned int index; - for (index=0; index < world->userReplicaList.Size(); index++) 
- { - if (world->userReplicaList[index]->creatingSystemGUID==guid) - replicaListOut.Push(world->userReplicaList[index],_FILE_AND_LINE_); - } -} - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -unsigned ReplicaManager3::GetReplicaCount(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->userReplicaList.Size(); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3 *ReplicaManager3::GetReplicaAtIndex(unsigned index, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->userReplicaList[index]; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -unsigned int ReplicaManager3::GetConnectionCount(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->connectionList.Size(); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3* ReplicaManager3::GetConnectionAtIndex(unsigned index, WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->connectionList[index]; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3* ReplicaManager3::GetConnectionBySystemAddress(const SystemAddress &sa, WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetSystemAddress()==sa) - { - return world->connectionList[index]; - } - } - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3* ReplicaManager3::GetConnectionByGUID(RakNetGUID guid, WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetRakNetGUID()==guid) - { - return world->connectionList[index]; - } - } - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetDefaultOrderingChannel(char def) -{ - defaultSendParameters.orderingChannel=def; -} - -// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetDefaultPacketPriority(PacketPriority def) -{ - defaultSendParameters.priority=def; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetDefaultPacketReliability(PacketReliability def) -{ - defaultSendParameters.reliability=def; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetAutoSerializeInterval(RakNet::Time intervalMS) -{ - autoSerializeInterval=intervalMS; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetConnectionsThatHaveReplicaConstructed(Replica3 *replica, DataStructures::List &connectionsThatHaveConstructedThisReplica, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - connectionsThatHaveConstructedThisReplica.Clear(false,_FILE_AND_LINE_); - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->HasReplicaConstructed(replica)) - connectionsThatHaveConstructedThisReplica.Push(world->connectionList[index],_FILE_AND_LINE_); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::GetAllConnectionDownloadsCompleted(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetDownloadWasCompleted()==false) - return false; - } - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::Clear(bool deleteWorlds) -{ - for (unsigned int i=0; i < worldsList.Size(); i++) - { - worldsList[i]->Clear(this); - if (deleteWorlds) - { - worldsArray[worldsList[i]->worldId]=0; - delete worldsList[i]; - } - } - if (deleteWorlds) - worldsList.Clear(false, _FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -ReplicaManager3::RM3World::RM3World() -{ - networkIDManager=0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::RM3World::Clear(ReplicaManager3 *replicaManager3) -{ - if (replicaManager3->GetAutoDestroyConnections()) - { - for (unsigned int i=0; i < connectionList.Size(); i++) - 
replicaManager3->DeallocConnection(connectionList[i]); - } - else - { - // Clear out downloadGroup even if not auto destroying the connection, since the packets need to go back to RakPeer - for (unsigned int i=0; i < connectionList.Size(); i++) - connectionList[i]->ClearDownloadGroup(replicaManager3->GetRakPeerInterface()); - } - - for (unsigned int i=0; i < userReplicaList.Size(); i++) - { - userReplicaList[i]->replicaManager=0; - userReplicaList[i]->SetNetworkIDManager(0); - } - connectionList.Clear(true,_FILE_AND_LINE_); - userReplicaList.Clear(true,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PRO ReplicaManager3::GetDefaultSendParameters(void) const -{ - return defaultSendParameters; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::AddWorld(WorldId worldId) -{ - RakAssert(worldsArray[worldId]==0 && "World already in use"); - - RM3World *newWorld = RakNet::OP_NEW(_FILE_AND_LINE_); - newWorld->worldId=worldId; - worldsArray[worldId]=newWorld; - worldsList.Push(newWorld,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::RemoveWorld(WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - for (unsigned int i=0; i < worldsList.Size(); i++) - { - if (worldsList[i]==worldsArray[worldId]) - { - RakNet::OP_DELETE(worldsList[i],_FILE_AND_LINE_); - worldsList.RemoveAtIndexFast(i); - break; - } - } - worldsArray[worldId]=0; - -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -NetworkIDManager *ReplicaManager3::GetNetworkIDManager(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->networkIDManager; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetNetworkIDManager(NetworkIDManager *_networkIDManager, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - world->networkIDManager=_networkIDManager; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnReceive(Packet *packet) -{ - if (packet->length<2) - return RR_CONTINUE_PROCESSING; - - WorldId incomingWorldId; - - RakNet::Time timestamp=0; - unsigned char packetIdentifier, packetDataOffset; - if ( ( unsigned char ) packet->data[ 0 ] == ID_TIMESTAMP ) - { - if ( packet->length > sizeof( unsigned char ) + sizeof( RakNet::Time ) ) - { - packetIdentifier = ( unsigned char ) packet->data[ sizeof( unsigned char ) + sizeof( RakNet::Time ) ]; - // Required for proper endian swapping - RakNet::BitStream 
tsBs(packet->data+sizeof(MessageID),packet->length-1,false); - tsBs.Read(timestamp); - // Next line assumes worldId is only 1 byte - RakAssert(sizeof(WorldId)==1); - incomingWorldId=packet->data[sizeof( unsigned char )*2 + sizeof( RakNet::Time )]; - packetDataOffset=sizeof( unsigned char )*3 + sizeof( RakNet::Time ); - } - else - return RR_STOP_PROCESSING_AND_DEALLOCATE; - } - else - { - packetIdentifier = ( unsigned char ) packet->data[ 0 ]; - // Next line assumes worldId is only 1 byte - RakAssert(sizeof(WorldId)==1); - incomingWorldId=packet->data[sizeof( unsigned char )]; - packetDataOffset=sizeof( unsigned char )*2; - } - - if (worldsArray[incomingWorldId]==0) - return RR_CONTINUE_PROCESSING; - - switch (packetIdentifier) - { - case ID_REPLICA_MANAGER_CONSTRUCTION: - return OnConstruction(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); - case ID_REPLICA_MANAGER_SERIALIZE: - return OnSerialize(packet, packet->data, packet->length, packet->guid, timestamp, packetDataOffset, incomingWorldId); - case ID_REPLICA_MANAGER_DOWNLOAD_STARTED: - if (packet->wasGeneratedLocally==false) - { - return OnDownloadStarted(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); - } - else - break; - case ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE: - if (packet->wasGeneratedLocally==false) - { - return OnDownloadComplete(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); - } - else - break; - case ID_REPLICA_MANAGER_SCOPE_CHANGE: - { - Connection_RM3 *connection = GetConnectionByGUID(packet->guid, incomingWorldId); - if (connection && connection->isValidated==false) - { - // This connection is now confirmed bidirectional - connection->isValidated=true; - // Reply back on validation - connection->SendValidation(rakPeerInterface,incomingWorldId); - } - } - } - - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::AutoConstructByQuery(ReplicaManager3 *replicaManager3, WorldId worldId) -{ - ValidateLists(replicaManager3); - - ConstructionMode constructionMode = QueryConstructionMode(); - - unsigned int index; - RM3ConstructionState constructionState; - LastSerializationResult *lsr; - index=0; - - constructedReplicasCulled.Clear(false,_FILE_AND_LINE_); - destroyedReplicasCulled.Clear(false,_FILE_AND_LINE_); - - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - while (index < queryToConstructReplicaList.Size()) - { - lsr=queryToConstructReplicaList[index]; - constructionState=lsr->replica->QueryConstruction(this, replicaManager3); - if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY || constructionState==RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT) - { - OnReplicaAlreadyExists(index, replicaManager3); - if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) - constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); - - /* - if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) - { - // Serialize construction data to this connection - RakNet::BitStream bsOut; - bsOut.Write((MessageID)ID_REPLICA_MANAGER_3_SERIALIZE_CONSTRUCTION_EXISTING); - bsOut.Write(replicaManager3->GetWorldID()); - NetworkID networkId; - networkId=lsr->replica->GetNetworkID(); - bsOut.Write(networkId); - BitSize_t bitsWritten = 
bsOut.GetNumberOfBitsUsed(); - lsr->replica->SerializeConstructionExisting(&bsOut, this); - if (bsOut.GetNumberOfBitsUsed()!=bitsWritten) - replicaManager3->SendUnified(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,GetSystemAddress(), false); - } - - // Serialize first serialization to this connection. - // This is done here, as it isn't done in PushConstruction - SerializeParameters sp; - RakNet::BitStream emptyBs; - for (index=0; index < (unsigned int) RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) - { - sp.lastSentBitstream[index]=&emptyBs; - sp.pro[index]=replicaManager3->GetDefaultSendParameters(); - } - sp.bitsWrittenSoFar=0; - sp.destinationConnection=this; - sp.messageTimestamp=0; - sp.whenLastSerialized=0; - - RakNet::Replica3 *replica = lsr->replica; - - RM3SerializationResult res = replica->Serialize(&sp); - if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && - res!=RM3SR_DO_NOT_SERIALIZE && - res!=RM3SR_SERIALIZED_UNIQUELY) - { - bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); - allIndices[z]=true; - } - if (SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, replicaManager3->GetRakPeerInterface(), replicaManager3->GetWorldID())==SSICR_SENT_DATA) - lsr->replica->whenLastSerialized=RakNet::GetTimeMS(); - } - */ - } - else if (constructionState==RM3CS_SEND_CONSTRUCTION) - { - OnConstructToThisConnection(index, replicaManager3); - RakAssert(lsr->replica); - constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); - } - else if (constructionState==RM3CS_NEVER_CONSTRUCT) - { - OnNeverConstruct(index, replicaManager3); - } - else// if (constructionState==RM3CS_NO_ACTION) - { - // Do nothing - index++; - } - } - - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - RM3DestructionState destructionState; - index=0; - while (index < queryToDestructReplicaList.Size()) - { - lsr=queryToDestructReplicaList[index]; - destructionState=lsr->replica->QueryDestruction(this, replicaManager3); - if (destructionState==RM3DS_SEND_DESTRUCTION) - { - OnSendDestructionFromQuery(index, replicaManager3); - destroyedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); - } - else if (destructionState==RM3DS_DO_NOT_QUERY_DESTRUCTION) - { - OnDoNotQueryDestruction(index, replicaManager3); - } - else// if (destructionState==RM3CS_NO_ACTION) - { - // Do nothing - index++; - } - } - } - } - else if (constructionMode==QUERY_CONNECTION_FOR_REPLICA_LIST) - { - QueryReplicaList(constructedReplicasCulled,destroyedReplicasCulled); - - unsigned int idx1, idx2; - - // Create new - for (idx2=0; idx2 < constructedReplicasCulled.Size(); idx2++) - OnConstructToThisConnection(constructedReplicasCulled[idx2], replicaManager3); - - bool exists; - for (idx2=0; idx2 < destroyedReplicasCulled.Size(); idx2++) - { - exists=false; - bool objectExists; - idx1=constructedReplicaList.GetIndexFromKey(destroyedReplicasCulled[idx2], &objectExists); - if (objectExists) - { - constructedReplicaList.RemoveAtIndex(idx1); - - unsigned int j; - for (j=0; j < queryToSerializeReplicaList.Size(); j++) - { - if (queryToSerializeReplicaList[j]->replica==destroyedReplicasCulled[idx2] ) - { - queryToSerializeReplicaList.RemoveAtIndex(j); - break; - } - } - } - } - } - - SendConstruction(constructedReplicasCulled,destroyedReplicasCulled,replicaManager3->defaultSendParameters,replicaManager3->rakPeerInterface,worldId,replicaManager3); -} -void 
ReplicaManager3::Update(void) -{ - unsigned int index,index2,index3; - - WorldId worldId; - RM3World *world; - RakNet::Time time = RakNet::GetTime(); - - for (index3=0; index3 < worldsList.Size(); index3++) - { - world = worldsList[index3]; - worldId = world->worldId; - - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->isValidated==false) - continue; - world->connectionList[index]->AutoConstructByQuery(this, worldId); - } - } - - if (time - lastAutoSerializeOccurance >= autoSerializeInterval) - { - for (index3=0; index3 < worldsList.Size(); index3++) - { - world = worldsList[index3]; - worldId = world->worldId; - - for (index=0; index < world->userReplicaList.Size(); index++) - { - world->userReplicaList[index]->forceSendUntilNextUpdate=false; - world->userReplicaList[index]->OnUserReplicaPreSerializeTick(); - } - - unsigned int index; - SerializeParameters sp; - sp.curTime=time; - Connection_RM3 *connection; - SendSerializeIfChangedResult ssicr; - LastSerializationResult *lsr; - - sp.messageTimestamp=0; - for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) - sp.pro[i]=defaultSendParameters; - index2=0; - for (index=0; index < world->connectionList.Size(); index++) - { - connection = world->connectionList[index]; - sp.bitsWrittenSoFar=0; - index2=0; - sp.destinationConnection=connection; - - DataStructures::List replicasToSerialize; - replicasToSerialize.Clear(true, _FILE_AND_LINE_); - if (connection->QuerySerializationList(replicasToSerialize)) - { - // Update replica->lsr so we can lookup in the next block - // lsr is per connection / per replica - while (index2 < connection->queryToSerializeReplicaList.Size()) - { - connection->queryToSerializeReplicaList[index2]->replica->lsr=connection->queryToSerializeReplicaList[index2]; - index2++; - } - - - // User is manually specifying list of replicas to serialize - index2=0; - while (index2 < replicasToSerialize.Size()) - { - lsr=replicasToSerialize[index2]->lsr; - RakAssert(lsr->replica==replicasToSerialize[index2]); - - sp.whenLastSerialized=lsr->whenLastSerialized; - ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); - if (ssicr==SSICR_SENT_DATA) - lsr->whenLastSerialized=time; - index2++; - } - } - else - { - while (index2 < connection->queryToSerializeReplicaList.Size()) - { - lsr=connection->queryToSerializeReplicaList[index2]; - - sp.destinationConnection=connection; - sp.whenLastSerialized=lsr->whenLastSerialized; - ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); - if (ssicr==SSICR_SENT_DATA) - { - lsr->whenLastSerialized=time; - index2++; - } - else if (ssicr==SSICR_NEVER_SERIALIZE) - { - // Removed from the middle of the list - } - else - index2++; - } - } - } - } - - lastAutoSerializeOccurance=time; - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnClosedConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, PI2_LostConnectionReason lostConnectionReason ) -{ - (void) lostConnectionReason; - (void) systemAddress; - if (autoDestroyConnections) - { - Connection_RM3 *connection = PopConnection(rakNetGUID); - if (connection) - DeallocConnection(connection); - } -} - -// 
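Update() above runs construction queries on every call but gates serialization on autoSerializeInterval, and advances each pair's whenLastSerialized only when data was actually sent. A reduced sketch of that throttling pattern; the names are hypothetical, and the default interval of 30 is taken from the constructor earlier in this file.

    #include <cstdint>
    #include <vector>

    using TimeMS = uint64_t;

    struct Pair { TimeMS whenLastSerialized = 0; /* replica + connection state */ };

    struct Manager {
        TimeMS lastAutoSerialize = 0;
        TimeMS autoSerializeInterval = 30;        // default from the constructor above
        std::vector<Pair> pairs;

        // Hypothetical hook: returns true if data was actually sent this tick.
        bool SendSerializeIfChanged(Pair &, TimeMS) { return true; }

        void Update(TimeMS now) {
            if (now - lastAutoSerialize < autoSerializeInterval)
                return;                           // serialization not due yet
            for (Pair &p : pairs) {
                if (SendSerializeIfChanged(p, now))
                    p.whenLastSerialized = now;   // only advance on real sends
            }
            lastAutoSerialize = now;
        }
    };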
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnNewConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, bool isIncoming) -{ - (void) isIncoming; - if (autoCreateConnections) - { - Connection_RM3 *connection = AllocConnection(systemAddress, rakNetGUID); - if (connection) - PushConnection(connection); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnRakPeerShutdown(void) -{ - if (autoDestroyConnections) - { - RM3World *world; - unsigned int index3; - for (index3=0; index3 < worldsList.Size(); index3++) - { - world = worldsList[index3]; - - while (world->connectionList.Size()) - { - Connection_RM3 *connection = PopConnection(world->connectionList.Size()-1, world->worldId); - if (connection) - DeallocConnection(connection); - } - } - } - - - Clear(false); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnDetach(void) -{ - OnRakPeerShutdown(); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnConstruction(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) -{ - RM3World *world = worldsArray[worldId]; - - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - { - // Almost certainly a bug - RakAssert("Got OnConstruction but no connection yet" && 0); - return RR_CONTINUE_PROCESSING; - } - if (connection->groupConstructionAndSerialize) - { - connection->downloadGroup.Push(packet, __FILE__, __LINE__); - return RR_STOP_PROCESSING; - } - - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - uint16_t constructionObjectListSize, destructionObjectListSize, index, index2; - BitSize_t streamEnd, writeAllocationIDEnd; - Replica3 *replica; - NetworkID networkId; - RakNetGUID creatingSystemGuid; - bool actuallyCreateObject=false; - - DataStructures::List actuallyCreateObjectList; - DataStructures::List constructionTickStack; - - RakAssert(world->networkIDManager); - - bsIn.Read(constructionObjectListSize); - for (index=0; index < constructionObjectListSize; index++) - { - bsIn.Read(streamEnd); - bsIn.Read(networkId); - Replica3* existingReplica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); - bsIn.Read(actuallyCreateObject); - actuallyCreateObjectList.Push(actuallyCreateObject, _FILE_AND_LINE_); - bsIn.AlignReadToByteBoundary(); - - if (actuallyCreateObject) - { - bsIn.Read(creatingSystemGuid); - bsIn.Read(writeAllocationIDEnd); - - //printf("OnConstruction: %i\n",networkId.guid.g); // Removeme - if (existingReplica) - { - existingReplica->replicaManager=this; - - // Network ID already in use - connection->OnDownloadExisting(existingReplica, this); - - constructionTickStack.Push(0, _FILE_AND_LINE_); - bsIn.SetReadOffset(streamEnd); - continue; - } - - bsIn.AlignReadToByteBoundary(); - 
replica = connection->AllocReplica(&bsIn, this); - if (replica==0) - { - constructionTickStack.Push(0, _FILE_AND_LINE_); - bsIn.SetReadOffset(streamEnd); - continue; - } - - // Go past the bitStream written to with WriteAllocationID(). Necessary in case the user didn't read out the bitStream the same way it was written - // bitOffset2 is already aligned - bsIn.SetReadOffset(writeAllocationIDEnd); - - replica->SetNetworkIDManager(world->networkIDManager); - replica->SetNetworkID(networkId); - - replica->replicaManager=this; - replica->creatingSystemGUID=creatingSystemGuid; - - if (!replica->QueryRemoteConstruction(connection) || - !replica->DeserializeConstruction(&bsIn, connection)) - { - DeallocReplicaNoBroadcastDestruction(connection, replica); - bsIn.SetReadOffset(streamEnd); - constructionTickStack.Push(0, _FILE_AND_LINE_); - continue; - } - - constructionTickStack.Push(replica, _FILE_AND_LINE_); - - // Register the replica - ReferenceInternal(replica, worldId); - } - else - { - if (existingReplica) - { - existingReplica->DeserializeConstructionExisting(&bsIn, connection); - constructionTickStack.Push(existingReplica, _FILE_AND_LINE_); - } - else - { - constructionTickStack.Push(0, _FILE_AND_LINE_); - } - } - - - bsIn.SetReadOffset(streamEnd); - bsIn.AlignReadToByteBoundary(); - } - - RakAssert(constructionTickStack.Size()==constructionObjectListSize); - RakAssert(actuallyCreateObjectList.Size()==constructionObjectListSize); - - RakNet::BitStream empty; - for (index=0; index < constructionObjectListSize; index++) - { - bool pdcWritten=false; - bsIn.Read(pdcWritten); - if (pdcWritten) - { - bsIn.AlignReadToByteBoundary(); - bsIn.Read(streamEnd); - bsIn.Read(networkId); - if (constructionTickStack[index]!=0) - { - bsIn.AlignReadToByteBoundary(); - if (actuallyCreateObjectList[index]) - constructionTickStack[index]->PostDeserializeConstruction(&bsIn, connection); - else - constructionTickStack[index]->PostDeserializeConstructionExisting(&bsIn, connection); - } - bsIn.SetReadOffset(streamEnd); - } - else - { - if (constructionTickStack[index]!=0) - { - if (actuallyCreateObjectList[index]) - constructionTickStack[index]->PostDeserializeConstruction(&empty, connection); - else - constructionTickStack[index]->PostDeserializeConstructionExisting(&empty, connection); - } - } - } - - for (index=0; index < constructionObjectListSize; index++) - { - if (constructionTickStack[index]!=0) - { - if (actuallyCreateObjectList[index]) - { - // Tell the connection(s) that this object exists since they just sent it to us - connection->OnDownloadFromThisSystem(constructionTickStack[index], this); - - for (index2=0; index2 < world->connectionList.Size(); index2++) - { - if (world->connectionList[index2]!=connection) - world->connectionList[index2]->OnDownloadFromOtherSystem(constructionTickStack[index], this); - } - } - } - } - - // Destructions - bool b = bsIn.Read(destructionObjectListSize); - (void) b; - RakAssert(b); - for (index=0; index < destructionObjectListSize; index++) - { - bsIn.Read(networkId); - bsIn.Read(streamEnd); - replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); - if (replica==0) - { - // Unknown object - bsIn.SetReadOffset(streamEnd); - continue; - } - bsIn.Read(replica->deletingSystemGUID); - if (replica->DeserializeDestruction(&bsIn,connection)) - { - // Make sure it wasn't deleted in DeserializeDestruction - if (world->networkIDManager->GET_OBJECT_FROM_ID(networkId)) - { - replica->PreDestruction(connection); - - // Forward deletion by remote system - if 
(replica->QueryRelayDestruction(connection)) - BroadcastDestruction(replica,connection->GetSystemAddress()); - Dereference(replica); - DeallocReplicaNoBroadcastDestruction(connection, replica); - } - } - else - { - replica->PreDestruction(connection); - connection->OnDereference(replica, this); - } - - bsIn.AlignReadToByteBoundary(); - } - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnSerialize(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, RakNet::Time timestamp, unsigned char packetDataOffset, WorldId worldId) -{ - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - return RR_CONTINUE_PROCESSING; - if (connection->groupConstructionAndSerialize) - { - connection->downloadGroup.Push(packet, __FILE__, __LINE__); - return RR_STOP_PROCESSING; - } - - RM3World *world = worldsArray[worldId]; - RakAssert(world->networkIDManager); - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - - struct DeserializeParameters ds; - ds.timeStamp=timestamp; - ds.sourceConnection=connection; - - Replica3 *replica; - NetworkID networkId; - BitSize_t bitsUsed; - bsIn.Read(networkId); - //printf("OnSerialize: %i\n",networkId.guid.g); // Removeme - replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); - if (replica) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - bsIn.Read(ds.bitstreamWrittenTo[z]); - if (ds.bitstreamWrittenTo[z]) - { - bsIn.ReadCompressed(bitsUsed); - bsIn.AlignReadToByteBoundary(); - bsIn.Read(ds.serializationBitstream[z], bitsUsed); - } - } - replica->Deserialize(&ds); - } - return RR_CONTINUE_PROCESSING; -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnDownloadStarted(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) -{ - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - return RR_CONTINUE_PROCESSING; - if (connection->QueryGroupDownloadMessages() && - // ID_DOWNLOAD_STARTED will be processed twice, being processed the second time once ID_DOWNLOAD_COMPLETE arrives. 
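OnSerialize() above reads, per output channel, a written-to flag followed by a compressed bit count and the aligned payload; SendSerialize() later writes the mirror image. The byte-oriented sketch below illustrates only that framing idea, not RakNet's bit-level BitStream encoding; the channel count and every name are illustrative.

    #include <cstdint>
    #include <cstring>
    #include <vector>
    #include <optional>

    constexpr int kNumChannels = 4;   // stand-in for RM3_NUM_OUTPUT_BITSTREAM_CHANNELS

    using Buffer = std::vector<uint8_t>;

    // For each channel: 1-byte "has data" flag, then length prefix, then payload.
    Buffer EncodeChannels(const std::optional<Buffer> (&channels)[kNumChannels]) {
        Buffer out;
        for (int z = 0; z < kNumChannels; z++) {
            out.push_back(channels[z] ? 1 : 0);
            if (channels[z]) {
                uint32_t len = (uint32_t)channels[z]->size();
                out.insert(out.end(), (uint8_t *)&len, (uint8_t *)&len + sizeof(len));
                out.insert(out.end(), channels[z]->begin(), channels[z]->end());
            }
        }
        return out;
    }

    // Mirror of the encoder: skip channels whose flag is clear.
    void DecodeChannels(const Buffer &in, std::optional<Buffer> (&channels)[kNumChannels]) {
        size_t pos = 0;
        for (int z = 0; z < kNumChannels; z++) {
            bool has = in[pos++] != 0;
            if (!has) { channels[z].reset(); continue; }
            uint32_t len;
            std::memcpy(&len, &in[pos], sizeof(len)); pos += sizeof(len);
            channels[z] = Buffer(in.begin() + pos, in.begin() + pos + len);
            pos += len;
        }
    }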
- // However, the second time groupConstructionAndSerialize will be set to true so it won't be processed a third time - connection->groupConstructionAndSerialize==false - ) - { - // These messages will be held by the plugin and returned when the download is complete - connection->groupConstructionAndSerialize=true; - RakAssert(connection->downloadGroup.Size()==0); - connection->downloadGroup.Push(packet, __FILE__, __LINE__); - return RR_STOP_PROCESSING; - } - - connection->groupConstructionAndSerialize=false; - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - connection->DeserializeOnDownloadStarted(&bsIn); - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnDownloadComplete(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) -{ - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - return RR_CONTINUE_PROCESSING; - - if (connection->groupConstructionAndSerialize==true && connection->downloadGroup.Size()>0) - { - // Push back buffered packets in front of this one - unsigned int i; - for (i=0; i < connection->downloadGroup.Size(); i++) - rakPeerInterface->PushBackPacket(connection->downloadGroup[i],false); - - // Push this one to be last too. It will be processed again, but the second time - // groupConstructionAndSerialize will be false and downloadGroup will be empty, so it will go past this block - connection->downloadGroup.Clear(__FILE__,__LINE__); - rakPeerInterface->PushBackPacket(packet,false); - - return RR_STOP_PROCESSING; - } - - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - connection->gotDownloadComplete=true; - connection->DeserializeOnDownloadComplete(&bsIn); - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3* ReplicaManager3::GetReplicaByNetworkID(NetworkID networkId, WorldId worldId) -{ - RM3World *world = worldsArray[worldId]; - - unsigned int i; - for (i=0; i < world->userReplicaList.Size(); i++) - { - if (world->userReplicaList[i]->GetNetworkID()==networkId) - return world->userReplicaList[i]; - } - return 0; -} - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - - -void ReplicaManager3::BroadcastDestructionList(DataStructures::List &replicaListSource, const SystemAddress &exclusionAddress, WorldId worldId) -{ - RakNet::BitStream bsOut; - unsigned int i,j; - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - DataStructures::List replicaList; - - for (i=0; i < replicaListSource.Size(); i++) - { - if (replicaListSource[i]==currentlyDeallocatingReplica) - continue; - replicaList.Push(replicaListSource[i], __FILE__, __LINE__); - } - - if (replicaList.Size()==0) - return; - - for (i=0; i < replicaList.Size(); i++) - { - if (replicaList[i]->deletingSystemGUID==UNASSIGNED_RAKNET_GUID) - 
replicaList[i]->deletingSystemGUID=GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - } - - for (j=0; j < world->connectionList.Size(); j++) - { - if (world->connectionList[j]->GetSystemAddress()==exclusionAddress) - continue; - - bsOut.Reset(); - bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); - bsOut.Write(worldId); - uint16_t cnt=0; - bsOut.Write(cnt); // No construction - cnt=(uint16_t) replicaList.Size(); - BitSize_t cntOffset=bsOut.GetWriteOffset();; - bsOut.Write(cnt); // Overwritten at send call - cnt=0; - - for (i=0; i < replicaList.Size(); i++) - { - if (world->connectionList[j]->HasReplicaConstructed(replicaList[i])==false) - continue; - cnt++; - - NetworkID networkId; - networkId=replicaList[i]->GetNetworkID(); - bsOut.Write(networkId); - BitSize_t offsetStart, offsetEnd; - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); - bsOut.Write(replicaList[i]->deletingSystemGUID); - replicaList[i]->SerializeDestruction(&bsOut, world->connectionList[j]); - bsOut.AlignWriteToByteBoundary(); - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - - if (cnt>0) - { - BitSize_t curOffset=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(cntOffset); - bsOut.Write(cnt); - bsOut.SetWriteOffset(curOffset); - rakPeerInterface->Send(&bsOut,defaultSendParameters.priority,defaultSendParameters.reliability,defaultSendParameters.orderingChannel,world->connectionList[j]->GetSystemAddress(),false, defaultSendParameters.sendReceipt); - } - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - - -void ReplicaManager3::BroadcastDestruction(Replica3 *replica, const SystemAddress &exclusionAddress) -{ - DataStructures::List replicaList; - replicaList.Push(replica, _FILE_AND_LINE_ ); - BroadcastDestructionList(replicaList,exclusionAddress); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3::Connection_RM3(const SystemAddress &_systemAddress, RakNetGUID _guid) -: systemAddress(_systemAddress), guid(_guid) -{ - isValidated=false; - isFirstConstruction=true; - groupConstructionAndSerialize=false; - gotDownloadComplete=false; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3::~Connection_RM3() -{ - unsigned int i; - for (i=0; i < constructedReplicaList.Size(); i++) - RakNet::OP_DELETE(constructedReplicaList[i], _FILE_AND_LINE_); - for (i=0; i < queryToConstructReplicaList.Size(); i++) - RakNet::OP_DELETE(queryToConstructReplicaList[i], _FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::GetConstructedReplicas(DataStructures::List &objectsTheyDoHave) -{ - objectsTheyDoHave.Clear(true,_FILE_AND_LINE_); - for 
(unsigned int idx=0; idx < constructedReplicaList.Size(); idx++) - { - objectsTheyDoHave.Push(constructedReplicaList[idx]->replica, _FILE_AND_LINE_ ); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Connection_RM3::HasReplicaConstructed(RakNet::Replica3 *replica) -{ - bool objectExists; - constructedReplicaList.GetIndexFromKey(replica, &objectExists); - return objectExists; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -void Connection_RM3::SendSerializeHeader(RakNet::Replica3 *replica, RakNet::Time timestamp, RakNet::BitStream *bs, WorldId worldId) -{ - bs->Reset(); - - if (timestamp!=0) - { - bs->Write((MessageID)ID_TIMESTAMP); - bs->Write(timestamp); - } - bs->Write((MessageID)ID_REPLICA_MANAGER_SERIALIZE); - bs->Write(worldId); - bs->Write(replica->GetNetworkID()); -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -void Connection_RM3::ClearDownloadGroup(RakPeerInterface *rakPeerInterface) -{ - unsigned int i; - for (i=0; i < downloadGroup.Size(); i++) - rakPeerInterface->DeallocatePacket(downloadGroup[i]); - downloadGroup.Clear(__FILE__,__LINE__); -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -SendSerializeIfChangedResult Connection_RM3::SendSerialize(RakNet::Replica3 *replica, bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::BitStream serializationData[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::Time timestamp, PRO sendParameters[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakPeerInterface *rakPeer, unsigned char worldId, RakNet::Time curTime) -{ - bool channelHasData; - BitSize_t sum=0; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (indicesToSend[z]) - sum+=serializationData[z].GetNumberOfBitsUsed(); - } - - RakNet::BitStream out; - BitSize_t bitsPerChannel[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - - if (sum==0) - { - memset(bitsPerChannel, 0, sizeof(bitsPerChannel)); - replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); - return SSICR_DID_NOT_SEND_DATA; - } - - RakAssert(replica->GetNetworkID()!=UNASSIGNED_NETWORK_ID); - - BitSize_t bitsUsed; - - int channelIndex; - PRO lastPro=sendParameters[0]; - - for (channelIndex=0; channelIndex < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex++) - { - if (channelIndex==0) - { - SendSerializeHeader(replica, timestamp, &out, worldId); - } - else if (lastPro!=sendParameters[channelIndex]) - { - // Write out remainder - for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) - { - bitsPerChannel[channelIndex2]=0; - out.Write(false); - } - - // Send remainder - replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); - rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); - - // If no data left to send, quit out - bool anyData=false; - for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) - { - if 
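SendSerialize flushes the message it has built whenever the per-channel send parameters (PRO) change, so every outgoing packet carries exactly one priority/reliability combination. The sketch below groups consecutive channels by identical parameters and emits one flush per group; SendParams and FlushByParams are illustrative names, not part of RakNet.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for RakNet's PRO (priority / reliability / ordering channel).
struct SendParams {
    int priority;
    int reliability;
    bool operator==(const SendParams& o) const {
        return priority == o.priority && reliability == o.reliability;
    }
};

struct Channel { SendParams pro; std::string payload; };

// Emit one "message" per run of consecutive channels that share send parameters,
// mirroring how SendSerialize flushes and restarts when sendParameters[] changes.
void FlushByParams(const std::vector<Channel>& channels) {
    std::size_t i = 0;
    while (i < channels.size()) {
        SendParams current = channels[i].pro;
        std::string message;
        while (i < channels.size() && channels[i].pro == current) {
            message += channels[i].payload;    // accumulate channels for this packet
            ++i;
        }
        std::cout << "send(priority=" << current.priority
                  << ", reliability=" << current.reliability
                  << ", bytes=" << message.size() << ")\n";
    }
}

int main() {
    FlushByParams({{{0, 0}, "pos"}, {{0, 0}, "vel"}, {{1, 2}, "chat"}});
}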
(serializationData[channelIndex2].GetNumberOfBitsUsed()>0) - { - anyData=true; - break; - } - } - if (anyData==false) - return SSICR_SENT_DATA; - - // Restart stream - SendSerializeHeader(replica, timestamp, &out, worldId); - - for (int channelIndex2=0; channelIndex2 < channelIndex; channelIndex2++) - { - bitsPerChannel[channelIndex2]=0; - out.Write(false); - } - lastPro=sendParameters[channelIndex]; - } - - bitsUsed=serializationData[channelIndex].GetNumberOfBitsUsed(); - channelHasData = indicesToSend[channelIndex]==true && bitsUsed>0; - out.Write(channelHasData); - if (channelHasData) - { - bitsPerChannel[channelIndex] = bitsUsed; - out.WriteCompressed(bitsUsed); - out.AlignWriteToByteBoundary(); - out.Write(serializationData[channelIndex]); - // Crap, forgot this line, was a huge bug in that I'd only send to the first 3 systems - serializationData[channelIndex].ResetReadPointer(); - } - else - { - bitsPerChannel[channelIndex] = 0; - } - } - replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); - rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); - return SSICR_SENT_DATA; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -SendSerializeIfChangedResult Connection_RM3::SendSerializeIfChanged(LastSerializationResult *lsr, SerializeParameters *sp, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager, RakNet::Time curTime) -{ - RakNet::Replica3 *replica = lsr->replica; - - if (replica->GetNetworkID()==UNASSIGNED_NETWORK_ID) - return SSICR_DID_NOT_SEND_DATA; - - RM3QuerySerializationResult rm3qsr = replica->QuerySerialization(this); - if (rm3qsr==RM3QSR_NEVER_CALL_SERIALIZE) - { - // Never again for this connection and replica pair - OnNeverSerialize(lsr, replicaManager); - return SSICR_NEVER_SERIALIZE; - } - - if (rm3qsr==RM3QSR_DO_NOT_CALL_SERIALIZE) - return SSICR_DID_NOT_SEND_DATA; - - if (replica->forceSendUntilNextUpdate) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (replica->lastSentSerialization.indicesToSend[z]) - sp->bitsWrittenSoFar+=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed(); - } - return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, replica->lastSentSerialization.bitStream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); - } - - for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) - { - sp->outputBitstream[i].Reset(); - if (lsr->lastSerializationResultBS) - sp->lastSentBitstream[i]=&lsr->lastSerializationResultBS->bitStream[i]; - else - sp->lastSentBitstream[i]=&replica->lastSentSerialization.bitStream[i]; - } - - RM3SerializationResult serializationResult = replica->Serialize(sp); - - if (serializationResult==RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION) - { - // Never again for this connection and replica pair - OnNeverSerialize(lsr, replicaManager); - return SSICR_NEVER_SERIALIZE; - } - - if (serializationResult==RM3SR_DO_NOT_SERIALIZE) - { - // Don't serialize this tick only - return SSICR_DID_NOT_SEND_DATA; - } - - // This is necessary in case the user in the Serialize() function for some reason read the bitstream they also wrote - // WIthout this code, the Write calls to another bitstream would not write the entire bitstream - BitSize_t sum=0; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - 
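The "forgot this line" comment above points at a subtle reuse bug: writing one bitstream into another consumes the source's read pointer, so without a reset only the first recipient sees the channel data. A small sketch with an explicit read cursor, assuming a simplified CursorBuffer in place of BitStream.

#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Source buffer with a read cursor: copying from it advances the cursor,
// just as BitStream::Write(BitStream*) consumes from the source's read pointer.
struct CursorBuffer {
    std::string data;
    std::size_t readPos = 0;

    std::string ReadRemaining() {                  // consumes everything left
        std::string out = data.substr(readPos);
        readPos = data.size();
        return out;
    }
    void ResetReadPointer() { readPos = 0; }       // the line the comment refers to
};

int main() {
    CursorBuffer channelData{"replica-state"};
    std::vector<std::string> recipients{"A", "B", "C"};

    for (const std::string& who : recipients) {
        std::string message = channelData.ReadRemaining();   // copy into this connection's packet
        std::cout << who << " gets " << message.size() << " bytes\n";
        channelData.ResetReadPointer();                       // without this, B and C get 0 bytes
    }
}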
sp->outputBitstream[z].ResetReadPointer(); - sum+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - } - - if (sum==0) - { - // Don't serialize this tick only - return SSICR_DID_NOT_SEND_DATA; - } - - if (serializationResult==RM3SR_SERIALIZED_ALWAYS) - { - bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - allIndices[z]=true; - - lsr->AllocBS(); - lsr->lastSerializationResultBS->bitStream[z].Reset(); - lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - } - return SendSerialize(replica, allIndices, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); - } - - if (serializationResult==RM3SR_SERIALIZED_ALWAYS_IDENTICALLY) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - replica->lastSentSerialization.indicesToSend[z]=sp->outputBitstream[z].GetNumberOfBitsUsed()>0; - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - replica->lastSentSerialization.bitStream[z].Reset(); - replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - replica->forceSendUntilNextUpdate=true; - } - return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); - } - - bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && - (serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION || - ((sp->outputBitstream[z].GetNumberOfBitsUsed()!=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed() || - memcmp(sp->outputBitstream[z].GetData(), replica->lastSentSerialization.bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0)))) - { - indicesToSend[z]=true; - replica->lastSentSerialization.indicesToSend[z]=true; - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - replica->lastSentSerialization.bitStream[z].Reset(); - replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - replica->forceSendUntilNextUpdate=true; - } - else - { - indicesToSend[z]=false; - replica->lastSentSerialization.indicesToSend[z]=false; - } - } - } - else - { - lsr->AllocBS(); - - // RM3SR_SERIALIZED_UNIQUELY - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && - (sp->outputBitstream[z].GetNumberOfBitsUsed()!=lsr->lastSerializationResultBS->bitStream[z].GetNumberOfBitsUsed() || - memcmp(sp->outputBitstream[z].GetData(), lsr->lastSerializationResultBS->bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0) - ) - { - indicesToSend[z]=true; - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - lsr->lastSerializationResultBS->bitStream[z].Reset(); - lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - } - else - { - indicesToSend[z]=false; - } - } - } - - - if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) - 
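For RM3SR_BROADCAST_IDENTICALLY, a channel is re-sent only when its freshly serialized bytes differ from the last broadcast copy, compared first by length and then with memcmp. A compact sketch of that per-channel diffing, with plain byte vectors standing in for the per-channel bitstreams.

#include <array>
#include <cstring>
#include <iostream>
#include <vector>

constexpr int kNumChannels = 4;   // stand-in for RM3_NUM_OUTPUT_BITSTREAM_CHANNELS

// Decide which channels actually need to go on the wire: a channel is sent only
// when it has data and that data differs (size or bytes) from the last sent copy.
// The last-sent copy is updated in place, matching lastSentSerialization's role.
std::array<bool, kNumChannels> DiffChannels(
    const std::array<std::vector<unsigned char>, kNumChannels>& current,
    std::array<std::vector<unsigned char>, kNumChannels>& lastSent)
{
    std::array<bool, kNumChannels> send{};
    for (int z = 0; z < kNumChannels; ++z) {
        bool changed =
            !current[z].empty() &&
            (current[z].size() != lastSent[z].size() ||
             std::memcmp(current[z].data(), lastSent[z].data(), current[z].size()) != 0);
        send[z] = changed;
        if (changed)
            lastSent[z] = current[z];   // remember what was broadcast
    }
    return send;
}

int main() {
    std::array<std::vector<unsigned char>, kNumChannels> last{}, now{};
    now[0] = {1, 2, 3};                        // new data on channel 0
    auto first = DiffChannels(now, last);      // channel 0 dirty
    auto second = DiffChannels(now, last);     // unchanged, nothing dirty
    std::cout << first[0] << " " << second[0] << "\n";   // prints: 1 0
}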
replica->forceSendUntilNextUpdate=true; - - // Send out the data - return SendSerialize(replica, indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -void Connection_RM3::OnLocalReference(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - RakAssert(replica3); - (void) replicaManager; - (void) constructionMode; - -#ifdef _DEBUG - for (unsigned int i=0; i < queryToConstructReplicaList.Size(); i++) - { - if (queryToConstructReplicaList[i]->replica==replica3) - { - RakAssert("replica added twice to queryToConstructReplicaList" && 0); - } - } - - if (constructedReplicaList.HasData(replica3)==true) - { - RakAssert("replica added to queryToConstructReplicaList when already in constructedReplicaList" && 0); - } -#endif - - LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); - lsr->replica=replica3; - queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDereference(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - - if (replica3->GetNetworkIDManager() == 0) - return; - - LastSerializationResult* lsr=0; - unsigned int idx; - - bool objectExists; - idx=constructedReplicaList.GetIndexFromKey(replica3, &objectExists); - if (objectExists) - { - lsr=constructedReplicaList[idx]; - constructedReplicaList.RemoveAtIndex(idx); - } - - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - if (queryToConstructReplicaList[idx]->replica==replica3) - { - lsr=queryToConstructReplicaList[idx]; - queryToConstructReplicaList.RemoveAtIndex(idx); - break; - } - } - - for (idx=0; idx < queryToSerializeReplicaList.Size(); idx++) - { - if (queryToSerializeReplicaList[idx]->replica==replica3) - { - lsr=queryToSerializeReplicaList[idx]; - queryToSerializeReplicaList.RemoveAtIndex(idx); - break; - } - } - - for (idx=0; idx < queryToDestructReplicaList.Size(); idx++) - { - if (queryToDestructReplicaList[idx]->replica==replica3) - { - lsr=queryToDestructReplicaList[idx]; - queryToDestructReplicaList.RemoveAtIndex(idx); - break; - } - } - - ValidateLists(replicaManager); - - if (lsr) - RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); - - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDownloadFromThisSystem(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - RakAssert(replica3); - - ValidateLists(replicaManager); - LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); - lsr->replica=replica3; - - ConstructionMode constructionMode = QueryConstructionMode(); - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int j; - for (j=0; j < queryToConstructReplicaList.Size(); j++) - { - if 
(queryToConstructReplicaList[j]->replica==replica3 ) - { - queryToConstructReplicaList.RemoveAtIndex(j); - break; - } - } - - queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); - } - - if (constructedReplicaList.Insert(lsr->replica, lsr, true, _FILE_AND_LINE_) != (unsigned) -1) - { - //assert(queryToSerializeReplicaList.GetIndexOf(replica3)==(unsigned int)-1); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); - } - - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDownloadFromOtherSystem(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int j; - for (j=0; j < queryToConstructReplicaList.Size(); j++) - { - if (queryToConstructReplicaList[j]->replica==replica3 ) - { - return; - } - } - - OnLocalReference(replica3, replicaManager); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnNeverConstruct(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; - queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); - RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnConstructToThisConnection(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; - queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); - //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); - //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); - //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnConstructToThisConnection(Replica3 *replica, ReplicaManager3 *replicaManager) -{ - RakAssert(replica); - 
RakAssert(QueryConstructionMode()==QUERY_CONNECTION_FOR_REPLICA_LIST); - (void) replicaManager; - - LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); - lsr->replica=replica; - constructedReplicaList.Insert(replica,lsr,true,_FILE_AND_LINE_); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnNeverSerialize(LastSerializationResult *lsr, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - - unsigned int j; - for (j=0; j < queryToSerializeReplicaList.Size(); j++) - { - if (queryToSerializeReplicaList[j]==lsr ) - { - queryToSerializeReplicaList.RemoveAtIndex(j); - break; - } - } - - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnReplicaAlreadyExists(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; - queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); - //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); - //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); - //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDownloadExisting(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - - ConstructionMode constructionMode = QueryConstructionMode(); - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int idx; - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - if (queryToConstructReplicaList[idx]->replica==replica3) - { - OnConstructToThisConnection(idx, replicaManager); - return; - } - } - } - else - { - OnConstructToThisConnection(replica3, replicaManager); - } -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnSendDestructionFromQuery(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = 
queryToDestructReplicaList[queryToDestructIdx]; - queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); - unsigned int j; - for (j=0; j < queryToSerializeReplicaList.Size(); j++) - { - if (queryToSerializeReplicaList[j]->replica==lsr->replica ) - { - queryToSerializeReplicaList.RemoveAtIndex(j); - break; - } - } - for (j=0; j < constructedReplicaList.Size(); j++) - { - if (constructedReplicaList[j]->replica==lsr->replica ) - { - constructedReplicaList.RemoveAtIndex(j); - break; - } - } - //assert(queryToConstructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDoNotQueryDestruction(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::ValidateLists(ReplicaManager3 *replicaManager) const -{ - (void) replicaManager; - /* -#ifdef _DEBUG - // Each object should exist only once in either constructedReplicaList or queryToConstructReplicaList - // replicaPointer from LastSerializationResult should be same among all lists - unsigned int idx, idx2; - for (idx=0; idx < constructedReplicaList.Size(); idx++) - { - idx2=queryToConstructReplicaList.GetIndexOf(constructedReplicaList[idx]->replica); - if (idx2!=(unsigned int)-1) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - idx2=constructedReplicaList.GetIndexOf(queryToConstructReplicaList[idx]->replica); - if (idx2!=(unsigned int)-1) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - LastSerializationResult *lsr, *lsr2; - for (idx=0; idx < constructedReplicaList.Size(); idx++) - { - lsr=constructedReplicaList[idx]; - - idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToSerializeReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToDestructReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - } - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - lsr=queryToConstructReplicaList[idx]; - - idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToSerializeReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToDestructReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - } - - // Verify pointer integrity - for (idx=0; idx < constructedReplicaList.Size(); idx++) - { - if (constructedReplicaList[idx]->replica->replicaManager!=replicaManager) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - // Verify pointer integrity - for (idx=0; idx < 
queryToConstructReplicaList.Size(); idx++) - { - if (queryToConstructReplicaList[idx]->replica->replicaManager!=replicaManager) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } -#endif - */ -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::SendConstruction(DataStructures::List &newObjects, DataStructures::List &deletedObjects, PRO sendParameters, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager3) -{ - if (newObjects.Size()==0 && deletedObjects.Size()==0) - return; - - // All construction and destruction takes place in the same network message - // Otherwise, if objects rely on each other being created the same tick to be valid, this won't always be true - // DataStructures::List serializedObjects; - BitSize_t offsetStart, offsetStart2, offsetEnd; - unsigned int newListIndex, oldListIndex; - RakNet::BitStream bsOut; - NetworkID networkId; - if (isFirstConstruction) - { - bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_STARTED); - bsOut.Write(worldId); - SerializeOnDownloadStarted(&bsOut); - rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); - } - - // LastSerializationResult* lsr; - bsOut.Reset(); - bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); - bsOut.Write(worldId); - uint16_t objectSize = (uint16_t) newObjects.Size(); - bsOut.Write(objectSize); - - // Construction - for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) - { - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); // overwritten to point to the end of the stream - networkId=newObjects[newListIndex]->GetNetworkID(); - bsOut.Write(networkId); - - RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); - bool actuallyCreateObject = cs==RM3CS_SEND_CONSTRUCTION; - bsOut.Write(actuallyCreateObject); - bsOut.AlignWriteToByteBoundary(); - - if (actuallyCreateObject) - { - // Actually create the object - bsOut.Write(newObjects[newListIndex]->creatingSystemGUID); - offsetStart2=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart2); // overwritten to point to after the call to WriteAllocationID - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - newObjects[newListIndex]->WriteAllocationID(this, &bsOut); - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart2); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - newObjects[newListIndex]->SerializeConstruction(&bsOut, this); - } - else - { - newObjects[newListIndex]->SerializeConstructionExisting(&bsOut, this); - } - - bsOut.AlignWriteToByteBoundary(); - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - - RakNet::BitStream bsOut2; - for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) - { - bsOut2.Reset(); - RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); - if (cs==RM3CS_SEND_CONSTRUCTION) - { - newObjects[newListIndex]->PostSerializeConstruction(&bsOut2, this); - } - else - { - RakAssert(cs==RM3CS_ALREADY_EXISTS_REMOTELY); - 
newObjects[newListIndex]->PostSerializeConstructionExisting(&bsOut2, this); - } - if (bsOut2.GetNumberOfBitsUsed()>0) - { - bsOut.Write(true); - bsOut.AlignWriteToByteBoundary(); - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); // overwritten to point to the end of the stream - networkId=newObjects[newListIndex]->GetNetworkID(); - bsOut.Write(networkId); - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - bsOut.Write(&bsOut2); - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - else - bsOut.Write(false); - } - bsOut.AlignWriteToByteBoundary(); - - // Destruction - objectSize = (uint16_t) deletedObjects.Size(); - bsOut.Write(objectSize); - for (oldListIndex=0; oldListIndex < deletedObjects.Size(); oldListIndex++) - { - networkId=deletedObjects[oldListIndex]->GetNetworkID(); - bsOut.Write(networkId); - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); - deletedObjects[oldListIndex]->deletingSystemGUID=rakPeer->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - bsOut.Write(deletedObjects[oldListIndex]->deletingSystemGUID); - deletedObjects[oldListIndex]->SerializeDestruction(&bsOut, this); - bsOut.AlignWriteToByteBoundary(); - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); - - // TODO - shouldn't this be part of construction? - - // Initial Download serialize to a new system - // Immediately send serialize after construction if the replica object already has saved data - // If the object was serialized identically, and does not change later on, then the new connection never gets the data - SerializeParameters sp; - sp.whenLastSerialized=0; - RakNet::BitStream emptyBs; - for (int index=0; index < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) - { - sp.lastSentBitstream[index]=&emptyBs; - sp.pro[index]=sendParameters; - sp.pro[index].reliability=RELIABLE_ORDERED; - } - - sp.bitsWrittenSoFar=0; -// RakNet::Time t = RakNet::GetTimeMS(); - for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) - { - sp.destinationConnection=this; - sp.messageTimestamp=0; - RakNet::Replica3 *replica = newObjects[newListIndex]; - // 8/22/09 Forgot ResetWritePointer - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp.outputBitstream[z].ResetWritePointer(); - } - - RM3SerializationResult res = replica->Serialize(&sp); - if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && - res!=RM3SR_DO_NOT_SERIALIZE && - res!=RM3SR_SERIALIZED_UNIQUELY) - { - bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); - allIndices[z]=true; - } - SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, rakPeer, worldId, GetTime()); -/// newObjects[newListIndex]->whenLastSerialized=t; - - } - // else wait for construction request accepted before serializing - } - - if (isFirstConstruction) - { - bsOut.Reset(); - bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE); - bsOut.Write(worldId); - SerializeOnDownloadComplete(&bsOut); - 
rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); - } - - isFirstConstruction=false; - -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::SendValidation(RakNet::RakPeerInterface *rakPeer, WorldId worldId) -{ - // Hijack to mean sendValidation - RakNet::BitStream bsOut; - bsOut.Write((MessageID)ID_REPLICA_MANAGER_SCOPE_CHANGE); - bsOut.Write(worldId); - rakPeer->Send(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,systemAddress,false); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3::Replica3() -{ - creatingSystemGUID=UNASSIGNED_RAKNET_GUID; - deletingSystemGUID=UNASSIGNED_RAKNET_GUID; - replicaManager=0; - forceSendUntilNextUpdate=false; - lsr=0; - referenceIndex = (uint32_t)-1; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3::~Replica3() -{ - if (replicaManager) - { - replicaManager->Dereference(this); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Replica3::BroadcastDestruction(void) -{ - replicaManager->BroadcastDestruction(this,UNASSIGNED_SYSTEM_ADDRESS); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RakNetGUID Replica3::GetCreatingSystemGUID(void) const -{ - return creatingSystemGUID; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ConstructionState Replica3::QueryConstruction_ClientConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - (void) destinationConnection; - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3CS_SEND_CONSTRUCTION; - // Send back to the owner client too, because they couldn't assign the network ID - if (isThisTheServer) - return RM3CS_SEND_CONSTRUCTION; - return RM3CS_NEVER_CONSTRUCT; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Replica3::QueryRemoteConstruction_ClientConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) -{ - (void) sourceConnection; - (void) isThisTheServer; - - // OK to create - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ConstructionState Replica3::QueryConstruction_ServerConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - (void) destinationConnection; - - if 
(isThisTheServer) - return RM3CS_SEND_CONSTRUCTION; - return RM3CS_NEVER_CONSTRUCT; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Replica3::QueryRemoteConstruction_ServerConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) -{ - (void) sourceConnection; - if (isThisTheServer) - return false; - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ConstructionState Replica3::QueryConstruction_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) -{ - (void) destinationConnection; - - if (p2pMode==R3P2PM_SINGLE_OWNER) - { - // We send to all, others do nothing - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3CS_SEND_CONSTRUCTION; - - // RM3CS_NEVER_CONSTRUCT will not send the object, and will not Serialize() it - return RM3CS_NEVER_CONSTRUCT; - } - else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) - { - return RM3CS_SEND_CONSTRUCTION; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) - { - return RM3CS_ALREADY_EXISTS_REMOTELY; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) - { - return RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT; - } - else - { - RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); - - // RM3CS_ALREADY_EXISTS_REMOTELY will not send the object, but WILL call QuerySerialization() and Serialize() on it. 
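The peer-to-peer construction query above boils down to an authority check: in single-owner mode only the creating peer pushes construction, multi-owner peers push while authoritative, and static objects are treated as already existing everywhere. A condensed sketch of that decision, with simplified enum names standing in for Replica3P2PMode and RM3ConstructionState.

#include <iostream>

// Simplified stand-ins for the Replica3P2PMode / RM3ConstructionState values above.
enum class P2PMode { SingleOwner, MultiOwnerAuthoritative, MultiOwnerNotAuthoritative,
                     StaticAuthoritative, StaticNotAuthoritative };
enum class ConstructionState { SendConstruction, NeverConstruct,
                               AlreadyExistsRemotely, AlreadyExistsRemotelyDoNotConstruct };

// Mirror of the decision logic: only the creating (or currently authoritative) peer
// pushes construction; static objects are assumed to already exist on every peer.
ConstructionState QueryConstructionP2P(bool iAmTheCreator, P2PMode mode) {
    switch (mode) {
        case P2PMode::SingleOwner:
            return iAmTheCreator ? ConstructionState::SendConstruction
                                 : ConstructionState::NeverConstruct;
        case P2PMode::MultiOwnerAuthoritative:
            return ConstructionState::SendConstruction;
        case P2PMode::StaticAuthoritative:
            return ConstructionState::AlreadyExistsRemotely;
        case P2PMode::StaticNotAuthoritative:
            return ConstructionState::AlreadyExistsRemotelyDoNotConstruct;
        case P2PMode::MultiOwnerNotAuthoritative:
        default:
            // Still serialized later on, just never constructed from this peer.
            return ConstructionState::AlreadyExistsRemotely;
    }
}

int main() {
    bool creator = true;
    std::cout << (QueryConstructionP2P(creator, P2PMode::SingleOwner)
                  == ConstructionState::SendConstruction) << "\n";   // prints 1
}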
- return RM3CS_ALREADY_EXISTS_REMOTELY; - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Replica3::QueryRemoteConstruction_PeerToPeer(RakNet::Connection_RM3 *sourceConnection) -{ - (void) sourceConnection; - - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3QuerySerializationResult Replica3::QuerySerialization_ClientSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - // Owner client sends to all - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3QSR_CALL_SERIALIZE; - // Server sends to all but owner client - if (isThisTheServer && destinationConnection->GetRakNetGUID()!=creatingSystemGUID) - return RM3QSR_CALL_SERIALIZE; - // Remote clients do not send - return RM3QSR_NEVER_CALL_SERIALIZE; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3QuerySerializationResult Replica3::QuerySerialization_ServerSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - (void) destinationConnection; - // Server sends to all - if (isThisTheServer) - return RM3QSR_CALL_SERIALIZE; - - // Clients do not send - return RM3QSR_NEVER_CALL_SERIALIZE; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3QuerySerializationResult Replica3::QuerySerialization_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) -{ - (void) destinationConnection; - - if (p2pMode==R3P2PM_SINGLE_OWNER) - { - // Owner peer sends to all - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3QSR_CALL_SERIALIZE; - - // Remote peers do not send - return RM3QSR_NEVER_CALL_SERIALIZE; - } - else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) - { - return RM3QSR_CALL_SERIALIZE; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) - { - return RM3QSR_CALL_SERIALIZE; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) - { - return RM3QSR_DO_NOT_CALL_SERIALIZE; - } - else - { - RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); - return RM3QSR_DO_NOT_CALL_SERIALIZE; - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Client(RakNet::Connection_RM3 *droppedConnection) const -{ - (void) droppedConnection; - return RM3AOPC_DELETE_REPLICA; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Server(RakNet::Connection_RM3 *droppedConnection) const -{ - (void) 
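The client-serializable query above encodes a relay rule: the owning client serializes to everyone, the server serializes to everyone except the owner, and other clients never serialize. A small sketch of that rule, using string GUID stand-ins rather than RakNetGUID.

#include <iostream>
#include <string>

// Stand-in identifiers; RakNet compares RakNetGUIDs, strings keep the sketch short.
struct Peer { std::string guid; bool isServer; };

// Client-authoritative rule sketched from QuerySerialization_ClientSerializable:
// the creating client serializes to everyone, the server relays to everyone except
// the creator (who already has the data), and other clients never serialize.
bool ShouldSerializeTo(const Peer& self, const std::string& creatorGuid,
                       const std::string& destinationGuid) {
    if (self.guid == creatorGuid)
        return true;                                    // owner sends to all
    if (self.isServer && destinationGuid != creatorGuid)
        return true;                                    // server relays, skipping the owner
    return false;                                       // remote clients stay silent
}

int main() {
    Peer server{"S", true};
    std::cout << ShouldSerializeTo(server, "C1", "C2") << " "    // 1: relay to C2
              << ShouldSerializeTo(server, "C1", "C1") << "\n";  // 0: owner already knows
}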
droppedConnection; - return RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_PeerToPeer(RakNet::Connection_RM3 *droppedConnection) const -{ - (void) droppedConnection; - return RM3AOPC_DELETE_REPLICA; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -#endif // _RAKNET_SUPPORT_* diff --git a/node_modules/npm-mas-mas/cmaki_generator/run.sh b/node_modules/npm-mas-mas/cmaki_generator/run.sh deleted file mode 100644 index 72b0a36..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/run.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -e -MODE=${1} -echo running in mode ${MODE} ... -mkdir -p build/${MODE} -pushd build/${MODE} -cmake ../.. -DCMAKE_BUILD_TYPE=$MODE -DCMAKE_MODULE_PATH=$(pwd)/../../cmaki -DFIRST_ERROR=1 -cmake --build . --config $MODE --target install -- -j8 -k || cmake --build . --config ${MODE} --target install -- -j1 -ctest . --no-compress-output --output-on-failure -T Test -C ${MODE} -V -popd diff --git a/node_modules/npm-mas-mas/cmaki_generator/run_test.sh b/node_modules/npm-mas-mas/cmaki_generator/run_test.sh deleted file mode 100644 index 967bf29..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/run_test.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -function print_if_has_content() -{ - file=$1 - minimumsize=400 - actualsize=$(wc -c <"$file") - if [ $actualsize -ge $minimumsize ]; - then - cat $file - fi -} - -echo Running test: $1 -export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer-3.6) -export ASAN_OPTIONS="check_initialization_order=1" -rm $1.coverage 2> /dev/null -rm $1.gcno 2> /dev/null -rm default.profraw 2> /dev/null -./$1 -ret=$? 
-llvm-profdata-3.6 merge -o $1.gcno default.profraw 2> /dev/null -llvm-cov-3.6 show ./$1 -instr-profile=$1.gcno > $1.coverage -cat $1.coverage | ansi2html > $1.html -print_if_has_content $1.html -exit $ret - diff --git a/node_modules/npm-mas-mas/cmaki_generator/run_tests.py b/node_modules/npm-mas-mas/cmaki_generator/run_tests.py deleted file mode 100644 index 66f01d7..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/run_tests.py +++ /dev/null @@ -1,175 +0,0 @@ -import os -import utils -import logging -from third_party import platforms -from third_party import build_unittests_foldername -from itertools import product -from third_party import get_identifier - -def run_tests(node, parameters, compiler_replace_maps, unittests): - - old_cwd = os.getcwd() - - rootdir = parameters.rootdir - rootdir = utils.get_norm_path(rootdir) - rootdir = rootdir.replace('\\', '/') - - cmakelib_dir = parameters.cmakefiles - cmakelib_dir = utils.get_norm_path(cmakelib_dir) - cmakelib_dir = cmakelib_dir.replace('\\', '/') - - cmake3p_dir = parameters.prefix - cmake3p_dir = utils.get_norm_path(cmake3p_dir) - cmake3p_dir = cmake3p_dir.replace('\\', '/') - - cmake_prefix = parameters.prefix - cmake_prefix = utils.get_norm_path(cmake_prefix) - cmake_prefix = cmake_prefix.replace('\\', '/') - - cmake_third_party_dir = parameters.third_party_dir - cmake_third_party_dir = utils.get_norm_path(cmake_third_party_dir) - cmake_third_party_dir = cmake_third_party_dir.replace('\\', '/') - - package = node.get_package_name() - package_upper = node.get_package_name_norm_upper() - version = node.get_version() - packing = node.is_packing() - if not packing: - logging.warning("No need run_tests, because wasn't generated a package") - return 0 - - # prepare unittests - # can be a file or content - unittest_value = node.get_unittest() - if unittest_value is not None: - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - builddir = node.get_build_directory(plat, build_mode) - path_test = os.path.join(builddir, build_unittests_foldername) - utils.trymkdir(path_test) - - # is is a file - unittest_path = os.path.join(builddir, unittest_value) - if os.path.isfile(unittest_path): - with open(unittest_path, 'rt') as f: - unittest_value = f.read() - - with open(os.path.join(path_test, 'main.cpp'), 'wt') as f: - f.write(unittest_value) - - if parameters.fast: - logging.debug('skipping for because is in fast mode: "prepare"') - break - else: - logging.warning('[%s] No test present.' 
% package) - - folder_3rdparty = parameters.third_party_dir - output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) - - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, reversed(build_modes)): - for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): - # verify md5sum - install_directory = node.get_install_directory(plat) - workspace = node.get_workspace(plat) - utils.trymkdir(install_directory) - with utils.working_directory(install_directory): - prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) - prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) - if os.path.exists(prefix_package) and os.path.exists(prefix_package_md5): - with open(prefix_package_md5, 'rt') as f: - md5sum = f.read().strip() - - try: - logging.debug("expected md5: %s" % md5sum) - for line in utils.get_stdout('cmake -E md5sum %s' % prefix_package, env_modified, 'cmake'): - if len(line) > 0: - # md5sum filename - chunks = line.split(' ') - chunks = list(filter(None, chunks)) - assert(len(chunks) > 0) - md5sum_real = chunks[0] - logging.debug("real md5: %s" % md5sum_real) - - if (md5sum != md5sum_real): - logging.error('Error en generated md5sum file!!!') - logging.error('Expected: %s' % md5sum) - logging.error('Found: %s' % md5sum_real) - # add error to node - node.ret += 1 - except utils.NotFoundProgram: - logging.info('can\'t verify md5 because not found cmake') - else: - logging.warning('Skipping verification md5 because don\'t exists package or md5') - - logging.info('running unittests. Build mode: %s Platform: %s' % (build_mode, plat)) - - # OJO con borrar cmake3p, se borra la marca - # node.remove_cmake3p( cmake3p_dir ) - - builddir = os.path.join(old_cwd, node.get_build_directory(plat, build_mode)) - logging.info('Using builddir %s' % builddir) - unittest_folder = os.path.join(builddir, build_unittests_foldername) - unittest_found = os.path.join(unittest_folder, 'main.cpp') - unittest_found = unittest_found.replace('\\', '/') - unittest_root = os.path.join(old_cwd, build_unittests_foldername) - - if os.path.exists(unittest_found): - - logging.info('Search cmakelib in %s' % cmakelib_dir) - if os.path.isdir(os.path.join(cmakelib_dir)): - - with utils.working_directory(unittest_folder): - - generator_extra = '' - if generator is not None: - generator_extra = '-G"%s"' % generator - - find_packages = [] - find_packages.append(package) - for dep in node.get_depends_raw(): - package_name = dep.get_package_name() - find_packages.append(package_name) - find_packages_str = ';'.join(find_packages) - - # remove CMakeCache.txt for avoid problems when - # change of generator - utils.tryremove('CMakeCache.txt') - utils.tryremove('cmake_install.cmake') - utils.tryremove('install_manifest.txt') - utils.tryremove_dir('CMakeFiles') - - cmd = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKI_COMPILER="%s" -DCMAKI_PLATFORM="%s" -DCMAKE_MODULE_PATH="%s" -DPACKAGE="%s" -DPACKAGE_UPPER="%s" -DCMAKE_BUILD_TYPE="%s" -DCMAKE_PREFIX_PATH="%s" -DUNITTEST_PATH="%s" -DDEPENDS_PATH="%s" -DFIND_PACKAGES="%s" && cmake --build . --config %s --target install && ctest . 
-C %s --output-on-failure -VV' % ( - unittest_root, - generator_extra, - cmake_prefix, - get_identifier('COMPILER'), - get_identifier('ALL'), - cmakelib_dir, - package, - package_upper, - build_mode, - cmake_third_party_dir, - unittest_found, - cmake_prefix, - find_packages_str, - build_mode, - build_mode) - ret = utils.safe_system(cmd, env=env_modified) - node.ret += abs(ret) - if ret != 0: - unittests[ '%s - %s' % (package, version) ] = 'ERROR: Fail test' - else: - unittests[ '%s - %s' % (package, version) ] = 'OK: Pass test' - else: - unittests[ '%s - %s' % (package, version) ] = 'WARN: No cmakelib available' - else: - unittests[ '%s - %s' % (package, version) ] = 'WARN: No unittest found' - - if node.ret != 0: - logging.warning('Cleaning packages because tests are failed.') - node.remove_packages() - - # successful - return True - diff --git a/node_modules/npm-mas-mas/cmaki_generator/save_package.py b/node_modules/npm-mas-mas/cmaki_generator/save_package.py deleted file mode 100755 index 57fd37a..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/save_package.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -import sys -import logging -import argparse -import urllib -import csv -import utils -import subprocess - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--name', required=True, dest='name', help='name package', default=None) - parser.add_argument('--version', required=True, dest='version', help='version package fixed', default=None) - parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) - parameters = parser.parse_args() - - depends_file = parameters.depends - if os.path.exists(depends_file): - data = utils.deserialize(depends_file) - else: - data = {} - # serialize if is new data - if parameters.name not in data: - data[parameters.name] = parameters.version - logging.info('serialize data = %s' % data) - depends_file_tmp = depends_file + '.tmp' - utils.serialize(data, depends_file_tmp) - ret = subprocess.call('python -m json.tool %s > %s' % (depends_file_tmp, depends_file), shell=True) - os.remove(depends_file_tmp) - sys.exit(ret) - diff --git a/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt deleted file mode 100644 index 6683d9c..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt +++ /dev/null @@ -1,1366 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -project(SDL2 C) -include(CheckFunctionExists) -include(CheckLibraryExists) -include(CheckIncludeFiles) -include(CheckIncludeFile) -include(CheckSymbolExists) -include(CheckCSourceRuns) -include(CheckCCompilerFlag) -include(CheckTypeSize) -include(CheckStructHasMember) -include(CMakeDependentOption) -include(FindPkgConfig) -set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") -include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) -include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) - -# General settings -# Edit include/SDL_version.h and change the version, then: -# SDL_MICRO_VERSION += 1; -# SDL_INTERFACE_AGE += 1; -# SDL_BINARY_AGE += 1; -# if any functions have been added, set SDL_INTERFACE_AGE to 0. -# if backwards compatibility has been broken, -# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. 
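The comment block above describes the libtool-style version bookkeeping; the sketch below works through the arithmetic with the 2.0.3 numbers set a few lines further down (micro 3, interface age 1, binary age 3), as a C++ constexpr check rather than CMake.

#include <iostream>

// Worked example of the libtool-like arithmetic the comment describes.
constexpr int kMicroVersion = 3;
constexpr int kInterfaceAge = 1;
constexpr int kBinaryAge    = 3;

constexpr int kLtCurrent  = kMicroVersion - kInterfaceAge;   // 2
constexpr int kLtAge      = kBinaryAge - kInterfaceAge;      // 2
constexpr int kLtMajor    = kLtCurrent - kLtAge;             // 0
constexpr int kLtRevision = kInterfaceAge;                   // 1

static_assert(kLtMajor == 0 && kLtAge == 2 && kLtRevision == 1,
              "LT_VERSION for SDL 2.0.3 works out to 0.2.1");

int main() {
    std::cout << kLtMajor << "." << kLtAge << "." << kLtRevision << "\n";   // 0.2.1
}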
-set(SDL_MAJOR_VERSION 2) -set(SDL_MINOR_VERSION 0) -set(SDL_MICRO_VERSION 3) -set(SDL_INTERFACE_AGE 1) -set(SDL_BINARY_AGE 3) -set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") - -# Calculate a libtool-like version number -math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") -set(LT_REVISION "${SDL_INTERFACE_AGE}") -set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") -set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") - -message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") - -# General settings & flags -set(LIBRARY_OUTPUT_DIRECTORY "build") -# Check for 64 or 32 bit -set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) -if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(ARCH_64 TRUE) - set(PROCESSOR_ARCH "x64") -else() - set(ARCH_64 FALSE) - set(PROCESSOR_ARCH "x86") -endif() -set(LIBNAME SDL2) -if(NOT LIBTYPE) - set(LIBTYPE SHARED) -endif() - -# Get the platform -if(WIN32) - if(NOT WINDOWS) - set(WINDOWS TRUE) - endif() -elseif(UNIX AND NOT APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") - set(LINUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") - set(NETBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") - set(OPENBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") - set(GNU TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") - set(BSDI TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") - set(SYSV5 TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") - set(SOLARIS TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") - set(HPUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") - set(AIX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") - set(MINIX TRUE) - endif() -elseif(APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") - set(DARWIN TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") - set(MACOSX TRUE) - endif() - # TODO: iOS? -elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") - message_error("BeOS support has been removed as of SDL 2.0.2.") -elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") - set(HAIKU TRUE) -endif() - -# Don't mistake osx for unix -if(UNIX AND NOT APPLE) - set(UNIX_SYS ON) -else() - set(UNIX_SYS OFF) -endif() - -if(UNIX OR APPLE) - set(UNIX_OR_MAC_SYS ON) -else() - set(UNIX_OR_MAC_SYS OFF) -endif() - -if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. 
- set(PTHREADS_ENABLED_BY_DEFAULT ON) -else() - set(PTHREADS_ENABLED_BY_DEFAULT OFF) -endif() - -# Default option knobs -if(APPLE OR ARCH_64) - set(OPT_DEF_SSEMATH ON) -endif() -if(UNIX OR MINGW OR MSYS) - set(OPT_DEF_LIBC ON) -endif() - -# Compiler info -if(CMAKE_COMPILER_IS_GNUCC) - set(USE_GCC TRUE) - set(OPT_DEF_ASM TRUE) -elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") - set(USE_CLANG TRUE) - set(OPT_DEF_ASM TRUE) -elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ - set(OPT_DEF_ASM TRUE) - #set(CMAKE_C_FLAGS "/ZI /WX- / -else() - set(OPT_DEF_ASM FALSE) -endif() - -# Default flags, if not set otherwise -if("$ENV{CFLAGS}" STREQUAL "") - if(USE_GCC OR USE_CLANG) - set(CMAKE_C_FLAGS "-g -O3") - endif() -else() - set(CMAKE_C_FLAGS "$ENV{CFLAGS}") - list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") -endif() -if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 - list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") -endif() - -if(MSVC) - option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) - if(FORCE_STATIC_VCRT) - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) - if(${flag_var} MATCHES "/MD") - string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") - endif() - endforeach() - endif() -endif() - -# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, -# etc. are created correctly. -set(SDL_LIBS "-lSDL2") -set(SDL_CFLAGS "") - -# Emscripten toolchain has a nonempty default value for this, and the checks -# in this file need to change that, so remember the original value, and -# restore back to that afterwards. For check_function_exists() to work in -# Emscripten, this value must be at its default value. -set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) - -if(CYGWIN) - # We build SDL on cygwin without the UNIX emulation layer - include_directories("-I/usr/include/mingw") - set(CMAKE_REQUIRED_FLAGS "-mno-cygwin") - check_c_source_compiles("int main(int argc, char **argv) {}" - HAVE_GCC_NO_CYGWIN) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_GCC_NO_CYGWIN) - list(APPEND EXTRA_LDFLAGS "-mno-cygwin") - list(APPEND SDL_LIBS "-mno-cygwin") - endif() - set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") -endif() - -add_definitions(-DUSING_GENERATED_CONFIG_H) -# General includes -include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) - -if(EMSCRIPTEN) - # Set up default values for the currently supported set of subsystems: - # Emscripten/Javascript does not have assembly support, a dynamic library - # loading architecture, low-level CPU inspection or multithreading. 
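The Emscripten special-casing here reflects the same constraint at the source level: with no thread support assumed in the JavaScript target of that era, threaded code paths need a single-threaded fallback. A sketch of that gating idea using the __EMSCRIPTEN__ predefine; this illustrates the pattern only and is not SDL's actual thread backend.

#include <iostream>
#include <numeric>
#include <vector>
#ifndef __EMSCRIPTEN__
#include <thread>
#endif

// When compiled with emcc, __EMSCRIPTEN__ is defined and the work is done inline;
// otherwise a worker thread handles half of it.
long SumAll(const std::vector<int>& data) {
#ifdef __EMSCRIPTEN__
    // No pthread support assumed: single-threaded path.
    return std::accumulate(data.begin(), data.end(), 0L);
#else
    long half1 = 0, half2 = 0;
    std::size_t mid = data.size() / 2;
    std::thread worker([&] { half1 = std::accumulate(data.begin(), data.begin() + mid, 0L); });
    half2 = std::accumulate(data.begin() + mid, data.end(), 0L);
    worker.join();
    return half1 + half2;
#endif
}

int main() {
    std::vector<int> v(100, 1);
    std::cout << SumAll(v) << "\n";   // 100 on either build
}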
- set(OPT_DEF_ASM FALSE) - set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) - set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) - set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) - set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) - set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) - set(DLOPEN_ENABLED_BY_DEFAULT OFF) -else() - set(SDL_SHARED_ENABLED_BY_DEFAULT ON) - set(SDL_ATOMIC_ENABLED_BY_DEFAULT ON) - set(SDL_THREADS_ENABLED_BY_DEFAULT ON) - set(SDL_LOADSO_ENABLED_BY_DEFAULT ON) - set(SDL_CPUINFO_ENABLED_BY_DEFAULT ON) - set(DLOPEN_ENABLED_BY_DEFAULT ON) -endif() - -set(SDL_SUBSYSTEMS - Atomic Audio Video Render Events Joystick Haptic Power Threads Timers - File Loadso CPUinfo Filesystem) -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) - set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) - endif() - option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) -endforeach() - -option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") -#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) -set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) -set_option(GCC_ATOMICS "Use gcc builtin atomics" ${USE_GCC}) -set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) -set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) -set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) -set_option(3DNOW "Use 3Dnow! MMX assembly routines" ${OPT_DEF_ASM}) -set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) -set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) -set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) -set_option(DISKAUDIO "Support the disk writer audio driver" ON) -set_option(DUMMYAUDIO "Support the dummy audio driver" ON) -set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) -dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) -set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) -dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND_SHARED" OFF) -set_option(VIDEO_DUMMY "Use dummy video driver" ON) -set_option(VIDEO_OPENGL "Include OpenGL support" ON) -set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) -set_option(PTHREADS "Use POSIX threads for multi-threading" ${PTHREADS_ENABLED_BY_DEFAULT}) -dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) -set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${DLOPEN_ENABLED_BY_DEFAULT}) -set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) -set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) -dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) -set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) -dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) -set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) -dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) -set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) -dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) -set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) -set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) -set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) -set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) -set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) -set_option(INPUT_TSLIB "Use the 
Touchscreen library for input" ${UNIX_SYS}) -set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) -set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) -set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) -dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) -set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) -foreach(_SUB ${SDL_X11_OPTIONS}) - string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) - dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) -endforeach() -set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) -set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) -set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) - -# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here -# The options below are for compatibility to configure's default behaviour. -set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") -set(SDL_STATIC ON CACHE BOOL "Build a static version of the library") - -# General source files -file(GLOB SOURCE_FILES - ${SDL2_SOURCE_DIR}/src/*.c - ${SDL2_SOURCE_DIR}/src/atomic/*.c - ${SDL2_SOURCE_DIR}/src/audio/*.c - ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c - ${SDL2_SOURCE_DIR}/src/dynapi/*.c - ${SDL2_SOURCE_DIR}/src/events/*.c - ${SDL2_SOURCE_DIR}/src/file/*.c - ${SDL2_SOURCE_DIR}/src/libm/*.c - ${SDL2_SOURCE_DIR}/src/render/*.c - ${SDL2_SOURCE_DIR}/src/render/*/*.c - ${SDL2_SOURCE_DIR}/src/stdlib/*.c - ${SDL2_SOURCE_DIR}/src/thread/*.c - ${SDL2_SOURCE_DIR}/src/timer/*.c - ${SDL2_SOURCE_DIR}/src/video/*.c) - - -if(ASSERTIONS STREQUAL "auto") - # Do nada - use optimization settings to determine the assertion level -elseif(ASSERTIONS STREQUAL "disabled") - set(SDL_DEFAULT_ASSERT_LEVEL 0) -elseif(ASSERTIONS STREQUAL "release") - set(SDL_DEFAULT_ASSERT_LEVEL 1) -elseif(ASSERTIONS STREQUAL "enabled") - set(SDL_DEFAULT_ASSERT_LEVEL 2) -elseif(ASSERTIONS STREQUAL "paranoid") - set(SDL_DEFAULT_ASSERT_LEVEL 3) -else() - message_error("unknown assertion level") -endif() -set(HAVE_ASSERTIONS ${ASSERTIONS}) - -# Compiler option evaluation -if(USE_GCC OR USE_CLANG) - if(DEPENDENCY_TRACKING) - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 3 - #error Dependency tracking requires GCC 3.0 or newer - #endif - int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) - endif() - - if(GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - void *x, *y, *z; - __sync_lock_test_and_set(&a, 4); - __sync_lock_test_and_set(&x, y); - __sync_fetch_and_add(&a, 1); - __sync_bool_compare_and_swap(&a, 5, 10); - __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) - if(NOT HAVE_GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - __sync_lock_test_and_set(&a, 1); - __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) - endif() - endif() - - set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") - check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" - HAVE_GCC_PREFERRED_STACK_BOUNDARY) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror -Wno-error=implicit-function-declaration") - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 4 - #error SDL only uses visibility attributes in GCC 4 or newer - #endif - int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) - if(HAVE_GCC_FVISIBILITY) - list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") - endif() - 
set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - check_c_compiler_flag(-Wall HAVE_GCC_WALL) - if(HAVE_GCC_WALL) - if(HAIKU) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") - endif() - endif() -endif() - -if(ASSEMBLY) - if(USE_GCC OR USE_CLANG) - set(SDL_ASSEMBLY_ROUTINES 1) - # TODO: Those all seem to be quite GCC specific - needs to be - # reworked for better compiler support - set(HAVE_ASSEMBLY TRUE) - if(MMX) - set(CMAKE_REQUIRED_FLAGS "-mmmx") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __MMX__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_MMX) - if(HAVE_MMX) - list(APPEND EXTRA_CFLAGS "-mmmx") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(3DNOW) - set(CMAKE_REQUIRED_FLAGS "-m3dnow") - check_c_source_compiles(" - #include - #ifndef __3dNOW__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { - void *p = 0; - _m_prefetch(p); - }" HAVE_3DNOW) - if(HAVE_3DNOW) - list(APPEND EXTRA_CFLAGS "-m3dnow") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE) - set(CMAKE_REQUIRED_FLAGS "-msse") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE) - if(HAVE_SSE) - list(APPEND EXTRA_CFLAGS "-msse") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE2) - set(CMAKE_REQUIRED_FLAGS "-msse2") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE2__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE2) - if(HAVE_SSE2) - list(APPEND EXTRA_CFLAGS "-msse2") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSEMATH) - if(SSE OR SSE2) - if(USE_GCC) - list(APPEND EXTRA_CFLAGS "-mfpmath=387") - endif() - set(HAVE_SSEMATH TRUE) - endif() - endif() - - if(ALTIVEC) - set(CMAKE_REQUIRED_FLAGS "-maltivec") - check_c_source_compiles(" - #include - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) - check_c_source_compiles(" - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set - list(APPEND EXTRA_CFLAGS "-maltivec") - set(SDL_ALTIVEC_BLITTERS 1) - if(HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC_H 1) - endif() - endif() - endif() - elseif(MSVC_VERSION GREATER 1500) - # TODO: SDL_cpuinfo.h needs to support the user's configuration wish - # for MSVC - right now it is always activated - if(NOT ARCH_64) - set(HAVE_MMX TRUE) - set(HAVE_3DNOW TRUE) - endif() - set(HAVE_SSE TRUE) - set(HAVE_SSE2 TRUE) - set(SDL_ASSEMBLY_ROUTINES 1) - endif() -# TODO: -#else() -# if(USE_GCC OR USE_CLANG) -# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-mmx") -# endif() -endif() - -# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define -# anything. 
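The MMX, 3DNow!, SSE, SSE2 and Altivec checks above all use the same probe pattern: save CMAKE_REQUIRED_FLAGS, try to compile a tiny program with the candidate flag, restore the saved flags, and append the flag to EXTRA_CFLAGS only when the probe succeeds. Below is a minimal, self-contained sketch of that pattern for the SSE case; the <xmmintrin.h> header and the project name are assumptions for illustration (the include lines in the hunk above do not name their headers), not text recovered from the deleted file.

    # Sketch of the compiler-flag probe pattern, assuming GCC/Clang-style flags
    # and the usual SSE intrinsics header <xmmintrin.h> (an assumption).
    cmake_minimum_required(VERSION 3.5)
    project(sse_probe C)
    include(CheckCSourceCompiles)

    set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS})  # remember the global probe flags
    set(CMAKE_REQUIRED_FLAGS "-msse")                        # compile the test program with -msse
    check_c_source_compiles("
        #include <xmmintrin.h>
        #ifndef __SSE__
        #error Assembler CPP flag not enabled
        #endif
        int main(int argc, char **argv) { return 0; }" HAVE_SSE)
    set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS})   # restore so later checks are unaffected
    if(HAVE_SSE)
      list(APPEND EXTRA_CFLAGS "-msse")                      # only ship the flag when the probe passed
    endif()

CMAKE_REQUIRED_FLAGS is global state shared by every later check_*() call, which is why the script saves and restores it around each probe.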
-if(LIBC) - if(WINDOWS AND NOT MINGW) - set(HAVE_LIBC TRUE) - foreach(_HEADER stdio.h string.h ctype.h math.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." "_" _HAVE_H ${_UPPER}) - set(${_HAVE_H} 1) - endforeach() - set(HAVE_SIGNAL_H 1) - foreach(_FN - malloc calloc realloc free qsort abs memset memcpy memmove memcmp - strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp - _stricmp _strnicmp sscanf atan atan2 acos asin ceil copysign cos - cosf fabs floor log pow scalbn sin sinf sqrt sqrtf tan tanf) - string(TOUPPER ${_FN} _UPPER) - set(HAVE_${_UPPER} 1) - endforeach() - if(NOT CYGWIN AND NOT MINGW) - set(HAVE_ALLOCA 1) - endif() - set(HAVE_M_PI 1) - add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI - set(STDC_HEADERS 1) - else() - set(HAVE_LIBC TRUE) - check_include_file(sys/types.h HAVE_SYS_TYPES_H) - foreach(_HEADER - stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h - strings.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." "_" _HAVE_H ${_UPPER}) - check_include_file("${_HEADER}" ${_HAVE_H}) - endforeach() - - check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) - check_type_size("size_t" SIZEOF_SIZE_T) - check_symbol_exists(M_PI math.h HAVE_M_PI) - # TODO: refine the mprotect check - check_c_source_compiles("#include - #include - int main() { }" HAVE_MPROTECT) - foreach(_FN - strtod malloc calloc realloc free getenv setenv putenv unsetenv - qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat - strdup _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull - atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp - vsscanf vsnprintf fseeko fseeko64 sigaction setjmp - nanosleep sysconf sysctlbyname - ) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - - check_library_exists(m pow "" HAVE_LIBM) - if(HAVE_LIBM) - set(CMAKE_REQUIRED_LIBRARIES m) - foreach(_FN - atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin - sinf sqrt sqrtf tan tanf) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - set(CMAKE_REQUIRED_LIBRARIES) - list(APPEND EXTRA_LIBS m) - endif() - - check_library_exists(iconv iconv_open "" HAVE_LIBICONV) - if(HAVE_LIBICONV) - list(APPEND EXTRA_LIBS iconv) - endif() - - check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) - endif() -else() - if(WINDOWS) - set(HAVE_STDARG_H 1) - set(HAVE_STDDEF_H 1) - endif() -endif() - - -# Enable/disable various subsystems of the SDL library -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if(NOT SDL_${_OPT}) - set(SDL_${_OPT}_DISABLED 1) - endif() -endforeach() -if(SDL_JOYSTICK) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) -endif() -if(SDL_HAPTIC) - if(NOT SDL_JOYSTICK) - # Haptic requires some private functions from the joystick subsystem. 
- message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") - endif() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(SDL_POWER) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) -endif() -# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: -# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set -# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), -# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings - -# General SDL subsystem options, valid for all platforms -if(SDL_AUDIO) - # CheckDummyAudio/CheckDiskAudio - valid for all platforms - if(DUMMYAUDIO) - set(SDL_AUDIO_DRIVER_DUMMY 1) - file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) - set(HAVE_DUMMYAUDIO TRUE) - endif() - if(DISKAUDIO) - set(SDL_AUDIO_DRIVER_DISK 1) - file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) - set(HAVE_DISKAUDIO TRUE) - endif() -endif() - -if(SDL_DLOPEN) - # Relevant for Unix/Darwin only - if(UNIX OR APPLE) - CheckDLOPEN() - endif() -endif() - -if(SDL_VIDEO) - if(VIDEO_DUMMY) - set(SDL_VIDEO_DRIVER_DUMMY 1) - file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) - set(HAVE_VIDEO_DUMMY TRUE) - set(HAVE_SDL_VIDEO TRUE) - endif() -endif() - -# Platform-specific options and settings -if(EMSCRIPTEN) - # Hide noisy warnings that intend to aid mostly during initial stages of porting a new - # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
- add_definitions(-Wno-warn-absolute-paths) - if(SDL_AUDIO) - file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) - endif() - if(SDL_FILESYSTEM) - file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) - endif() - if(SDL_JOYSTICK) - file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) - endif() - if(SDL_POWER) - file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) - endif() - if(SDL_VIDEO) - file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) - endif() -elseif(UNIX AND NOT APPLE) - if(SDL_AUDIO) - if(SYSV5 OR SOLARIS OR HPUX) - set(SDL_AUDIO_DRIVER_SUNAUDIO 1) - file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(NETBSD OR OPENBSD) - set(SDL_AUDIO_DRIVER_BSD 1) - file(GLOB BSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/bsd/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${BSD_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(AIX) - set(SDL_AUDIO_DRIVER_PAUDIO 1) - file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - CheckOSS() - CheckALSA() - CheckPulseAudio() - CheckESD() - CheckARTS() - CheckNAS() - CheckSNDIO() - CheckFusionSound() - endif() - - if(SDL_VIDEO) - CheckX11() - CheckMir() - CheckDirectFB() - CheckOpenGLX11() - CheckOpenGLESX11() - CheckWayland() - endif() - - if(LINUX) - check_c_source_compiles(" - #include - #ifndef EVIOCGNAME - #error EVIOCGNAME() ioctl not available - #endif - int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS) - - check_c_source_compiles(" - #include - #include - - int main(int argc, char **argv) - { - struct kbentry kbe; - kbe.kb_table = KG_CTRL; - ioctl(0, KDGKBENT, &kbe); - }" HAVE_INPUT_KD) - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(HAVE_INPUT_EVENTS) - set(SDL_INPUT_LINUXEV 1) - endif() - - if(SDL_HAPTIC AND HAVE_INPUT_EVENTS) - set(SDL_HAPTIC_LINUX 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - - if(HAVE_INPUT_KD) - set(SDL_INPUT_LINUXKD 1) - endif() - - check_include_file("libudev.h" HAVE_LIBUDEV_H) - - # !!! FIXME: this needs pkg-config to find the include path, I think. - check_include_file("dbus/dbus.h" HAVE_DBUS_DBUS_H) - endif() - - if(INPUT_TSLIB) - check_c_source_compiles(" - #include \"tslib.h\" - int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB) - if(HAVE_INPUT_TSLIB) - set(SDL_INPUT_TSLIB 1) - list(APPEND EXTRA_LIBS ts) - endif() - endif() - - if(SDL_JOYSTICK) - CheckUSBHID() # seems to be BSD specific - limit the test to BSD only? 
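The HAVE_INPUT_EVENTS probe earlier in this hunk keys on the EVIOCGNAME() ioctl, but its #include line does not name a header. A standalone sketch of how such an evdev probe is normally written follows; the <linux/input.h> header is an assumption inferred from the ioctl being tested, not text taken from the deleted file.

    # Sketch: detect the Linux event-device (evdev) API at configure time.
    # Assumption: EVIOCGNAME is declared in <linux/input.h>.
    cmake_minimum_required(VERSION 3.5)
    project(evdev_probe C)
    include(CheckCSourceCompiles)

    check_c_source_compiles("
        #include <linux/input.h>
        #ifndef EVIOCGNAME
        #error EVIOCGNAME() ioctl not available
        #endif
        int main(int argc, char **argv) { return 0; }" HAVE_INPUT_EVENTS)

    if(HAVE_INPUT_EVENTS)
      set(SDL_INPUT_LINUXEV 1)  # the script above gates its evdev joystick/haptic sources on this
    endif()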
- if(LINUX) - set(SDL_JOYSTICK_LINUX 1) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - endif() - - CheckPTHREAD() - - if(CLOCK_GETTIME) - check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - list(APPEND EXTRA_LIBS rt) - set(HAVE_CLOCK_GETTIME 1) - else() - check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - set(HAVE_CLOCK_GETTIME 1) - endif() - endif() - endif() - - check_include_file(linux/version.h HAVE_LINUX_VERSION_H) - if(HAVE_LINUX_VERSION_H) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") - endif() - - if(SDL_POWER) - if(LINUX) - set(SDL_POWER_LINUX 1) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_UNIX 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(RPATH) - set(SDL_RLD_FLAGS "") - if(BSDI OR FREEBSD OR LINUX OR NETBSD) - set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") - elseif(SOLARIS) - set(SDL_RLD_FLAGS "-R\${libdir}") - endif() - set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) - set(HAVE_RPATH TRUE) - endif() - -elseif(WINDOWS) - find_program(WINDRES windres) - - check_c_source_compiles(" - #include - int main(int argc, char **argv) { }" HAVE_WIN32_CC) - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - # Check for DirectX - if(DIRECTX) - if("$ENV{DXSDK_DIR}" STREQUAL "") - message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") - endif() - set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") - check_include_file(d3d9.h HAVE_D3D_H) - check_include_file(d3d11_1.h HAVE_D3D11_H) - check_include_file(ddraw.h HAVE_DDRAW_H) - check_include_file(dsound.h HAVE_DSOUND_H) - check_include_file(dinput.h HAVE_DINPUT_H) - check_include_file(xaudio2.h HAVE_XAUDIO2_H) - check_include_file(dxgi.h HAVE_DXGI_H) - if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H OR HAVE_XAUDIO2_H) - set(HAVE_DIRECTX TRUE) - # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks - link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) - include_directories($ENV{DXSDK_DIR}\\Include) - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_WINMM 1) - file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - - if(HAVE_DSOUND_H) - set(SDL_AUDIO_DRIVER_DSOUND 1) - file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) - endif() - - if(HAVE_XAUDIO2_H) - set(SDL_AUDIO_DRIVER_XAUDIO2 1) - file(GLOB XAUDIO2_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/xaudio2/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${XAUDIO2_AUDIO_SOURCES}) - endif() - endif() - - if(SDL_VIDEO) - # requires SDL_LOADSO on Windows (IME, DX, etc.) 
- if(NOT SDL_LOADSO) - message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") - endif() - set(SDL_VIDEO_DRIVER_WINDOWS 1) - file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) - - if(RENDER_D3D AND HAVE_D3D_H) - set(SDL_VIDEO_RENDER_D3D 1) - set(HAVE_RENDER_D3D TRUE) - endif() - if(RENDER_D3D AND HAVE_D3D11_H) - set(SDL_VIDEO_RENDER_D3D11 1) - set(HAVE_RENDER_D3D TRUE) - endif() - set(HAVE_SDL_VIDEO TRUE) - endif() - - if(SDL_THREADS) - set(SDL_THREAD_WINDOWS 1) - set(SOURCE_FILES ${SOURCE_FILES} - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c - ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) - set(HAVE_SDL_THREADS TRUE) - endif() - - if(SDL_POWER) - set(SDL_POWER_WINDOWS 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) - set(HAVE_SDL_POWER TRUE) - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_WINDOWS 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Libraries for Win32 native and MinGW - list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) - - # TODO: in configure.in the check for timers is set on - # cygwin | mingw32* - does this include mingw32CE? - if(SDL_TIMERS) - set(SDL_TIMER_WINDOWS 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(SDL_LOADSO) - set(SDL_LOADSO_WINDOWS 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) - set(HAVE_SDL_LOADSO TRUE) - endif() - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(SDL_VIDEO) - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_WGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - if(SDL_JOYSTICK) - if(HAVE_DINPUT_H) - set(SDL_JOYSTICK_DINPUT 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_dxjoystick.c) - list(APPEND EXTRA_LIBS dinput8 dxguid dxerr) - else() - set(SDL_JOYSTICK_WINMM 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_mmjoystick.c) - endif() - set(HAVE_SDL_JOYSTICK TRUE) - endif() - - if(SDL_HAPTIC AND HAVE_DINPUT_H) - set(SDL_HAPTIC_DINPUT 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/haptic/windows/SDL_syshaptic.c) - set(HAVE_SDL_HAPTIC TRUE) - endif() - - file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) - if(MINGW OR CYGWIN) - list(APPEND EXTRA_LIBS mingw32) - list(APPEND EXTRA_LDFLAGS "-mwindows") - set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") - list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") - endif() -elseif(APPLE) - # TODO: rework this for proper MacOS X, iOS and Darwin support - - # Requires the darwin file implementation - if(SDL_FILE) - file(GLOB EXTRA_SOURCES ${PROJECT_SOURCE_DIR}/src/file/cocoa/*.m) - set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) - set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) - set(HAVE_SDL_FILE TRUE) - 
set(SDL_FRAMEWORK_COCOA 1) - else() - message_error("SDL_FILE must be enabled to build on MacOS X") - endif() - - if(SDL_AUDIO) - set(MACOSX_COREAUDIO 1) - file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - set(SDL_FRAMEWORK_COREAUDIO 1) - set(SDL_FRAMEWORK_AUDIOUNIT 1) - endif() - - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_IOKIT 1) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - endif() - - if(SDL_HAPTIC) - set(SDL_HAPTIC_IOKIT 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - if(NOT SDL_JOYSTICK) - message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") - endif() - endif() - - if(SDL_POWER) - set(SDL_POWER_MACOSX 1) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - set(SDL_FRAMEWORK_CARBON 1) - set(SDL_FRAMEWORK_IOKIT 1) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_COCOA 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) - set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Actually load the frameworks at the end so we don't duplicate include. - if(SDL_FRAMEWORK_COCOA) - find_library(COCOA_LIBRARY Cocoa) - list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) - endif() - if(SDL_FRAMEWORK_IOKIT) - find_library(IOKIT IOKit) - list(APPEND EXTRA_LIBS ${IOKIT}) - endif() - if(SDL_FRAMEWORK_FF) - find_library(FORCEFEEDBACK ForceFeedback) - list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) - endif() - if(SDL_FRAMEWORK_CARBON) - find_library(CARBON_LIBRARY Carbon) - list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) - endif() - if(SDL_FRAMEWORK_COREAUDIO) - find_library(COREAUDIO CoreAudio) - list(APPEND EXTRA_LIBS ${COREAUDIO}) - endif() - if(SDL_FRAMEWORK_AUDIOUNIT) - find_library(AUDIOUNIT AudioUnit) - list(APPEND EXTRA_LIBS ${AUDIOUNIT}) - endif() - - # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
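The block above collects Cocoa, IOKit, ForceFeedback, Carbon, CoreAudio and AudioUnit with find_library() and funnels the results into EXTRA_LIBS, which target_link_libraries() consumes further down. A minimal sketch of that flow; the "demo" target and demo.c source are placeholders, not names from the deleted file.

    # Sketch of linking macOS frameworks the same way the script does.
    cmake_minimum_required(VERSION 3.5)
    project(framework_demo C)

    find_library(COCOA_LIBRARY Cocoa)   # resolves to .../Cocoa.framework; CMake links it as "-framework Cocoa"
    find_library(IOKIT_LIBRARY IOKit)

    add_library(demo SHARED demo.c)     # placeholder target and source
    if(COCOA_LIBRARY AND IOKIT_LIBRARY)
      target_link_libraries(demo ${COCOA_LIBRARY} ${IOKIT_LIBRARY})
    endif()

Deferring the find_library() calls until every SDL_FRAMEWORK_* switch has been decided is what the "Actually load the frameworks at the end" comment above refers to: each framework is looked up and appended to EXTRA_LIBS once, no matter how many subsystems requested it.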
- if(SDL_VIDEO) - CheckCOCOA() - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_CGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - if(DARWIN) - find_library(OpenGL_LIBRARY OpenGL) - list(APPEND EXTRA_LIBRARIES ${OpenGL_LIBRARY}) - endif() - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - CheckPTHREAD() -elseif(HAIKU) - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_HAIKU 1) - file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - set(SDL_FILESYSTEM_HAIKU 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - - if(SDL_TIMERS) - set(SDL_TIMER_HAIKU 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(VIDEO_OPENGL) - # TODO: Use FIND_PACKAGE(OpenGL) instead - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_BGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - list(APPEND EXTRA_LIBS GL) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - CheckPTHREAD() -endif() - -# Dummies -# configure.in does it differently: -# if not have X -# if enable_X { SDL_X_DISABLED = 1 } -# [add dummy sources] -# so it always adds a dummy, without checking, if it was actually requested. -# This leads to missing internal references on building, since the -# src/X/*.c does not get included. -if(NOT HAVE_SDL_JOYSTICK) - set(SDL_JOYSTICK_DISABLED 1) - if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX - - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - endif() -endif() -if(NOT HAVE_SDL_HAPTIC) - set(SDL_HAPTIC_DISABLED 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(NOT HAVE_SDL_LOADSO) - set(SDL_LOADSO_DISABLED 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) -endif() -if(NOT HAVE_SDL_FILESYSTEM) - set(SDL_FILESYSTEM_DISABLED 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) -endif() - -# We always need to have threads and timers around -if(NOT HAVE_SDL_THREADS) - set(SDL_THREADS_DISABLED 1) - file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) -endif() -if(NOT HAVE_SDL_TIMERS) - set(SDL_TIMERS_DISABLED 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) -endif() - -if(NOT SDLMAIN_SOURCES) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) -endif() - -# Append the -MMD -MT flags -# if(DEPENDENCY_TRACKING) -# if(COMPILER_IS_GNUCC) -# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") -# endif() -# endif() - -configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" - "${SDL2_BINARY_DIR}/include/SDL_config.h") - -# Prepare the flags and remove duplicates -if(EXTRA_LDFLAGS) - list(REMOVE_DUPLICATES EXTRA_LDFLAGS) -endif() -if(EXTRA_LIBS) - list(REMOVE_DUPLICATES EXTRA_LIBS) -endif() -if(EXTRA_CFLAGS) - list(REMOVE_DUPLICATES EXTRA_CFLAGS) -endif() -listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) -set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) - -# Compat helpers for the configuration files -if(NOT WINDOWS OR 
CYGWIN) - # TODO: we need a Windows script, too - execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) - - set(prefix ${CMAKE_INSTALL_PREFIX}) - set(exec_prefix "\${prefix}") - set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") - set(bindir "\${exec_prefix}/bin") - set(includedir "\${prefix}/include") - if(SDL_STATIC) - set(ENABLE_STATIC_TRUE "") - set(ENABLE_STATIC_FALSE "#") - else() - set(ENABLE_STATIC_TRUE "#") - set(ENABLE_STATIC_FALSE "") - endif() - if(SDL_SHARED) - set(ENABLE_SHARED_TRUE "") - set(ENABLE_SHARED_FALSE "#") - else() - set(ENABLE_SHARED_TRUE "#") - set(ENABLE_SHARED_FALSE "") - endif() - - # Clean up the different lists - listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") - set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) - list(REMOVE_DUPLICATES SDL_STATIC_LIBS) - listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) - set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) - listtostr(SDL_LIBS _SDL_LIBS) - set(SDL_LIBS ${_SDL_LIBS}) - - # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") - # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") - - configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" - "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config") - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" - "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) -endif() - -##### Info output ##### -message(STATUS "") -message(STATUS "SDL2 was configured with the following options:") -message(STATUS "") -message(STATUS "Platform: ${CMAKE_SYSTEM}") -message(STATUS "64-bit: ${ARCH_64}") -message(STATUS "Compiler: ${CMAKE_C_COMPILER}") -message(STATUS "") -message(STATUS "Subsystems:") -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - message_bool_option(${_SUB} SDL_${_OPT}) -endforeach() -message(STATUS "") -message(STATUS "Options:") -list(SORT ALLOPTIONS) -foreach(_OPT ${ALLOPTIONS}) - # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters - # Get the padding - string(LENGTH ${_OPT} _OPTLEN) - math(EXPR _PADLEN "23 - ${_OPTLEN}") - string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) - message_tested_option(${_OPT} ${_PADDING}) -endforeach() -message(STATUS "") -message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") -message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") -message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") -message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") -message(STATUS "") -message(STATUS " Build Shared Library: ${SDL_SHARED}") -message(STATUS " Build Static Library: ${SDL_STATIC}") -message(STATUS "") -if(UNIX) - message(STATUS "If something was not detected, although the libraries") - message(STATUS "were installed, then make sure you have set the") - message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") - message(STATUS "") -endif() - -# Ensure that the extra cflags are used at compile time -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") - -# Always build SDLmain -add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) -set(_INSTALL_LIBS "SDL2main") - -if(SDL_SHARED) - add_library(SDL2 SHARED ${SOURCE_FILES}) - if(UNIX) - set_target_properties(SDL2 PROPERTIES - VERSION ${LT_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2-${LT_RELEASE}") - else() - set_target_properties(SDL2 PROPERTIES - VERSION ${SDL_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2") - endif() - set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) - target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) -endif() - -if(SDL_STATIC) 
- set (BUILD_SHARED_LIBS FALSE) - add_library(SDL2-static STATIC ${SOURCE_FILES}) - set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") - if(MSVC) - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") - endif() - # TODO: Win32 platforms keep the same suffix .lib for import and static - # libraries - do we need to consider this? - set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) - target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) -endif() - -##### Installation targets ##### -install(TARGETS ${_INSTALL_LIBS} - LIBRARY DESTINATION "lib${LIB_SUFFIX}" - ARCHIVE DESTINATION "lib${LIB_SUFFIX}") - -file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) -file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) -foreach(_FNAME ${BIN_INCLUDE_FILES}) - get_filename_component(_INCNAME ${_FNAME} NAME) - list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) -endforeach() -list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) -install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) - -if(NOT WINDOWS OR CYGWIN) - if(SDL_SHARED) - install(CODE " - execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink - \"libSDL2-2.0.so\" \"libSDL2.so\")") - install(FILES ${SDL2_BINARY_DIR}/libSDL2.so DESTINATION "lib${LIB_SUFFIX}") - endif() - if(FREEBSD) - # FreeBSD uses ${PREFIX}/libdata/pkgconfig - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") - else() - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc - DESTINATION "lib${LIB_SUFFIX}/pkgconfig") - endif() - install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) - # TODO: what about the .spec file? Is it only needed for RPM creation? - install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "share/aclocal") -else() - install(TARGETS SDL2 RUNTIME DESTINATION bin) -endif() - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt deleted file mode 100644 index bbad766..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt +++ /dev/null @@ -1,1849 +0,0 @@ - - -cmake_minimum_required(VERSION 2.8.11) -project(SDL2 C) - -# !!! FIXME: this should probably do "MACOSX_RPATH ON" as a target property -# !!! FIXME: for the SDL2 shared library (so you get an -# !!! FIXME: install_name ("soname") of "@rpath/libSDL-whatever.dylib" -# !!! FIXME: instead of "/usr/local/lib/libSDL-whatever.dylib"), but I'm -# !!! FIXME: punting for now and leaving the existing behavior. Until this -# !!! FIXME: properly resolved, this line silences a warning in CMake 3.0+. -# !!! FIXME: remove it and this comment entirely once the problem is -# !!! FIXME: properly resolved. 
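The FIXME above describes the preferred fix: set MACOSX_RPATH as a target property on the shared library so it gets an @rpath-based install name ("@rpath/libSDL-whatever.dylib" in the comment's words) instead of an absolute /usr/local/lib path. A hedged sketch of that alternative, with placeholder target and source names; it is not code from the deleted file.

    # Sketch of the MACOSX_RPATH alternative the FIXME describes.
    cmake_minimum_required(VERSION 3.5)
    project(rpath_demo C)

    add_library(sdl_like SHARED demo.c)  # placeholder target and source
    set_target_properties(sdl_like PROPERTIES
      MACOSX_RPATH ON                    # install name becomes @rpath/<dylib name> on macOS
      OUTPUT_NAME "SDL2-2.0")
    # With policy CMP0042 set to NEW, MACOSX_RPATH defaults to ON for new targets;
    # the cmake_policy(SET CMP0042 OLD) line below keeps the old behaviour and
    # silences the CMake 3.0+ warning the FIXME mentions.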
-#cmake_policy(SET CMP0042 OLD) - -include(CheckFunctionExists) -include(CheckLibraryExists) -include(CheckIncludeFiles) -include(CheckIncludeFile) -include(CheckSymbolExists) -include(CheckCSourceCompiles) -include(CheckCSourceRuns) -include(CheckCCompilerFlag) -include(CheckTypeSize) -include(CheckStructHasMember) -include(CMakeDependentOption) -include(FindPkgConfig) -include(GNUInstallDirs) -set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") -include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) -include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) - -# General settings -# Edit include/SDL_version.h and change the version, then: -# SDL_MICRO_VERSION += 1; -# SDL_INTERFACE_AGE += 1; -# SDL_BINARY_AGE += 1; -# if any functions have been added, set SDL_INTERFACE_AGE to 0. -# if backwards compatibility has been broken, -# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. -set(SDL_MAJOR_VERSION 2) -set(SDL_MINOR_VERSION 0) -set(SDL_MICRO_VERSION 8) -set(SDL_INTERFACE_AGE 0) -set(SDL_BINARY_AGE 8) -set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") - -# Set defaults preventing destination file conflicts -set(SDL_CMAKE_DEBUG_POSTFIX "d" - CACHE STRING "Name suffix for debug builds") - -mark_as_advanced(CMAKE_IMPORT_LIBRARY_SUFFIX SDL_CMAKE_DEBUG_POSTFIX) - -# Calculate a libtool-like version number -math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") -set(LT_REVISION "${SDL_INTERFACE_AGE}") -set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") -set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") - -message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") - -# General settings & flags -set(LIBRARY_OUTPUT_DIRECTORY "build") -# Check for 64 or 32 bit -set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) -if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(ARCH_64 TRUE) - set(PROCESSOR_ARCH "x64") -else() - set(ARCH_64 FALSE) - set(PROCESSOR_ARCH "x86") -endif() -set(LIBNAME SDL2) -if(NOT LIBTYPE) - set(LIBTYPE SHARED) -endif() - -# Get the platform -if(WIN32) - if(NOT WINDOWS) - set(WINDOWS TRUE) - endif() -elseif(UNIX AND NOT APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") - set(LINUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") - set(NETBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") - set(OPENBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") - set(GNU TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") - set(BSDI TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") - set(SYSV5 TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") - set(SOLARIS TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") - set(HPUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") - set(AIX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") - set(MINIX TRUE) - endif() -elseif(APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") - set(DARWIN TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") - set(MACOSX TRUE) - endif() - # TODO: iOS? 
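For the 2.0.8 numbers set above (SDL_MICRO_VERSION 8, SDL_INTERFACE_AGE 0, SDL_BINARY_AGE 8), the libtool-style arithmetic reduces to LT_CURRENT 8, LT_AGE 8, LT_MAJOR 0 and LT_VERSION "0.8.0". A small script-mode sketch that reproduces the computation (runnable with cmake -P); it restates the math above rather than adding anything new.

    # Worked example of the libtool-like version math, using the 2.0.8 values above.
    cmake_minimum_required(VERSION 3.5)
    set(SDL_MICRO_VERSION 8)
    set(SDL_INTERFACE_AGE 0)
    set(SDL_BINARY_AGE 8)
    math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}")  # 8
    math(EXPR LT_AGE     "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}")     # 8
    math(EXPR LT_MAJOR   "${LT_CURRENT} - ${LT_AGE}")                    # 0
    set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${SDL_INTERFACE_AGE}")         # LT_REVISION is the interface age
    message(STATUS "libtool-style version: ${LT_VERSION}")               # prints 0.8.0

The message(STATUS ...) line in the file prints the same values, which is a quick way to confirm the bookkeeping after bumping SDL_MICRO_VERSION, SDL_INTERFACE_AGE or SDL_BINARY_AGE as the comment block above instructs.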
-elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") - message_error("BeOS support has been removed as of SDL 2.0.2.") -elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") - set(HAIKU TRUE) -endif() - -# Don't mistake osx for unix -if(UNIX AND NOT APPLE) - set(UNIX_SYS ON) -else() - set(UNIX_SYS OFF) -endif() - -if(UNIX OR APPLE) - set(UNIX_OR_MAC_SYS ON) -else() - set(UNIX_OR_MAC_SYS OFF) -endif() - -if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. - set(SDL_PTHREADS_ENABLED_BY_DEFAULT ON) -else() - set(SDL_PTHREADS_ENABLED_BY_DEFAULT OFF) -endif() - -# Default option knobs -if(APPLE OR ARCH_64) - if(NOT "${CMAKE_OSX_ARCHITECTURES}" MATCHES "arm") - set(OPT_DEF_SSEMATH ON) - endif() -endif() -if(UNIX OR MINGW OR MSYS) - set(OPT_DEF_LIBC ON) -endif() - -# Compiler info -if(CMAKE_COMPILER_IS_GNUCC) - set(USE_GCC TRUE) - set(OPT_DEF_ASM TRUE) -elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") - set(USE_CLANG TRUE) - set(OPT_DEF_ASM TRUE) -elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ - set(OPT_DEF_ASM TRUE) - #set(CMAKE_C_FLAGS "/ZI /WX- / -else() - set(OPT_DEF_ASM FALSE) -endif() - -if(USE_GCC OR USE_CLANG) - set(OPT_DEF_GCC_ATOMICS ON) -endif() - -# Default flags, if not set otherwise -if("$ENV{CFLAGS}" STREQUAL "") - if(CMAKE_BUILD_TYPE STREQUAL "") - if(USE_GCC OR USE_CLANG) - set(CMAKE_C_FLAGS "-g -O3") - endif() - endif() -else() - set(CMAKE_C_FLAGS "$ENV{CFLAGS}") - list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") -endif() -if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 - list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") -endif() - -if(MSVC) - option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) - if(FORCE_STATIC_VCRT) - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) - if(${flag_var} MATCHES "/MD") - string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") - endif() - endforeach() - endif() - - # Make sure /RTC1 is disabled, otherwise it will use functions from the CRT - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) - string(REGEX REPLACE "/RTC(su|[1su])" "" ${flag_var} "${${flag_var}}") - endforeach(flag_var) -endif() - -# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, -# etc. are created correctly. -set(SDL_LIBS "-lSDL2") -set(SDL_CFLAGS "") - -# Emscripten toolchain has a nonempty default value for this, and the checks -# in this file need to change that, so remember the original value, and -# restore back to that afterwards. For check_function_exists() to work in -# Emscripten, this value must be at its default value. 
-set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) - -if(CYGWIN) - # We build SDL on cygwin without the UNIX emulation layer - include_directories("-I/usr/include/mingw") - set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mno-cygwin") - check_c_source_compiles("int main(int argc, char **argv) {}" - HAVE_GCC_NO_CYGWIN) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_GCC_NO_CYGWIN) - list(APPEND EXTRA_LDFLAGS "-mno-cygwin") - list(APPEND SDL_LIBS "-mno-cygwin") - endif() - set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") -endif() - -add_definitions(-DUSING_GENERATED_CONFIG_H) -# General includes -include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) -if(USE_GCC OR USE_CLANG) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -idirafter ${SDL2_SOURCE_DIR}/src/video/khronos") -else() - include_directories(${SDL2_SOURCE_DIR}/src/video/khronos) -endif() - -# All these ENABLED_BY_DEFAULT vars will default to ON if not specified, so -# you only need to have a platform override them if they are disabling. -set(OPT_DEF_ASM TRUE) -if(EMSCRIPTEN) - # Set up default values for the currently supported set of subsystems: - # Emscripten/Javascript does not have assembly support, a dynamic library - # loading architecture, low-level CPU inspection or multithreading. - set(OPT_DEF_ASM FALSE) - set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) - set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) - set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) - set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) - set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) - set(SDL_DLOPEN_ENABLED_BY_DEFAULT OFF) -endif() - -if (NOT DEFINED SDL_SHARED_ENABLED_BY_DEFAULT) - set(SDL_SHARED_ENABLED_BY_DEFAULT ON) -endif() - -set(SDL_SUBSYSTEMS - Atomic Audio Video Render Events Joystick Haptic Power Threads Timers - File Loadso CPUinfo Filesystem Dlopen) -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) - set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) - endif() - option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) -endforeach() - -option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") -#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) -set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) -set_option(GCC_ATOMICS "Use gcc builtin atomics" ${OPT_DEF_GCC_ATOMICS}) -set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) -set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) -set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) -set_option(3DNOW "Use 3Dnow! 
MMX assembly routines" ${OPT_DEF_ASM}) -set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) -set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) -set_option(SSE3 "Use SSE3 assembly routines" ${OPT_DEF_SSEMATH}) -set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) -set_option(DISKAUDIO "Support the disk writer audio driver" ON) -set_option(DUMMYAUDIO "Support the dummy audio driver" ON) -set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) -dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) -set_option(VIDEO_DUMMY "Use dummy video driver" ON) -set_option(VIDEO_OPENGL "Include OpenGL support" ON) -set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) -set_option(PTHREADS "Use POSIX threads for multi-threading" ${SDL_PTHREADS_ENABLED_BY_DEFAULT}) -dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) -set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${SDL_DLOPEN_ENABLED_BY_DEFAULT}) -set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) -set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) -dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) -set_option(JACK "Support the JACK audio API" ${UNIX_SYS}) -dep_option(JACK_SHARED "Dynamically load JACK audio support" ON "JACK" OFF) -set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) -dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) -set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) -dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) -set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) -dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) -set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) -set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) -set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) -set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) -dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND" OFF) -set_option(LIBSAMPLERATE "Use libsamplerate for audio rate conversion" ${UNIX_SYS}) -dep_option(LIBSAMPLERATE_SHARED "Dynamically load libsamplerate" ON "LIBSAMPLERATE" OFF) -set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) -set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) -set_option(INPUT_TSLIB "Use the Touchscreen library for input" ${UNIX_SYS}) -set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) -set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) -dep_option(WAYLAND_SHARED "Dynamically load Wayland support" ON "VIDEO_WAYLAND" OFF) -dep_option(VIDEO_WAYLAND_QT_TOUCH "QtWayland server support for Wayland video driver" ON "VIDEO_WAYLAND" OFF) -set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) -dep_option(MIR_SHARED "Dynamically load Mir support" ON "VIDEO_MIR" OFF) -set_option(VIDEO_RPI "Use Raspberry Pi video driver" ${UNIX_SYS}) -dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) -set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) -foreach(_SUB ${SDL_X11_OPTIONS}) - string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) - dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) -endforeach() -set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) -set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) -set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) -set_option(VIDEO_VIVANTE 
"Use Vivante EGL video driver" ${UNIX_SYS}) -dep_option(VIDEO_VULKAN "Enable Vulkan support" ON "ANDROID OR APPLE OR LINUX OR WINDOWS" OFF) -set_option(VIDEO_KMSDRM "Use KMS DRM video driver" ${UNIX_SYS}) -dep_option(KMSDRM_SHARED "Dynamically load KMS DRM support" ON "VIDEO_KMSDRM" OFF) - -# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here -# The options below are for compatibility to configure's default behaviour. -# set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") -set(SDL_SHARED ON CACHE BOOL "Build a shared version of the library") -set(SDL_STATIC OFF CACHE BOOL "Build a static version of the library") - -dep_option(SDL_STATIC_PIC "Static version of the library should be built with Position Independent Code" OFF "SDL_STATIC" OFF) -set_option(SDL_TEST "Build the test directory" OFF) - - -# General source files -file(GLOB SOURCE_FILES - ${SDL2_SOURCE_DIR}/src/*.c - ${SDL2_SOURCE_DIR}/src/atomic/*.c - ${SDL2_SOURCE_DIR}/src/audio/*.c - ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c - ${SDL2_SOURCE_DIR}/src/dynapi/*.c - ${SDL2_SOURCE_DIR}/src/events/*.c - ${SDL2_SOURCE_DIR}/src/file/*.c - ${SDL2_SOURCE_DIR}/src/libm/*.c - ${SDL2_SOURCE_DIR}/src/render/*.c - ${SDL2_SOURCE_DIR}/src/render/*/*.c - ${SDL2_SOURCE_DIR}/src/stdlib/*.c - ${SDL2_SOURCE_DIR}/src/thread/*.c - ${SDL2_SOURCE_DIR}/src/timer/*.c - ${SDL2_SOURCE_DIR}/src/video/*.c - ${SDL2_SOURCE_DIR}/src/video/yuv2rgb/*.c) - - -if(ASSERTIONS STREQUAL "auto") - # Do nada - use optimization settings to determine the assertion level -elseif(ASSERTIONS STREQUAL "disabled") - set(SDL_DEFAULT_ASSERT_LEVEL 0) -elseif(ASSERTIONS STREQUAL "release") - set(SDL_DEFAULT_ASSERT_LEVEL 1) -elseif(ASSERTIONS STREQUAL "enabled") - set(SDL_DEFAULT_ASSERT_LEVEL 2) -elseif(ASSERTIONS STREQUAL "paranoid") - set(SDL_DEFAULT_ASSERT_LEVEL 3) -else() - message_error("unknown assertion level") -endif() -set(HAVE_ASSERTIONS ${ASSERTIONS}) - -# Compiler option evaluation -if(USE_GCC OR USE_CLANG) - # Check for -Wall first, so later things can override pieces of it. 
- check_c_compiler_flag(-Wall HAVE_GCC_WALL) - if(HAVE_GCC_WALL) - list(APPEND EXTRA_CFLAGS "-Wall") - if(HAIKU) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") - endif() - endif() - - check_c_compiler_flag(-Wdeclaration-after-statement HAVE_GCC_WDECLARATION_AFTER_STATEMENT) - if(HAVE_GCC_WDECLARATION_AFTER_STATEMENT) - check_c_compiler_flag(-Werror=declaration-after-statement HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) - if(HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) - list(APPEND EXTRA_CFLAGS "-Werror=declaration-after-statement") - endif() - list(APPEND EXTRA_CFLAGS "-Wdeclaration-after-statement") - endif() - - if(DEPENDENCY_TRACKING) - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 3 - #error Dependency tracking requires GCC 3.0 or newer - #endif - int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) - endif() - - if(GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - void *x, *y, *z; - __sync_lock_test_and_set(&a, 4); - __sync_lock_test_and_set(&x, y); - __sync_fetch_and_add(&a, 1); - __sync_bool_compare_and_swap(&a, 5, 10); - __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) - if(NOT HAVE_GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - __sync_lock_test_and_set(&a, 1); - __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) - endif() - endif() - - set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") - check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" - HAVE_GCC_PREFERRED_STACK_BOUNDARY) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror") - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 4 - #error SDL only uses visibility attributes in GCC 4 or newer - #endif - int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) - if(HAVE_GCC_FVISIBILITY) - list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - check_c_compiler_flag(-Wshadow HAVE_GCC_WSHADOW) - if(HAVE_GCC_WSHADOW) - list(APPEND EXTRA_CFLAGS "-Wshadow") - endif() - - if(APPLE) - list(APPEND EXTRA_LDFLAGS "-Wl,-undefined,error") - else() - set(CMAKE_REQUIRED_FLAGS "-Wl,--no-undefined") - check_c_compiler_flag("" HAVE_NO_UNDEFINED) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_NO_UNDEFINED) - list(APPEND EXTRA_LDFLAGS "-Wl,--no-undefined") - endif() - endif() -endif() - -if(ASSEMBLY) - if(USE_GCC OR USE_CLANG) - set(SDL_ASSEMBLY_ROUTINES 1) - # TODO: Those all seem to be quite GCC specific - needs to be - # reworked for better compiler support - set(HAVE_ASSEMBLY TRUE) - if(MMX) - set(CMAKE_REQUIRED_FLAGS "-mmmx") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __MMX__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_MMX) - if(HAVE_MMX) - list(APPEND EXTRA_CFLAGS "-mmmx") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(3DNOW) - set(CMAKE_REQUIRED_FLAGS "-m3dnow") - check_c_source_compiles(" - #include - #ifndef __3dNOW__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { - void *p = 0; - _m_prefetch(p); - }" HAVE_3DNOW) - if(HAVE_3DNOW) - list(APPEND EXTRA_CFLAGS "-m3dnow") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE) - 
set(CMAKE_REQUIRED_FLAGS "-msse") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE) - if(HAVE_SSE) - list(APPEND EXTRA_CFLAGS "-msse") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE2) - set(CMAKE_REQUIRED_FLAGS "-msse2") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE2__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE2) - if(HAVE_SSE2) - list(APPEND EXTRA_CFLAGS "-msse2") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE3) - set(CMAKE_REQUIRED_FLAGS "-msse3") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE3__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE3) - if(HAVE_SSE3) - list(APPEND EXTRA_CFLAGS "-msse3") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(NOT SSEMATH) - if(SSE OR SSE2 OR SSE3) - if(USE_GCC) - check_c_compiler_flag(-mfpmath=387 HAVE_FP_387) - if(HAVE_FP_387) - list(APPEND EXTRA_CFLAGS "-mfpmath=387") - endif() - endif() - set(HAVE_SSEMATH TRUE) - endif() - endif() - - check_include_file("immintrin.h" HAVE_IMMINTRIN_H) - - if(ALTIVEC) - set(CMAKE_REQUIRED_FLAGS "-maltivec") - check_c_source_compiles(" - #include - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) - check_c_source_compiles(" - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set - list(APPEND EXTRA_CFLAGS "-maltivec") - set(SDL_ALTIVEC_BLITTERS 1) - if(HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC_H 1) - endif() - endif() - endif() - elseif(MSVC_VERSION GREATER 1500) - # TODO: SDL_cpuinfo.h needs to support the user's configuration wish - # for MSVC - right now it is always activated - if(NOT ARCH_64) - set(HAVE_MMX TRUE) - set(HAVE_3DNOW TRUE) - endif() - set(HAVE_SSE TRUE) - set(HAVE_SSE2 TRUE) - set(HAVE_SSE3 TRUE) - set(SDL_ASSEMBLY_ROUTINES 1) - endif() -# TODO: -#else() -# if(USE_GCC OR USE_CLANG) -# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-sse3" "-mno-mmx") -# endif() -endif() - -# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define -# anything. -if(LIBC) - if(WINDOWS AND NOT MINGW) - set(HAVE_LIBC TRUE) - foreach(_HEADER stdio.h string.h wchar.h ctype.h math.h limits.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." 
"_" _HAVE_H ${_UPPER}) - set(${_HAVE_H} 1) - endforeach() - set(HAVE_SIGNAL_H 1) - foreach(_FN - malloc calloc realloc free qsort abs memset memcpy memmove memcmp - wcslen wcscmp - strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp - _stricmp _strnicmp sscanf - acos acosf asin asinf atan atanf atan2 atan2f ceil ceilf - copysign copysignf cos cosf fabs fabsf floor floorf fmod fmodf - log logf log10 log10f pow powf scalbn scalbnf sin sinf sqrt sqrtf tan tanf) - string(TOUPPER ${_FN} _UPPER) - set(HAVE_${_UPPER} 1) - endforeach() - if(NOT CYGWIN AND NOT MINGW) - set(HAVE_ALLOCA 1) - endif() - set(HAVE_M_PI 1) - add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI - set(STDC_HEADERS 1) - else() - set(HAVE_LIBC TRUE) - check_include_file(sys/types.h HAVE_SYS_TYPES_H) - foreach(_HEADER - stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h limits.h - strings.h wchar.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h libunwind.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." "_" _HAVE_H ${_UPPER}) - check_include_file("${_HEADER}" ${_HAVE_H}) - endforeach() - - check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) - check_type_size("size_t" SIZEOF_SIZE_T) - check_symbol_exists(M_PI math.h HAVE_M_PI) - # TODO: refine the mprotect check - check_c_source_compiles("#include - #include - int main() { }" HAVE_MPROTECT) - foreach(_FN - strtod malloc calloc realloc free getenv setenv putenv unsetenv - qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat - _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull - atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp - vsscanf vsnprintf fopen64 fseeko fseeko64 sigaction setjmp - nanosleep sysconf sysctlbyname getauxval poll - ) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - - check_library_exists(m pow "" HAVE_LIBM) - if(HAVE_LIBM) - set(CMAKE_REQUIRED_LIBRARIES m) - foreach(_FN - atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin - sinf sqrt sqrtf tan tanf acos asin) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - set(CMAKE_REQUIRED_LIBRARIES) - list(APPEND EXTRA_LIBS m) - endif() - - check_library_exists(iconv iconv_open "" HAVE_LIBICONV) - if(HAVE_LIBICONV) - list(APPEND EXTRA_LIBS iconv) - set(HAVE_ICONV 1) - endif() - - if(NOT APPLE) - check_include_file(alloca.h HAVE_ALLOCA_H) - check_function_exists(alloca HAVE_ALLOCA) - else() - set(HAVE_ALLOCA_H 1) - set(HAVE_ALLOCA 1) - endif() - - check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) - endif() -else() - if(WINDOWS) - set(HAVE_STDARG_H 1) - set(HAVE_STDDEF_H 1) - endif() -endif() - - -# Enable/disable various subsystems of the SDL library -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if(NOT SDL_${_OPT}) - set(SDL_${_OPT}_DISABLED 1) - endif() -endforeach() -if(SDL_JOYSTICK) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) -endif() -if(SDL_HAPTIC) - if(NOT SDL_JOYSTICK) - # Haptic requires some private functions from the joystick subsystem. 
- message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") - endif() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(SDL_POWER) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) -endif() -# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: -# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set -# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), -# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings - -# General SDL subsystem options, valid for all platforms -if(SDL_AUDIO) - # CheckDummyAudio/CheckDiskAudio - valid for all platforms - if(DUMMYAUDIO) - set(SDL_AUDIO_DRIVER_DUMMY 1) - file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) - set(HAVE_DUMMYAUDIO TRUE) - endif() - if(DISKAUDIO) - set(SDL_AUDIO_DRIVER_DISK 1) - file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) - set(HAVE_DISKAUDIO TRUE) - endif() -endif() - -if(SDL_DLOPEN) - # Relevant for Unix/Darwin only - if(UNIX OR APPLE) - CheckDLOPEN() - endif() -endif() - -if(SDL_VIDEO) - if(VIDEO_DUMMY) - set(SDL_VIDEO_DRIVER_DUMMY 1) - file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) - set(HAVE_VIDEO_DUMMY TRUE) - set(HAVE_SDL_VIDEO TRUE) - endif() -endif() - -if(ANDROID) - file(GLOB ANDROID_CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_CORE_SOURCES}) - - # SDL_spinlock.c Needs to be compiled in ARM mode. - # There seems to be no better way currently to set the ARM mode. 
- # see: https://issuetracker.google.com/issues/62264618 - # Another option would be to set ARM mode to all compiled files - check_c_compiler_flag(-marm HAVE_ARM_MODE) - if(HAVE_ARM_MODE) - set_source_files_properties(${SDL2_SOURCE_DIR}/src/atomic/SDL_spinlock.c PROPERTIES COMPILE_FLAGS -marm) - endif() - - file(GLOB ANDROID_MAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/android/*.c) - set(SDLMAIN_SOURCES ${SDLMAIN_SOURCES} ${ANDROID_MAIN_SOURCES}) - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_ANDROID 1) - file(GLOB ANDROID_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_ANDROID 1) - file(GLOB ANDROID_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - if(SDL_HAPTIC) - set(SDL_HAPTIC_ANDROID 1) - file(GLOB ANDROID_HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_ANDROID 1) - file(GLOB ANDROID_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/android/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - if(SDL_LOADSO) - set(SDL_LOADSO_DLOPEN 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dlopen/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) - set(HAVE_SDL_LOADSO TRUE) - endif() - if(SDL_POWER) - set(SDL_POWER_ANDROID 1) - file(GLOB ANDROID_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_ANDROID 1) - file(GLOB ANDROID_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_VIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - # Core stuff - find_library(ANDROID_DL_LIBRARY dl) - find_library(ANDROID_LOG_LIBRARY log) - find_library(ANDROID_LIBRARY_LIBRARY android) - list(APPEND EXTRA_LIBS ${ANDROID_DL_LIBRARY} ${ANDROID_LOG_LIBRARY} ${ANDROID_LIBRARY_LIBRARY}) - add_definitions(-DGL_GLEXT_PROTOTYPES) - - #enable gles - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(HAVE_VIDEO_OPENGLES TRUE) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - - find_library(OpenGLES1_LIBRARY GLESv1_CM) - find_library(OpenGLES2_LIBRARY GLESv2) - list(APPEND EXTRA_LIBS ${OpenGLES1_LIBRARY} ${OpenGLES2_LIBRARY}) - endif() - - CHECK_C_SOURCE_COMPILES(" - #if defined(__ARM_ARCH) && __ARM_ARCH < 7 - #error Vulkan doesn't work on this configuration - #endif - int main() - { - return 0; - } - " VULKAN_PASSED_ANDROID_CHECKS) - if(NOT VULKAN_PASSED_ANDROID_CHECKS) - set(VIDEO_VULKAN OFF) - message(STATUS "Vulkan doesn't work on this configuration") - endif() - endif() - - CheckPTHREAD() - -endif() - -# Platform-specific options and settings -if(EMSCRIPTEN) - # Hide noisy warnings that intend to aid mostly during initial stages of porting a new - # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
- add_definitions(-Wno-warn-absolute-paths) - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_EMSCRIPTEN 1) - file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_EMSCRIPTEN 1) - file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_EMSCRIPTEN 1) - file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - if(SDL_POWER) - set(SDL_POWER_EMSCRIPTEN 1) - file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - - if(CLOCK_GETTIME) - set(HAVE_CLOCK_GETTIME 1) - endif() - endif() - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_EMSCRIPTEN 1) - file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - #enable gles - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(HAVE_VIDEO_OPENGLES TRUE) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - endif() - endif() -elseif(UNIX AND NOT APPLE AND NOT ANDROID) - if(SDL_AUDIO) - if(SYSV5 OR SOLARIS OR HPUX) - set(SDL_AUDIO_DRIVER_SUNAUDIO 1) - file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(NETBSD) - set(SDL_AUDIO_DRIVER_NETBSD 1) - file(GLOB NETBSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/netbsd/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${NETBSD_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(AIX) - set(SDL_AUDIO_DRIVER_PAUDIO 1) - file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - CheckOSS() - CheckALSA() - CheckJACK() - CheckPulseAudio() - CheckESD() - CheckARTS() - CheckNAS() - CheckSNDIO() - CheckFusionSound() - CheckLibSampleRate() - endif() - - if(SDL_VIDEO) - # Need to check for Raspberry PI first and add platform specific compiler flags, otherwise the test for GLES fails! 
- CheckRPI()
- CheckX11()
- CheckMir()
- CheckDirectFB()
- CheckOpenGLX11()
- CheckOpenGLESX11()
- CheckWayland()
- CheckVivante()
- CheckKMSDRM()
- endif()
-
- if(UNIX)
- file(GLOB CORE_UNIX_SOURCES ${SDL2_SOURCE_DIR}/src/core/unix/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${CORE_UNIX_SOURCES})
- endif()
-
- if(LINUX)
- check_c_source_compiles("
- #include <linux/input.h>
- #ifndef EVIOCGNAME
- #error EVIOCGNAME() ioctl not available
- #endif
- int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS)
-
- check_c_source_compiles("
- #include <linux/kd.h>
- #include <linux/keyboard.h>
-
- int main(int argc, char **argv)
- {
- struct kbentry kbe;
- kbe.kb_table = KG_CTRL;
- ioctl(0, KDGKBENT, &kbe);
- }" HAVE_INPUT_KD)
-
- file(GLOB CORE_LINUX_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${CORE_LINUX_SOURCES})
-
- if(HAVE_INPUT_EVENTS)
- set(SDL_INPUT_LINUXEV 1)
- endif()
-
- if(SDL_HAPTIC AND HAVE_INPUT_EVENTS)
- set(SDL_HAPTIC_LINUX 1)
- file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES})
- set(HAVE_SDL_HAPTIC TRUE)
- endif()
-
- if(HAVE_INPUT_KD)
- set(SDL_INPUT_LINUXKD 1)
- endif()
-
- check_include_file("libudev.h" HAVE_LIBUDEV_H)
-
- if(PKG_CONFIG_FOUND)
- pkg_search_module(DBUS dbus-1 dbus)
- if(DBUS_FOUND)
- set(HAVE_DBUS_DBUS_H TRUE)
- include_directories(${DBUS_INCLUDE_DIRS})
- list(APPEND EXTRA_LIBS ${DBUS_LIBRARIES})
- endif()
-
- pkg_search_module(IBUS ibus-1.0 ibus)
- if(IBUS_FOUND)
- set(HAVE_IBUS_IBUS_H TRUE)
- include_directories(${IBUS_INCLUDE_DIRS})
- list(APPEND EXTRA_LIBS ${IBUS_LIBRARIES})
- endif()
- endif()
-
- check_include_file("fcitx/frontend.h" HAVE_FCITX_FRONTEND_H)
- endif()
-
- if(INPUT_TSLIB)
- check_c_source_compiles("
- #include \"tslib.h\"
- int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB)
- if(HAVE_INPUT_TSLIB)
- set(SDL_INPUT_TSLIB 1)
- list(APPEND EXTRA_LIBS ts)
- endif()
- endif()
-
- if(SDL_JOYSTICK)
- CheckUSBHID() # seems to be BSD specific - limit the test to BSD only?
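pkg_search_module(DBUS dbus-1 dbus) above takes the first module name pkg-config can resolve and feeds its include directories and libraries into the build. The same lookup can be reproduced by hand; a sketch that shells out to pkg-config (assumes the pkg-config binary is installed; the helper name is illustrative):

import subprocess

def pkg_search_module(*candidates):
    # Return (cflags, libs) for the first module pkg-config knows about,
    # mirroring how the DBUS/IBUS results end up in EXTRA_LIBS above.
    for name in candidates:
        if subprocess.run(["pkg-config", "--exists", name]).returncode == 0:
            def query(opt):
                out = subprocess.run(["pkg-config", opt, name],
                                     capture_output=True, text=True)
                return out.stdout.split()
            return query("--cflags"), query("--libs")
    return None

print(pkg_search_module("dbus-1", "dbus"))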
- if(LINUX AND NOT ANDROID) - set(SDL_JOYSTICK_LINUX 1) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - endif() - - CheckPTHREAD() - - if(CLOCK_GETTIME) - check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - list(APPEND EXTRA_LIBS rt) - set(HAVE_CLOCK_GETTIME 1) - else() - check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - set(HAVE_CLOCK_GETTIME 1) - endif() - endif() - endif() - - check_include_file(linux/version.h HAVE_LINUX_VERSION_H) - if(HAVE_LINUX_VERSION_H) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") - endif() - - if(SDL_POWER) - if(LINUX) - set(SDL_POWER_LINUX 1) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_UNIX 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(RPATH) - set(SDL_RLD_FLAGS "") - if(BSDI OR FREEBSD OR LINUX OR NETBSD) - set(CMAKE_REQUIRED_FLAGS "-Wl,--enable-new-dtags") - check_c_compiler_flag("" HAVE_ENABLE_NEW_DTAGS) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_ENABLE_NEW_DTAGS) - set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir} -Wl,--enable-new-dtags") - else() - set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") - endif() - elseif(SOLARIS) - set(SDL_RLD_FLAGS "-R\${libdir}") - endif() - set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) - set(HAVE_RPATH TRUE) - endif() - -elseif(WINDOWS) - find_program(WINDRES windres) - - check_c_source_compiles(" - #include - int main(int argc, char **argv) { }" HAVE_WIN32_CC) - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(MSVC) - # Prevent codegen that would use the VC runtime libraries. - set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/GS-") - if(NOT ARCH_64) - set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/arch:SSE") - endif() - endif() - - # Check for DirectX - if(DIRECTX) - if(DEFINED MSVC_VERSION AND NOT ${MSVC_VERSION} LESS 1700) - set(USE_WINSDK_DIRECTX TRUE) - endif() - if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) - if("$ENV{DXSDK_DIR}" STREQUAL "") - message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") - endif() - set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") - endif() - - if(HAVE_WIN32_CC) - # xinput.h may need windows.h, but doesn't include it itself. 
- check_c_source_compiles("
- #include <windows.h>
- #include <xinput.h>
- int main(int argc, char **argv) { }" HAVE_XINPUT_H)
- check_c_source_compiles("
- #include <windows.h>
- #include <xinput.h>
- XINPUT_GAMEPAD_EX x1;
- int main(int argc, char **argv) { }" HAVE_XINPUT_GAMEPAD_EX)
- check_c_source_compiles("
- #include <windows.h>
- #include <xinput.h>
- XINPUT_STATE_EX s1;
- int main(int argc, char **argv) { }" HAVE_XINPUT_STATE_EX)
- else()
- check_include_file(xinput.h HAVE_XINPUT_H)
- endif()
-
- check_include_file(d3d9.h HAVE_D3D_H)
- check_include_file(d3d11_1.h HAVE_D3D11_H)
- check_include_file(ddraw.h HAVE_DDRAW_H)
- check_include_file(dsound.h HAVE_DSOUND_H)
- check_include_file(dinput.h HAVE_DINPUT_H)
- check_include_file(mmdeviceapi.h HAVE_MMDEVICEAPI_H)
- check_include_file(audioclient.h HAVE_AUDIOCLIENT_H)
- check_include_file(dxgi.h HAVE_DXGI_H)
- if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H)
- set(HAVE_DIRECTX TRUE)
- if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX)
- # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks
- link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH})
- include_directories($ENV{DXSDK_DIR}\\Include)
- endif()
- endif()
- set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS})
- endif()
-
- if(SDL_AUDIO)
- set(SDL_AUDIO_DRIVER_WINMM 1)
- file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES})
- set(HAVE_SDL_AUDIO TRUE)
-
- if(HAVE_DSOUND_H)
- set(SDL_AUDIO_DRIVER_DSOUND 1)
- file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES})
- endif()
-
- if(HAVE_AUDIOCLIENT_H AND HAVE_MMDEVICEAPI_H)
- set(SDL_AUDIO_DRIVER_WASAPI 1)
- file(GLOB WASAPI_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/wasapi/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${WASAPI_AUDIO_SOURCES})
- endif()
- endif()
-
- if(SDL_VIDEO)
- # requires SDL_LOADSO on Windows (IME, DX, etc.)
- if(NOT SDL_LOADSO)
- message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled")
- endif()
- set(SDL_VIDEO_DRIVER_WINDOWS 1)
- file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES})
-
- if(RENDER_D3D AND HAVE_D3D_H)
- set(SDL_VIDEO_RENDER_D3D 1)
- set(HAVE_RENDER_D3D TRUE)
- endif()
- if(RENDER_D3D AND HAVE_D3D11_H)
- set(SDL_VIDEO_RENDER_D3D11 1)
- set(HAVE_RENDER_D3D TRUE)
- endif()
- set(HAVE_SDL_VIDEO TRUE)
- endif()
-
- if(SDL_THREADS)
- set(SDL_THREAD_WINDOWS 1)
- set(SOURCE_FILES ${SOURCE_FILES}
- ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c
- ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c
- ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c
- ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c
- ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c)
- set(HAVE_SDL_THREADS TRUE)
- endif()
-
- if(SDL_POWER)
- set(SDL_POWER_WINDOWS 1)
- set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c)
- set(HAVE_SDL_POWER TRUE)
- endif()
-
- if(SDL_FILESYSTEM)
- set(SDL_FILESYSTEM_WINDOWS 1)
- file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c)
- set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES})
- set(HAVE_SDL_FILESYSTEM TRUE)
- endif()
-
- # Libraries for Win32 native and MinGW
- list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid)
-
- # TODO: in configure.in the check for timers is set on
- # cygwin | mingw32* - does this include mingw32CE?
- if(SDL_TIMERS) - set(SDL_TIMER_WINDOWS 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(SDL_LOADSO) - set(SDL_LOADSO_WINDOWS 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) - set(HAVE_SDL_LOADSO TRUE) - endif() - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(SDL_VIDEO) - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_WGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - set(HAVE_VIDEO_OPENGLES TRUE) - endif() - endif() - - if(SDL_JOYSTICK) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - if(HAVE_DINPUT_H) - set(SDL_JOYSTICK_DINPUT 1) - list(APPEND EXTRA_LIBS dinput8) - if(CMAKE_COMPILER_IS_MINGW) - list(APPEND EXTRA_LIBS dxerr8) - elseif (NOT USE_WINSDK_DIRECTX) - list(APPEND EXTRA_LIBS dxerr) - endif() - endif() - if(HAVE_XINPUT_H) - set(SDL_JOYSTICK_XINPUT 1) - endif() - if(NOT HAVE_DINPUT_H AND NOT HAVE_XINPUT_H) - set(SDL_JOYSTICK_WINMM 1) - endif() - set(HAVE_SDL_JOYSTICK TRUE) - - if(SDL_HAPTIC) - if(HAVE_DINPUT_H OR HAVE_XINPUT_H) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/windows/*.c) - if(HAVE_DINPUT_H) - set(SDL_HAPTIC_DINPUT 1) - endif() - if(HAVE_XINPUT_H) - set(SDL_HAPTIC_XINPUT 1) - endif() - else() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SDL_HAPTIC_DUMMY 1) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - endif() - - file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) - if(MINGW OR CYGWIN) - list(APPEND EXTRA_LIBS mingw32) - list(APPEND EXTRA_LDFLAGS "-mwindows") - set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") - list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") - endif() -elseif(APPLE) - # TODO: rework this all for proper MacOS X, iOS and Darwin support - - # We always need these libs on macOS at the moment. - # !!! FIXME: we need Carbon for some very old API calls in - # !!! FIXME: src/video/cocoa/SDL_cocoakeyboard.c, but we should figure out - # !!! FIXME: how to dump those. - if(NOT IOS) - set(SDL_FRAMEWORK_COCOA 1) - set(SDL_FRAMEWORK_CARBON 1) - endif() - - # Requires the darwin file implementation - if(SDL_FILE) - file(GLOB EXTRA_SOURCES ${SDL2_SOURCE_DIR}/src/file/cocoa/*.m) - set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) - # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. - set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) - set(HAVE_SDL_FILE TRUE) - # !!! FIXME: why is COREVIDEO inside this if() block? - set(SDL_FRAMEWORK_COREVIDEO 1) - else() - message_error("SDL_FILE must be enabled to build on MacOS X") - endif() - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_COREAUDIO 1) - file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.m) - # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. 
- set_source_files_properties(${AUDIO_SOURCES} PROPERTIES LANGUAGE C) - set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - set(SDL_FRAMEWORK_COREAUDIO 1) - set(SDL_FRAMEWORK_AUDIOTOOLBOX 1) - endif() - - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_IOKIT 1) - if (IOS) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/iphoneos/*.m ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) - else() - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - endif() - - if(SDL_HAPTIC) - set(SDL_HAPTIC_IOKIT 1) - if (IOS) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SDL_HAPTIC_DUMMY 1) - else() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - if(NOT SDL_JOYSTICK) - message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") - endif() - endif() - - if(SDL_POWER) - set(SDL_POWER_MACOSX 1) - if (IOS) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/uikit/*.m) - else() - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_COCOA 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) - # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. - set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Actually load the frameworks at the end so we don't duplicate include. - if(SDL_FRAMEWORK_COREVIDEO) - find_library(COREVIDEO CoreVideo) - list(APPEND EXTRA_LIBS ${COREVIDEO}) - endif() - if(SDL_FRAMEWORK_COCOA) - find_library(COCOA_LIBRARY Cocoa) - list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) - endif() - if(SDL_FRAMEWORK_IOKIT) - find_library(IOKIT IOKit) - list(APPEND EXTRA_LIBS ${IOKIT}) - endif() - if(SDL_FRAMEWORK_FF) - find_library(FORCEFEEDBACK ForceFeedback) - list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) - endif() - if(SDL_FRAMEWORK_CARBON) - find_library(CARBON_LIBRARY Carbon) - list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) - endif() - if(SDL_FRAMEWORK_COREAUDIO) - find_library(COREAUDIO CoreAudio) - list(APPEND EXTRA_LIBS ${COREAUDIO}) - endif() - if(SDL_FRAMEWORK_AUDIOTOOLBOX) - find_library(AUDIOTOOLBOX AudioToolbox) - list(APPEND EXTRA_LIBS ${AUDIOTOOLBOX}) - endif() - - # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
- if(SDL_VIDEO) - if (IOS) - set(SDL_VIDEO_DRIVER_UIKIT 1) - file(GLOB UIKITVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/uikit/*.m) - set(SOURCE_FILES ${SOURCE_FILES} ${UIKITVIDEO_SOURCES}) - else() - CheckCOCOA() - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_CGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - set(HAVE_VIDEO_OPENGLES TRUE) - endif() - endif() - endif() - - CheckPTHREAD() -elseif(HAIKU) - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_HAIKU 1) - file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - set(SDL_FILESYSTEM_HAIKU 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - - if(SDL_TIMERS) - set(SDL_TIMER_HAIKU 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(VIDEO_OPENGL) - # TODO: Use FIND_PACKAGE(OpenGL) instead - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_BGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - list(APPEND EXTRA_LIBS GL) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - CheckPTHREAD() -endif() - -if(VIDEO_VULKAN) - set(SDL_VIDEO_VULKAN 1) -endif() - -# Dummies -# configure.in does it differently: -# if not have X -# if enable_X { SDL_X_DISABLED = 1 } -# [add dummy sources] -# so it always adds a dummy, without checking, if it was actually requested. -# This leads to missing internal references on building, since the -# src/X/*.c does not get included. 
-if(NOT HAVE_SDL_JOYSTICK) - set(SDL_JOYSTICK_DISABLED 1) - if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX - - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - endif() -endif() -if(NOT HAVE_SDL_HAPTIC) - set(SDL_HAPTIC_DISABLED 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(NOT HAVE_SDL_LOADSO) - set(SDL_LOADSO_DISABLED 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) -endif() -if(NOT HAVE_SDL_FILESYSTEM) - set(SDL_FILESYSTEM_DISABLED 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) -endif() - -# We always need to have threads and timers around -if(NOT HAVE_SDL_THREADS) - set(SDL_THREADS_DISABLED 1) - file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) -endif() -if(NOT HAVE_SDL_TIMERS) - set(SDL_TIMERS_DISABLED 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) -endif() - -if(NOT SDLMAIN_SOURCES) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) -endif() - -# Append the -MMD -MT flags -# if(DEPENDENCY_TRACKING) -# if(COMPILER_IS_GNUCC) -# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") -# endif() -# endif() - -configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" - "${SDL2_BINARY_DIR}/include/SDL_config.h") - -# Prepare the flags and remove duplicates -if(EXTRA_LDFLAGS) - list(REMOVE_DUPLICATES EXTRA_LDFLAGS) -endif() -if(EXTRA_LIBS) - list(REMOVE_DUPLICATES EXTRA_LIBS) -endif() -if(EXTRA_CFLAGS) - list(REMOVE_DUPLICATES EXTRA_CFLAGS) -endif() -listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) -set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) - -# Compat helpers for the configuration files -if(NOT WINDOWS OR CYGWIN) - # TODO: we need a Windows script, too - execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) - - set(prefix ${CMAKE_INSTALL_PREFIX}) - set(exec_prefix "\${prefix}") - set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") - set(bindir "\${exec_prefix}/bin") - set(includedir "\${prefix}/include") - if(SDL_STATIC) - set(ENABLE_STATIC_TRUE "") - set(ENABLE_STATIC_FALSE "#") - else() - set(ENABLE_STATIC_TRUE "#") - set(ENABLE_STATIC_FALSE "") - endif() - if(SDL_SHARED) - set(ENABLE_SHARED_TRUE "") - set(ENABLE_SHARED_FALSE "#") - else() - set(ENABLE_SHARED_TRUE "#") - set(ENABLE_SHARED_FALSE "") - endif() - - # Clean up the different lists - listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") - set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) - list(REMOVE_DUPLICATES SDL_STATIC_LIBS) - listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) - set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) - listtostr(SDL_LIBS _SDL_LIBS) - set(SDL_LIBS ${_SDL_LIBS}) - - # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") - # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") - - configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" - "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config") - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" - "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) -endif() - -##### Info output ##### -message(STATUS "") -message(STATUS "SDL2 was 
configured with the following options:") -message(STATUS "") -message(STATUS "Platform: ${CMAKE_SYSTEM}") -message(STATUS "64-bit: ${ARCH_64}") -message(STATUS "Compiler: ${CMAKE_C_COMPILER}") -message(STATUS "") -message(STATUS "Subsystems:") -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - message_bool_option(${_SUB} SDL_${_OPT}) -endforeach() -message(STATUS "") -message(STATUS "Options:") -list(SORT ALLOPTIONS) -foreach(_OPT ${ALLOPTIONS}) - # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters - # Get the padding - string(LENGTH ${_OPT} _OPTLEN) - math(EXPR _PADLEN "23 - ${_OPTLEN}") - string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) - message_tested_option(${_OPT} ${_PADDING}) -endforeach() -message(STATUS "") -message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") -message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") -message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") -message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") -message(STATUS "") -message(STATUS " Build Shared Library: ${SDL_SHARED}") -message(STATUS " Build Static Library: ${SDL_STATIC}") -if(SDL_STATIC) - message(STATUS " Build Static Library with Position Independent Code: ${SDL_STATIC_PIC}") -endif() -message(STATUS "") -if(UNIX) - message(STATUS "If something was not detected, although the libraries") - message(STATUS "were installed, then make sure you have set the") - message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") - message(STATUS "") -endif() - -# Ensure that the extra cflags are used at compile time -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") - -# Always build SDLmain -add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) -target_include_directories(SDL2main PUBLIC $) -set(_INSTALL_LIBS "SDL2main") -if (NOT ANDROID) - set_target_properties(SDL2main PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) -endif() - -if(SDL_SHARED) - add_library(SDL2 SHARED ${SOURCE_FILES} ${VERSION_SOURCES}) - if(APPLE) - set_target_properties(SDL2 PROPERTIES MACOSX_RPATH 1) - elseif(UNIX AND NOT ANDROID) - set_target_properties(SDL2 PROPERTIES - VERSION ${LT_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2-${LT_RELEASE}") - else() - set_target_properties(SDL2 PROPERTIES - VERSION ${SDL_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2") - endif() - if(MSVC AND NOT LIBC) - # Don't try to link with the default set of libraries. - set_target_properties(SDL2 PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") - set_target_properties(SDL2 PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") - set_target_properties(SDL2 PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") - endif() - set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) - target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) - target_include_directories(SDL2 PUBLIC $) - if (NOT ANDROID) - set_target_properties(SDL2 PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) - endif() -endif() - -if(SDL_STATIC) - set (BUILD_SHARED_LIBS FALSE) - add_library(SDL2-static STATIC ${SOURCE_FILES}) - if (NOT SDL_SHARED OR NOT WIN32) - set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") - # Note: Apparently, OUTPUT_NAME must really be unique; even when - # CMAKE_IMPORT_LIBRARY_SUFFIX or the like are given. Otherwise - # the static build may race with the import lib and one will get - # clobbered, when the suffix is realized via subsequent rename. 
- endif() - set_target_properties(SDL2-static PROPERTIES POSITION_INDEPENDENT_CODE ${SDL_STATIC_PIC}) - if(MSVC AND NOT LIBC) - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") - endif() - # TODO: Win32 platforms keep the same suffix .lib for import and static - # libraries - do we need to consider this? - set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) - target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) - target_include_directories(SDL2-static PUBLIC $) - if (NOT ANDROID) - set_target_properties(SDL2-static PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) - endif() -endif() - -##### Tests ##### - -if(SDL_TEST) - file(GLOB TEST_SOURCES ${SDL2_SOURCE_DIR}/src/test/*.c) - add_library(SDL2_test STATIC ${TEST_SOURCES}) - - add_subdirectory(test) -endif() - -##### Installation targets ##### -install(TARGETS ${_INSTALL_LIBS} EXPORT SDL2Targets - LIBRARY DESTINATION "lib${LIB_SUFFIX}" - ARCHIVE DESTINATION "lib${LIB_SUFFIX}" - RUNTIME DESTINATION bin) - -##### Export files ##### -if (APPLE) - set(PKG_PREFIX "SDL2.framework/Resources") -elseif (WINDOWS) - set(PKG_PREFIX "cmake") -else () - set(PKG_PREFIX "lib/cmake/SDL2") -endif () - -include(CMakePackageConfigHelpers) -write_basic_package_version_file("${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake" - VERSION ${SDL_VERSION} - COMPATIBILITY AnyNewerVersion -) - -install(EXPORT SDL2Targets - FILE SDL2Targets.cmake - NAMESPACE SDL2:: - DESTINATION ${PKG_PREFIX} -) -install( - FILES - ${CMAKE_CURRENT_SOURCE_DIR}/SDL2Config.cmake - ${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake - DESTINATION ${PKG_PREFIX} - COMPONENT Devel -) - -file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) -file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) -foreach(_FNAME ${BIN_INCLUDE_FILES}) - get_filename_component(_INCNAME ${_FNAME} NAME) - list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) -endforeach() -list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) -install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) - -if(NOT (WINDOWS OR CYGWIN)) - if(SDL_SHARED) - if (APPLE) - set(SOEXT "dylib") - else() - set(SOEXT "so") - endif() - if(NOT ANDROID) - install(CODE " - execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink - \"libSDL2-2.0.${SOEXT}\" \"libSDL2.${SOEXT}\")") - install(FILES ${SDL2_BINARY_DIR}/libSDL2.${SOEXT} DESTINATION "lib${LIB_SUFFIX}") - endif() - endif() - if(FREEBSD) - # FreeBSD uses ${PREFIX}/libdata/pkgconfig - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") - else() - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc - DESTINATION "lib${LIB_SUFFIX}/pkgconfig") - endif() - install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) - # TODO: what about the .spec file? Is it only needed for RPM creation? 
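The include-file install a few lines above prefers headers generated in the build tree (such as the configured SDL_config.h) over same-named headers from the source tree before copying everything into include/SDL2. That selection is just a de-duplication by basename; a sketch of the equivalent logic, with illustrative paths:

import glob
import os

src_headers = glob.glob("SDL2/include/*.h")    # headers shipped in the source tree
bin_headers = glob.glob("build/include/*.h")   # generated headers, e.g. SDL_config.h
generated = {os.path.basename(h) for h in bin_headers}

# Drop any source header that the build tree overrides, then add the generated ones.
install_set = [h for h in src_headers if os.path.basename(h) not in generated]
install_set += bin_headers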
- install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "${CMAKE_INSTALL_FULL_DATAROOTDIR}/aclocal") -endif() - -##### Uninstall target ##### - -if(NOT TARGET uninstall) - configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in" - "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" - IMMEDIATE @ONLY) - - add_custom_target(uninstall - COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) -endif() - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/sync.sh b/node_modules/npm-mas-mas/cmaki_generator/sync.sh deleted file mode 100644 index 6ad62d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/sync.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -#pattern=*.py CMakeLists.txt -pattern="utils.cmake common.yml CMakeLists.txt *.py" -other_dir=$1 - -for i in $(ls $pattern); do - if [ -f $other_dir/$i ]; then - diff $i $other_dir/$i > /dev/null || meld $i $other_dir/$i - fi -done - diff --git a/node_modules/npm-mas-mas/cmaki_generator/third_party.py b/node_modules/npm-mas-mas/cmaki_generator/third_party.py deleted file mode 100644 index aaad57e..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/third_party.py +++ /dev/null @@ -1,1508 +0,0 @@ -import os -import sys -import utils -import logging -import traceback -import datetime -import hash_version -import copy -import fnmatch - - -class InvalidPlatform(Exception): - def __init__(self, plat): - self._plat = plat - def __str__(self): - return "Invalid platform detected: %s" % self._plat - - -class DontExistsFile(Exception): - def __init__(self, source_filename): - self._source_filename = source_filename - def __str__(self): - return 'Dont exists file %s' % self._source_filename - - -class FailPrepare(Exception): - def __init__(self, node): - self._node = node - def __str__(self): - return 'Failing preparing package: %s' % self._node.get_package_name() - - -class AmbiguationLibs(Exception): - def __init__(self, kind, package, build_mode): - self._kind = kind - self._package = package - self._build_mode = build_mode - def __str__(self): - return "Ambiguation in %s in %s. Mode: %s. Candidates:" % (self._kind, self._package, self._build_mode) - - -class NotFoundInDataset(Exception): - def __init__(self, msg): - self._msg = msg - def __str__(self): - return "%s" % self._msg - - -class FailThirdParty(Exception): - def __init__(self, msg): - self._msg = msg - def __str__(self): - return "%s" % self._msg - - -class Found(Exception): - pass - - -def prepare_cmakefiles(cmake_files): - if not os.path.isdir(cmake_files): - logging.error('Invalid cmake files: {}'.format(camkefiles)) - - -def get_identifier(mode): - env = os.environ.copy() - cmaki_pwd = env['CMAKI_PWD'] - if utils.is_windows(): - script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.exe') - else: - script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.sh') - if not os.path.isfile(script_identifier): - raise Exception("there is no {} script".format(script_identifier)) - env['CMAKI_INFO'] = mode - return list(utils.get_stdout(script_identifier, env=env))[0] - - -def search_fuzzy(data, fuzzy_key, fallback='default'): - for key in data: - if fnmatch.fnmatch(fuzzy_key, key): - return data[key] - else: - if fallback in data: - return data[fallback] - else: - logging.error("not found 'default' platform or %s" % fuzzy_key) - raise Exception("not found '{}'".format(fuzzy_key)) - - -if 'MODE' not in os.environ: - logging.warning('Using Debug by default. 
For explicit use, define environment var MODE') - os.environ['MODE'] = 'Debug' - -if 'CMAKI_INSTALL' not in os.environ: - logging.warning('Using CMAKI_INSTALL by default. For explicit use, define environment var CMAKI_INSTALL') - os.environ['CMAKI_INSTALL'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier', 'bin') - -if 'CMAKI_PWD' not in os.environ: - logging.warning('Using CMAKI_PWD by default. For explicit use, define environment var CMAKI_PWD') - os.environ['CMAKI_PWD'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier') - - -# -# INMUTABLE GLOBALS -# - -CMAKELIB_URL='https://github.com/makiolo/cmaki.git' -prefered = {} -prefered['Debug'] = ['Debug', 'RelWithDebInfo', 'Release'] -prefered['RelWithDebInfo'] = ['RelWithDebInfo', 'Release', 'Debug'] -prefered['Release'] = ['Release', 'RelWithDebInfo', 'Debug'] -magic_invalid_file = '__not_found__' -exceptions_fail_group = (OSError, IOError, ) -exceptions_fail_program = (KeyboardInterrupt, ) -uncompress_strip_default = '.' -uncompress_prefix_default = '.' -priority_default = 50 -build_unittests_foldername = 'unittest' -# detect platform -platform = get_identifier('ALL') -arch = get_identifier('ARCH') -operative_system = get_identifier('OS') -somask_id = operative_system[0] -archs = {platform: arch} -platforms = [platform] -logging.info('Detecting platform from script like: {} / {}'.format(platform, arch)) - -alias_priority_name = { 10: 'minimal', - 20: 'tools', - 30: 'third_party' } -alias_priority_name_inverse = {v: k for k, v in alias_priority_name.items()} - - -def is_valid(package_name, mask): - return (mask.find(somask_id) != -1) and (package_name != 'dummy') - - -def is_blacklisted(blacklist_file, no_blacklist, package_name): - blacklisted = False - if os.path.exists(blacklist_file): - with open(blacklist_file, 'rt') as f: - for line in f.readlines(): - if line.strip() == package_name: - blacklisted = True - break - # --no-blacklist can annular effect of blacklist - if blacklisted and (package_name in no_blacklist): - blacklisted = False - return blacklisted - - -class ThirdParty: - def __init__(self, user_parameters, name, parameters): - self.user_parameters = user_parameters - self.name = name - self.parameters = parameters - self.depends = [] - self.exceptions = [] - self.interrupted = False - self.ret = 0 # Initial return code - self.fail_stage = "" - self.blacklisted = is_blacklisted(self.user_parameters.blacklist, self.user_parameters.no_blacklist, self.get_package_name()) - self.published_invalidation = False - - - def __hash__(self): - return hash((self.get_package_name(), self.get_priority(), self.get_mask())) - - - def __eq__(self, other): - return (self.get_package_name() == other.get_package_name()) and (self.get_priority() == other.get_priority()) and (self.get_mask() == other.get_mask()) - - - def __ne__(self, other): - return not self.__eq__(other) - - - def __repr__(self): - return "%s (%s)" % (self.get_package_name(), self.get_mask()) - - - def __str__(self): - return "%s (%s)" % (self.get_package_name(), self.get_mask()) - - - def get_uncompress_strip(self, pos = 0): - try: - if isinstance(self.parameters['uncompress_strip'], list): - return self.parameters['uncompress_strip'][pos] - else: - return self.parameters['uncompress_strip'] - except KeyError: - # default value - return uncompress_strip_default - - - def get_uncompress_prefix(self, pos = 0): - try: - if isinstance(self.parameters['uncompress_prefix'], list): - return self.parameters['uncompress_prefix'][pos] - else: - return 
self.parameters['uncompress_prefix'] - except KeyError: - # default value - return uncompress_prefix_default - - - def get_uncompress(self, pos = 0): - try: - if self.parameters['uncompress'] is not None: - if isinstance(self.parameters['uncompress'], list): - return self.parameters['uncompress'][pos].find(somask_id) != -1 - else: - return self.parameters['uncompress'].find(somask_id) != -1 - else: - return False - except KeyError: - # default value - return True - - - def get_depends_raw(self): - return self.depends - - - def get_depends(self): - try: - return self.parameters['depends'] - except KeyError: - # default value - return None - - - def get_generate_custom_script(self, source_dir): - path_build = self.get_path_custom_script(source_dir, name='.build') - build_content = self.get_build_script_content() - if build_content is not None: - with open(path_build, 'wt') as f: - f.write(build_content) - - - def get_path_custom_script(self, source_folder, name = 'build'): - if utils.is_windows(): - path_build = os.path.join(source_folder, name + '.cmd') - else: - path_build = os.path.join(source_folder, name + '.sh') - return path_build - - - def has_custom_script(self, source_folder): - script_custom = os.path.exists( self.get_path_custom_script(source_folder) ) - return (self.get_build_script_content() is not None) or script_custom - - - def get_build_script_content(self): - try: - if not utils.is_windows(): - return self.parameters['build'] - else: - return self.parameters['build_windows'] - except KeyError: - # default value - return None - - - def get_source(self): - try: - source = self.parameters['source'] - if source is not None: - if not isinstance(source, list): - return [source] - else: - return source - else: - return [] - except KeyError: - # default value - return [] - - - def get_source_filename(self, position=0): - try: - return self.parameters['source_filename'] - except KeyError: - # default value - source = self.get_source()[position] - filename = source.split('/')[-1] - return filename - - - def get_sources_all(self, position=0): - try: - return self.parameters['sources_all'] - except KeyError: - return False - - - def get_before_copy(self): - try: - return self.parameters['before_copy'] - except KeyError: - # default value - return [] - - - def get_short_path(self): - try: - return self.parameters['short_path'] - except KeyError: - # default value - return False - - - def has_library(self, platform_info): - package = self.get_package_name() - return (('static' in platform_info) and (package != 'dummy')) or (('dynamic' in platform_info) and (package != 'dummy')) - - - def needs(self, node): - if node.is_valid(): - self.depends.append(node) - - - def get_package_name(self): - return self.name - - - def get_package_name_norm(self): - package = self.get_package_name() - for c in '-\\/:*?"<>|': - package = package.replace(c, '_') - return package - - - def get_package_name_norm_upper(self): - package_norm = self.get_package_name_norm() - return package_norm.upper() - - - def set_version(self, newversion): - self.parameters['version'] = newversion - - - def get_version(self): - try: - version = self.parameters['version'] - if version is None: - return '0.0.0.0' - else: - return version - except KeyError: - if self.get_package_name() != 'dummy': - raise Exception('[%s] Version is a mandatory field.' 
% self.get_package_name()) - - - def get_version_manager(self): - try: - version = self.get_version() - if version == '0.0.0.0': - return self.parameters['version_manager'] - else: - # si tiene version -> no usar renombrado git - return None - except KeyError: - return None - - - def get_cmake_target(self): - try: - return self.parameters['cmake_target'] - except KeyError: - return 'install' - - - def get_post_install(self): - try: - return self.parameters['post_install'] - except KeyError: - return [] - - - def get_priority(self): - try: - return int(self.parameters['priority']) - except KeyError: - return priority_default - - - def is_packing(self): - try: - return self.parameters['packing'] - except KeyError: - # default value - return True - - - def get_branch(self): - try: - return self.parameters['branch'] - except KeyError: - # default value - return None - - - def get_build_modes(self): - build_modes = [] - try: - if 'MODE' in os.environ and (os.environ['MODE'] != 'UNDEFINED'): - build_modes.append(os.environ['MODE']) - else: - mode = self.parameters['mode'] - if mode.find('d') != -1: - build_modes.append('Debug') - if mode.find('i') != -1: - build_modes.append('RelWithDebInfo') - if mode.find('r') != -1: - build_modes.append('Release') - except KeyError: - # no mode provided - build_modes.append('Debug') - build_modes.append('RelWithDebInfo') - build_modes.append('Release') - return build_modes - - - def get_mask(self): - try: - return self.parameters['mask'] - except KeyError: - return somask_id - - - def is_valid(self): - if self.blacklisted: - if not self.published_invalidation: - logging.debug('%s is not built because is blacklisted in %s' % (self.get_package_name(), os.path.basename(self.user_parameters.blacklist))) - self.published_invalidation = True - return False - return is_valid(self.get_package_name(), self.get_mask()) - - - def resolver(self, resolved, seen): - seen.append(self) - for edge in self.depends: - if edge not in resolved: - if edge in seen: - raise Exception('Circular reference detected: %s and %s' % (self.get_package_name(), edge.name)) - edge.resolver(resolved, seen) - if self.is_valid(): - resolved.append(self) - seen.remove(self) - - - def get_targets(self): - try: - return self.parameters['targets'] - except KeyError: - # default value - return [] - - - def get_exclude_from_all(self): - try: - return self.parameters['exclude_from_all'] - except KeyError: - # default value - return False - - - def get_exclude_from_clean(self): - try: - return self.parameters['exclude_from_clean'] - except KeyError: - # default value - return False - - - def get_unittest(self): - try: - return self.parameters['unittest'] - except KeyError: - # default value - return None - - - def get_cmake_prefix(self): - try: - cmake_prefix = self.parameters['cmake_prefix'] - if cmake_prefix.endswith('CMakeLists.txt'): - return os.path.dirname(cmake_prefix) - return cmake_prefix - except KeyError: - # default value - return "." 
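resolver() above is a depth-first topological sort: each dependency is appended to resolved before the package that needs it, and the seen list catches circular references. A small usage sketch with stub nodes standing in for ThirdParty instances (the Node class and package names are illustrative):

class Node:
    # Minimal stand-in for ThirdParty: a name, a depends list and the same resolver.
    def __init__(self, name):
        self.name = name
        self.depends = []
    def get_package_name(self):
        return self.name
    def is_valid(self):
        return True
    def resolver(self, resolved, seen):
        seen.append(self)
        for edge in self.depends:
            if edge not in resolved:
                if edge in seen:
                    raise Exception('Circular reference detected: %s and %s'
                                    % (self.get_package_name(), edge.name))
                edge.resolver(resolved, seen)
        if self.is_valid():
            resolved.append(self)
        seen.remove(self)

zlib, png, app = Node('zlib'), Node('libpng'), Node('app')
png.depends.append(zlib)
app.depends.extend([png, zlib])
resolved = []
app.resolver(resolved, [])
print([n.name for n in resolved])  # ['zlib', 'libpng', 'app'] -> safe build order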
- - - def get_generator_targets(self, plat, _, compiler_cpp, ext_sta, ext_dyn): - - package = self.get_package_name_norm() - - for targets in self.get_targets(): - - for target_name in targets: - - platform_info = None - platform_extra = None - - target_info = targets[target_name] - if 'info' in target_info: - outputinfo = search_fuzzy(target_info['info'], plat) - if outputinfo is not None: - platform_info = copy.deepcopy( outputinfo ) - - if 'extra' in target_info: - outputinfo_extra = search_fuzzy(target_info['extra'], plat) - if outputinfo_extra is not None: - platform_extra = copy.deepcopy( outputinfo_extra ) - - if (platform_info is not None) and (platform_extra is not None): - platform_info = utils.smart_merge(platform_info, platform_extra) - - # variables for use in "info" and "extra" - platform_info = utils.apply_replaces_vars(platform_info, { - 'TARGET': target_name, - 'TARGET_UPPER': target_name.upper(), - 'PACKAGE': package, - 'PACKAGE_UPPER': package.upper(), - 'PLATFORM': plat, - 'COMPILER': os.path.basename(compiler_cpp), - 'EXT_DYN': ext_dyn, - 'EXT_STA': ext_sta, - 'ARCH': archs[plat], - }) - - if platform_info is None: - logging.error('No platform info in package %s, platform %s' % (package, plat)) - logging.error("%s" % targets) - sys.exit(1) - - yield (target_name, platform_info) - - - def have_any_in_target(self, plat, key, compiler_replace_maps): - any_static = False - for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): - for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): - if key in platform_info: - any_static = True - return any_static - - - def get_generate_find_package(self): - try: - return self.parameters['generate_find_package'] - except KeyError: - # default value - return True - - - def compiler_iterator(self, plat, compiler_replace_maps): - - plat_parms = search_fuzzy(self.parameters['platforms'], plat) - try: - generator = plat_parms['generator'] - except KeyError: - generator = None - - try: - compilers = plat_parms['compiler'] - except KeyError: - compilers = None - - # resolve map - compiler_replace_resolved = {} - for var, value in compiler_replace_maps.items(): - new_value = value - new_value = new_value.replace('$PLATFORM', plat) - compiler_replace_resolved[var] = new_value - compiler_replace_resolved['$ARCH'] = archs[plat] - compiler_replace_resolved['${ARCH}'] = archs[plat] - - # get compiler info - compiler = get_identifier('COMPILER') - - ext_dyn = plat_parms['ext_dyn'] - ext_sta = plat_parms['ext_sta'] - if compilers is None: - compilers = [('%s, %s' % (compiler, compiler))] - - for compiler in compilers: - compilers_tuple = compiler.split(',') - assert(len(compilers_tuple) == 2) - compiler_c = compilers_tuple[0].strip() - compiler_cpp = compilers_tuple[1].strip() - - compiler_c = utils.apply_replaces(compiler_c, compiler_replace_resolved) - compiler_cpp = utils.apply_replaces(compiler_cpp, compiler_replace_resolved) - - env_new = {} - env_modified = os.environ.copy() - - for env_iter in [env_modified, env_new]: - - env_iter['COMPILER'] = str(compiler) - env_iter['PLATFORM'] = str(plat) - env_iter['PACKAGE'] = str(self.get_package_name()) - env_iter['VERSION'] = str(self.get_version()) - env_iter['ARCH'] = str(archs[plat]) - - try: - environment = plat_parms['environment'] - - try: - environment_remove = environment['remove'] - for key, values in environment_remove.items(): - try: - oldpath = env_iter[key] - except KeyError: - 
oldpath = '' - uniq_values = set() - for v in values: - v = utils.apply_replaces(v, compiler_replace_resolved) - uniq_values.add(v) - for v in uniq_values: - oldpath = oldpath.replace(v, '') - env_iter[key] = oldpath - except KeyError: - pass - - # insert front with seprator = ":" - try: - environment_push_front = environment['push_front'] - for key, values in environment_push_front.items(): - try: - oldpath = env_iter[key] - except KeyError: - oldpath = '' - uniq_values = set() - for v in values: - v = utils.apply_replaces(v, compiler_replace_resolved) - uniq_values.add(v) - for v in uniq_values: - if len(oldpath) == 0: - separator = '' - else: - # -L / -I / -R use space - if v.startswith('-'): - separator = ' ' - else: - separator = ':' - oldpath = str('%s%s%s' % (v, separator, oldpath)) - env_iter[key] = oldpath - except KeyError: - pass - - # insert back with separator " " - try: - environment_flags = environment['flags'] - for key, values in environment_flags.items(): - try: - oldpath = env_iter[key] - except KeyError: - oldpath = '' - uniq_values = set() - for v in values: - v = utils.apply_replaces(v, compiler_replace_resolved) - uniq_values.add(v) - for v in uniq_values: - if len(oldpath) == 0: - separator = '' - else: - separator = ' ' - oldpath = str('%s%s%s' % (oldpath, separator, v)) - env_iter[key] = oldpath - except KeyError: - pass - - # insert new environment variables - try: - environment_assign = environment['assign'] - for key, value in environment_assign.items(): - value = utils.apply_replaces(value, compiler_replace_resolved) - env_iter[key] = value - except KeyError: - pass - - except KeyError: - pass - - yield (compiler_c, compiler_cpp, generator, ext_sta, ext_dyn, env_modified, env_new) - - - def remove_cmake3p(self, cmake3p_dir): - package_cmake3p = os.path.join(cmake3p_dir, self.get_base_folder()) - logging.debug('Removing cmake3p %s' % package_cmake3p) - if os.path.exists(package_cmake3p): - utils.tryremove_dir(package_cmake3p) - for dep in self.get_depends_raw(): - dep.remove_cmake3p(cmake3p_dir) - - - def get_base_folder(self): - package = self.get_package_name() - version = self.get_version() - return '%s-%s' % (package, version) - - - def get_workspace(self, plat): - package = self.get_package_name() - version = self.get_version() - return '%s-%s-%s' % (package, version, plat) - - - def get_build_directory(self, plat, build_mode): - package = self.get_package_name() - version = self.get_version() - if not self.get_short_path(): - return '.build_%s-%s-%s_%s' % (package, version, plat, build_mode) - else: - return '.bs_%s%s%s%s' % (package[:3], version[-1:], plat, build_mode) - - def get_binary_workspace(self, plat): - install_directory = os.path.join(self.user_parameters.prefix, self.get_workspace(plat)) - utils.trymkdir(install_directory) - return install_directory - - - def get_install_directory(self, plat): - install_directory = os.path.join(self.get_binary_workspace(plat), self.get_base_folder(), plat) - return install_directory - - - def get_download_directory(self): - package = self.get_package_name() - return '.download_%s' % package - - - def get_original_directory(self): - package = self.get_package_name() - return '.download_original_%s' % package - - - def apply_replace_maps(self, compiler_replace_maps): - package = self.get_package_name() - package_norm = self.get_package_name_norm() - to_package = os.path.abspath(package) - utils.trymkdir(to_package) - with utils.working_directory(to_package): - basedir = os.path.abspath('..') - 
compiler_replace_maps['$%s_BASE' % package_norm] = os.path.join(basedir, self.get_workspace('$PLATFORM'), self.get_base_folder()) - - - def generate_scripts_headers(self, compiler_replace_maps): - package = self.get_package_name() - package_norm = self.get_package_name_norm() - version = self.get_version() - to_package = os.path.abspath(package) - utils.trymkdir(to_package) - with utils.working_directory(to_package): - basedir = self.user_parameters.prefix - rootdir = self.user_parameters.rootdir - - # generate find.cmake - build_directory = self.get_build_directory(r"${CMAKI_PLATFORM}", r"${GLOBAL_BUILD_MODE}") - with open('find.cmake', 'wt') as f: - f.write("SET(%s_VERSION %s CACHE STRING \"Last version compiled ${PACKAGE}\" FORCE)\n" % (package_norm, version)) - f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}/include\" %s_INCLUDE)\n" % (basedir, package, version, package, version, package_norm)) - f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}\" %s_LIBDIR)\n" % (basedir, package, version, package, version, package_norm)) - f.write("file(TO_NATIVE_PATH \"%s/%s\" %s_BUILD)\n" % (rootdir, build_directory, package_norm)) - f.write("SET(%s_INCLUDE ${%s_INCLUDE} CACHE STRING \"Include dir %s\" FORCE)\n" % (package_norm, package_norm, package)) - f.write("SET(%s_LIBDIR ${%s_LIBDIR} CACHE STRING \"Libs dir %s\" FORCE)\n" % (package_norm, package_norm, package)) - f.write("SET(%s_BUILD ${%s_BUILD} CACHE STRING \"Build dir %s\" FORCE)\n" % (package_norm, package_norm, package)) - - # genereate find.script / cmd - if utils.is_windows(): - build_directory = self.get_build_directory("%PLATFORM%", "%BUILD_MODE%") - with open('find.cmd', 'wt') as f: - f.write("set %s_VERSION=%s\n" % (package_norm, version)) - f.write("set %s_HOME=%s\%s-%s-%%PLATFORM%%\%s-%s\%%PLATFORM%%\n" % (package_norm, basedir, package, version, package, version)) - f.write("set %s_BASE=%s\%s-%s-%%PLATFORM%%\%s-%s\n" % (package_norm, basedir, package, version, package, version)) - f.write("set SELFHOME=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\%%PLATFORM%%\n" % (basedir)) - f.write("set SELFBASE=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\n" % (basedir)) - f.write("set %s_BUILD=%s\%s\n" % (package_norm, rootdir, build_directory)) - f.write(r"md %SELFHOME%") - f.write("\n") - else: - build_directory = self.get_build_directory("${PLATFORM}", "${BUILD_MODE}") - with open('find.script', 'wt') as f: - f.write("#!/bin/bash\n") - f.write("%s_VERSION=%s\n" % (package_norm, version)) - f.write("%s_HOME=%s/%s-%s-$PLATFORM/%s-%s/$PLATFORM\n" % (package_norm, basedir, package, version, package, version)) - f.write("%s_BASE=%s/%s-%s-$PLATFORM/%s-%s\n" % (package_norm, basedir, package, version, package, version)) - f.write("SELFHOME=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION/$PLATFORM\n" % (basedir)) - f.write("SELFBASE=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION\n" % (basedir)) - f.write("%s_BUILD=%s/%s\n" % (package_norm, rootdir, build_directory)) - f.write("mkdir -p $SELFHOME\n") - - - def remove_cmakefiles(self): - utils.tryremove('CMakeCache.txt') - utils.tryremove('cmake_install.cmake') - utils.tryremove('install_manifest.txt') - utils.tryremove_dir('CMakeFiles') - - - def remove_scripts_headers(self): - package = self.get_package_name() - to_package = os.path.abspath(package) - utils.trymkdir(to_package) - with utils.working_directory(to_package): - utils.tryremove('find.cmake') - utils.tryremove('find.script') - 
utils.tryremove('find.cmd') - utils.tryremove('.build.sh') - utils.tryremove('.build.cmd') - utils.tryremove_dir_empty(to_package) - - - def generate_3rdpartyversion(self, output_dir): - package = self.get_package_name() - package_norm_upper = self.get_package_name_norm_upper() - version = self.get_version() - packing = self.is_packing() - if not packing: - logging.debug("package %s, don't need 3rdpartyversion" % package) - return - thirdparty_path = os.path.join(output_dir, '3rdpartyversions') - utils.trymkdir(thirdparty_path) - with utils.working_directory(thirdparty_path): - with open('%s.cmake' % package, 'wt') as f: - f.write('SET(%s_REQUIRED_VERSION %s EXACT)\n' % (package_norm_upper, version)) - - - def _smart_uncompress(self, position, package_file_abs, uncompress_directory, destiny_directory, compiler_replace_maps): - uncompress = self.get_uncompress(position) - uncompress_strip = self.get_uncompress_strip(position) - uncompress_prefix = self.get_uncompress_prefix(position) - if uncompress: - if (uncompress_strip == uncompress_strip_default) and (uncompress_prefix == uncompress_prefix_default): - # case fast (don't need intermediate folder) - ok = utils.extract_file(package_file_abs, destiny_directory, self.get_first_environment(compiler_replace_maps)) - else: - source_with_strip = os.path.join(uncompress_directory, uncompress_strip) - destiny_with_prefix = os.path.join(destiny_directory, uncompress_prefix) - ok = utils.extract_file(package_file_abs, uncompress_directory, self.get_first_environment(compiler_replace_maps)) - utils.move_folder_recursive(source_with_strip, destiny_with_prefix) - utils.tryremove_dir(source_with_strip) - if not ok: - raise Exception('Invalid uncompressed package %s - %s' % (package, package_file_abs)) - - - def _prepare_third_party(self, position, url, build_directory, compiler_replace_maps): - package = self.get_package_name() - source_filename = self.get_source_filename(position) - uncompress_strip = self.get_uncompress_strip(position) - uncompress_prefix = self.get_uncompress_prefix(position) - uncompress = self.get_uncompress(position) - uncompress_directory = self.get_download_directory() - utils.trymkdir(uncompress_directory) - - logging.debug('source_filename = %s' % source_filename) - logging.debug('uncompress_strip = %s' % uncompress_strip) - logging.debug('uncompress_prefix = %s' % uncompress_prefix) - logging.debug('uncompress = %s' % uncompress) - - # resolve url vars - url = url.replace('$NPP_SERVER', os.environ['NPP_SERVER']) - - # files in svn - if(url.startswith('svn://')): - # strip is not implemmented with svn:// - utils.tryremove_dir( build_directory ) - logging.info('Download from svn: %s' % url) - self.safe_system( 'svn co %s %s' % (url, build_directory), compiler_replace_maps ) - # utils.tryremove_dir( os.path.join(build_directory, '.svn') ) - - elif(url.endswith('.git') or (url.find('github') != -1) or (url.find('bitbucket') != -1)) and not ( url.endswith('.zip') or url.endswith('.tar.gz') or url.endswith('.tar.bz2') or url.endswith('.tgz') or url.endswith('.py') ): - # strip is not implemmented with git:// - utils.tryremove_dir( build_directory ) - logging.info('Download from git: %s' % url) - branch = self.get_branch() - extra_cmd = '' - if branch is not None: - logging.info('clonning to branch %s' % branch) - extra_cmd = '%s' % branch - self.safe_system('git clone %s --depth=200 %s %s' % (extra_cmd, url, build_directory), compiler_replace_maps) - # self.safe_system('git clone %s %s' % (url, build_directory), 
compiler_replace_maps) - with utils.working_directory(build_directory): - # self.safe_system('git checkout {}'.format(extra_cmd), compiler_replace_maps) - self.safe_system('git submodule init', compiler_replace_maps) - self.safe_system('git submodule update', compiler_replace_maps) - # depends_file = self.user_parameters.depends - # if depends_file is not None: - # with utils.working_directory(build_directory): - # # leer el fichero de dependencias - # if os.path.exists(depends_file): - # data = utils.deserialize(depends_file) - # else: - # data = {} - # - # # obedecer, si trae algo util - # if package in data: - # logging.debug('data package version is %s' % data[package]) - # try: - # git_version = hash_version.to_git_version(build_directory, data[package]) - # logging.debug('data package in git version is %s' % git_version) - # logging.debug('updating to revision %s' % git_version) - # self.safe_system('git reset --hard %s' % git_version, compiler_replace_maps) - # except AssertionError: - # logging.info('using HEAD') - # - # # actualizar y reescribir - # revision = hash_version.get_last_version(build_directory) - # assert(len(revision) > 0) - # data[package] = revision - # utils.serialize(data, depends_file) - # else: - # logging.warning('not found depends file, using newest changeset') - - # file in http - elif ( url.startswith('http://') - or url.startswith('https://') - or url.endswith('.zip') - or url.endswith('.tar.gz') - or url.endswith('.tar.bz2') - or url.endswith('.tgz') - or url.endswith('.py') ): - - logging.info('Download from url: %s' % url) - # download to source_filename - package_file_abs = os.path.join(uncompress_directory, source_filename) - utils.download_from_url(url, package_file_abs) - if os.path.isfile(package_file_abs): - - # uncompress in download folder for after generate a patch with all changes - if not os.path.isdir( self.get_original_directory() ): - utils.trymkdir( self.get_original_directory() ) - logging.debug('preparing original uncompress') - # uncompress in original - self._smart_uncompress(position, package_file_abs, uncompress_directory, self.get_original_directory(), compiler_replace_maps) - else: - logging.debug('skipping original uncompress (already exists)') - - # uncompress in intermediate build directory - self._smart_uncompress(position, package_file_abs, uncompress_directory, build_directory, compiler_replace_maps) - - else: - raise DontExistsFile(source_filename) - - else: - raise Exception('Invalid source: %s - %s' % (package, url)) - - - def prepare_third_party(self, build_directory, compiler_replace_maps): - utils.trymkdir(build_directory) - package = self.get_package_name() - version = self.get_version() - sources_all = self.get_sources_all() - exceptions = [] - i = 0 - for source_url in self.get_source(): - if (source_url is None) or (len(source_url) <= 0) or (source_url == 'skip'): - logging.warning('[%s %s] Skipping preparation ...' % (package, version)) - else: - logging.warning('[%s %s] trying prepare from %s ...' 
% (package, version, source_url)) - try: - self._prepare_third_party(i, source_url, build_directory, compiler_replace_maps) - if not sources_all: - # sources_all = false ---> any source - # sources_all = Trie ----> all source - break - except exceptions_fail_group + exceptions_fail_program: - raise - except: - exceptions.append(sys.exc_info()) - i += 1 - if len(exceptions) > 0: - i = 0 - for exc_type, exc_value, exc_traceback in exceptions: - print ("---- Exception #%d / %d ----------" % (i+1, len(exceptions))) - traceback.print_exception(exc_type, exc_value, exc_traceback) - print ("----------------------------------") - i += 1 - raise FailPrepare(self) - - - def get_prefered_build_mode(self, prefered_build_mode_list): - build_modes = self.get_build_modes() - assert(len(prefered_build_mode_list) > 0) - prefered_build_mode = prefered_build_mode_list[0] - while (prefered_build_mode not in build_modes) and (len(prefered_build_mode_list)>0): - prefered_build_mode_list.pop(0) - if len(prefered_build_mode_list) > 0: - prefered_build_mode = prefered_build_mode_list[0] - return prefered_build_mode - - - def generate_cmake_condition(self, platforms, compiler_replace_maps): - target_uniques = set() - condition = '' - i = 0 - for plat in platforms: - for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): - for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): - package_lower = package.lower() - if (package_lower not in target_uniques) and (package_lower != 'dummy'): - target_uniques.add(package_lower) - if self.has_library(platform_info): - if i == 0: - condition += '(NOT TARGET %s)' % package_lower - else: - condition += ' OR (NOT TARGET %s)' % package_lower - i += 1 - return condition - - - def _search_library(self, rootdir, special_pattern): - ''' - 3 cases: - string - pattern as special string - list of strings - ''' - logging.debug('-- searching in {} with pattern: {}'.format(rootdir, special_pattern)) - - if special_pattern is None: - logging.debug('Failed searching lib in %s' % rootdir) - return False, None - - package = self.get_package_name() - if isinstance(special_pattern, list): - utils.verbose(self.user_parameters, 'Searching list %s' % special_pattern) - valid_ff = None - for ff in special_pattern: - valid, valid_ff = self._search_library(rootdir, utils.get_norm_path(ff)) - if valid: - break - return valid, valid_ff - - elif special_pattern.startswith('/') and special_pattern.endswith('/'): - pattern = special_pattern[1:-1] - utils.verbose(self.user_parameters, 'Searching rootdir %s, pattern %s' % (rootdir, pattern)) - files_found = utils.rec_glob(rootdir, pattern) - utils.verbose(self.user_parameters, 'Candidates %s' % files_found) - if len(files_found) == 1: - relfile = os.path.relpath(files_found[0], rootdir) - return True, utils.get_norm_path(relfile) - elif len(files_found) == 0: - msg = 'No library found in %s with pattern %s' % (rootdir, pattern) - logging.debug(msg) - return False, None - else: - msg = "Ambiguation in %s" % (package) - logging.debug(msg) - return False, None - else: - pathfull = os.path.join(rootdir, special_pattern) - utils.verbose(self.user_parameters, 'Checking file %s' % pathfull) - if os.path.exists(pathfull): - return True, utils.get_norm_path(special_pattern) - else: - return False, None - - - def search_library(self, workbase, dataset, kind, rootdir=None): - ''' - can throw exception - ''' - build_mode = 
self.get_prefered_build_mode(prefered[os.environ['MODE']]) - if rootdir is None: - rootdir = workbase - utils.verbose(self.user_parameters, 'Searching rootdir %s' % (rootdir)) - if (build_mode.lower() in dataset) and (kind in dataset[build_mode.lower()]): - special_pattern = dataset[build_mode.lower()][kind] - valid, valid_ff = self._search_library(rootdir, special_pattern) - if valid: - return valid_ff - else: - package = self.get_package_name() - raise AmbiguationLibs(kind, package, build_mode) - else: - raise NotFoundInDataset("Not found in dataset, searching %s - %s" % (build_mode.lower(), kind)) - - - def search_library_noexcept(self, workbase, dataset, kind): - try: - rootdir = os.path.abspath(workbase) - finalpath = self.search_library(workbase, dataset, kind, rootdir) - utils.superverbose(self.user_parameters, '[01] path: %s' % finalpath) - return finalpath - except AmbiguationLibs: - finalpath = '%s.%s' % (magic_invalid_file, kind) - utils.superverbose(self.user_parameters, '[02] path: %s' % finalpath) - return finalpath - except NotFoundInDataset: - finalpath = '%s.%s' % (magic_invalid_file, kind) - utils.superverbose(self.user_parameters, '[03] path: %s' % finalpath) - return finalpath - - - def check_parts_exists(self, workbase, package, target, dataset, kindlibs, build_modes=None): - ''' - Asegura que todas las partes del target existen, devuelve True o False si todas las partes existen - - workbase: directorio de instalacion base - package: nombre del paquete - target: nombre del target - dataset: es la estructura que contiene las estrategias de busqueda - {"debug": {"part1": ["*.dll", "*d.dll"]}, "release": {"part1": ["*_release.dll"]}} - kindlibs: tupla de partes a verificar, cada tupla representa (tipo, obligatoriedad) - build_modes: restringuir la busqueda a ciertos build modes - ''' - - all_ok = True - if build_modes is None: - build_modes = self.get_build_modes() - for build_mode in build_modes: - for kind, must in kindlibs: - try: - part_fullpath = os.path.join(workbase, self.search_library_noexcept(workbase, dataset, kind)) - if not os.path.exists(part_fullpath): - if must: - logging.error("[%s] Don't found %s in %s. Mode: %s. Path: %s. Dataset: %s" % (package, kind, target, build_mode, part_fullpath, dataset)) - all_ok = False - else: - msg = "[%s] Don't found %s in %s. Mode: %s. 
Path: %s" % (package, kind, target, build_mode, part_fullpath) - if build_mode != 'Release': - logging.warning(msg) - else: - logging.debug(msg) - except NotFoundInDataset as e: - if must: - logging.error("[ERROR] [NOT FOUND] [%s] %s" % (package, e)) - all_ok = False - return all_ok - - - def is_invalid_lib(self, libpath): - return (libpath is None) or (utils.get_filename_no_ext(os.path.basename(libpath)) == magic_invalid_file) - - - def generate_cmakefiles(self, platforms, folder_output, compiler_replace_maps): - errors = 0 - packing = self.is_packing() - if not packing: - logging.warning("package: %s don't need generate cmakefiles" % self.get_package_name()) - return errors - oldcwd = os.getcwd() - utils.trymkdir(folder_output) - with utils.working_directory(folder_output): - package = self.get_package_name() - package_lower = package.lower() - package_upper = package.upper() - with open('%s-config.cmake' % package_lower, 'wt') as f: - f.write('''CMAKE_POLICY(PUSH) -CMAKE_POLICY(VERSION 3.0) -cmake_minimum_required(VERSION 3.0) -cmake_policy(SET CMP0011 NEW) - ''') - - condition = self.generate_cmake_condition(platforms, compiler_replace_maps) - if len(condition) > 0: - f.write('\nif(%s)\n' % condition) - - f.write('''\ninclude(${CMAKI_PATH}/facts/facts.cmake) -cmaki_download_package() -file(TO_NATIVE_PATH "${_DIR}" %s_HOME) -file(TO_NATIVE_PATH "${_DIR}/${CMAKI_PLATFORM}" %s_PREFIX) -set(%s_HOME "${%s_HOME}" PARENT_SCOPE) -set(%s_PREFIX "${%s_PREFIX}" PARENT_SCOPE) -include(${_MY_DIR}/${CMAKI_PLATFORM}.cmake) - ''' % (package_upper, package_upper, package_upper, package_upper, package_upper, package_upper)) - - if len(condition) > 0: - f.write('\nendif()\n') - - f.write('\nCMAKE_POLICY(POP)') - - with open('%s-config-version.cmake' % package_lower, 'wt') as f: - f.write('''\ -cmake_minimum_required(VERSION 3.0) -cmake_policy(SET CMP0011 NEW) -include(${CMAKI_PATH}/facts/facts.cmake) -cmaki_package_version_check() - ''') - - for plat in platforms: - - workspace = self.get_workspace(plat) - base_folder = self.get_base_folder() - - for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): - - with open('%s.cmake' % (plat), 'wt') as f: - - install_3rdparty_dependencies = True - - includes_set = [] - definitions_set = [] - system_depends_set = [] - depends_set = set() - - for target, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): - - target_lower = target.lower() - target_upper = target.upper() - - if self.has_library(platform_info) and (target != 'dummy'): - f.write('if(NOT TARGET %s)\n\n' % target_lower) - - try: - add_3rdparty_dependencies = platform_info['add_3rdparty_dependencies'] - except KeyError: - add_3rdparty_dependencies = True - - try: - lib_provided = platform_info['lib_provided'] - except KeyError: - lib_provided = True - - if 'include' in platform_info: - include = platform_info['include'] - for d in include: - includes_set.append(d) - - # rename to definitions - if 'definitions' in platform_info: - definitions = platform_info['definitions'] - if definitions is not None: - for d in definitions: - definitions_set.append(d) - - if 'system_depends' in platform_info: - system_depends = platform_info['system_depends'] - if system_depends is not None: - for sd in system_depends: - system_depends_set.append(sd) - - if 'targets_paths' in self.parameters: - targets_paths = self.parameters['targets_paths'] - if targets_paths is not None: - for key, value in 
targets_paths.items(): - f.write('file(TO_NATIVE_PATH "%s" %s)\n' % (value, key)) - - # work_base = os.path.join(oldcwd, workspace, base_folder, plat) - work_base = self.get_install_directory(plat) - - if ('executable' in platform_info) and (target != 'dummy'): - # a target in mode executable, dont need install - install_3rdparty_dependencies = False - - if 'use_run_with_libs' in platform_info: - if utils.is_windows(): - f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.cmd" %s_LAUNCHER)\n' % target_upper) - else: - f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.sh" %s_LAUNCHER)\n' % target_upper) - - executable = platform_info['executable'] - if not self.check_parts_exists(work_base, package, target, executable, [('bin', True)], build_modes=['Release']): - errors += 1 - release_bin = self.search_library_noexcept(work_base, executable, 'bin') - - for suffix in ['', '_EXECUTABLE']: - if 'use_run_with_libs' in platform_info: - f.write('set(%s%s "${%s_LAUNCHER}" "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, target_upper, plat, utils.get_norm_path(release_bin, native=False))) - else: - f.write('set(%s%s "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, plat, utils.get_norm_path(release_bin, native=False))) - f.write('file(TO_NATIVE_PATH "${%s%s}" %s%s)\n' % (target_upper, suffix, target_upper, suffix)) - f.write('\n') - - if ('dynamic' in platform_info) and (target != 'dummy'): - - dynamic = platform_info['dynamic'] - - # add depend - if add_3rdparty_dependencies: - f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) - - if utils.is_windows(): - if not self.check_parts_exists(work_base, package, target, dynamic, [('dll', True), ('lib', lib_provided), ('pdb', False)]): - errors += 1 - - debug_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - release_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - relwithdebinfo_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - minsizerel_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - - debug_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - release_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - relwithdebinfo_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - minsizerel_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - - try: - relwithdebinfo_pdb = self.search_library(work_base, dynamic, 'pdb') - except Exception as e: - logging.debug('exception searching lib: %s' % e) - relwithdebinfo_pdb = None - - try: - debug_pdb = self.search_library(work_base, dynamic, 'pdb') - except Exception as e: - logging.debug('exception searching lib: %s' % e) - debug_pdb = None - - f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) - f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) - f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) - - # dll - f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_dll, native=False))) - f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_dll, native=False))) - f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_dll, native=False))) - f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_dll, native=False))) - f.write('\n') - - # lib - if not self.is_invalid_lib(debug_lib): - 
f.write('\tIMPORTED_IMPLIB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) - if not self.is_invalid_lib(release_lib): - f.write('\tIMPORTED_IMPLIB_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) - if not self.is_invalid_lib(relwithdebinfo_lib): - f.write('\tIMPORTED_IMPLIB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) - if not self.is_invalid_lib(minsizerel_lib): - f.write('\tIMPORTED_IMPLIB_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) - f.write('\n') - - # pdb - if not self.is_invalid_lib(debug_pdb): - f.write('\tIMPORTED_PDB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_pdb, native=False))) - - if not self.is_invalid_lib(relwithdebinfo_pdb): - f.write('\tIMPORTED_PDB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_pdb, native=False))) - - f.write(')\n') - else: - - if not self.check_parts_exists(work_base, package, target, dynamic, [('so', True)]): - errors += 1 - - debug_so = self.search_library_noexcept(work_base, dynamic, 'so') - release_so = self.search_library_noexcept(work_base, dynamic, 'so') - relwithdebinfo_so = self.search_library_noexcept(work_base, dynamic, 'so') - minsizerel_so = self.search_library_noexcept(work_base, dynamic, 'so') - - try: - debug_so_full = os.path.join(oldcwd, work_base, debug_so) - debug_soname = utils.get_soname(debug_so_full, env=env_modified) - logging.debug('detected soname in debug library: {}'.format(debug_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - debug_soname = None - - try: - release_so_full = os.path.join(oldcwd, work_base, release_so) - release_soname = utils.get_soname(release_so_full, env=env_modified) - logging.debug('detected soname in release library: {}'.format(release_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - release_soname = None - - try: - relwithdebinfo_so_full = os.path.join(oldcwd, work_base, relwithdebinfo_so) - relwithdebinfo_soname = utils.get_soname(relwithdebinfo_so_full, env=env_modified) - logging.debug('detected soname in relwithdebinfo library: {}'.format(relwithdebinfo_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - relwithdebinfo_soname = None - - try: - minsizerel_so_full = os.path.join(oldcwd, work_base, minsizerel_so) - minsizerel_soname = utils.get_soname(minsizerel_so_full, env=env_modified) - logging.debug('detected soname in minsizerel library: {}'.format(minsizerel_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - minsizerel_soname = None - - f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) - f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) - f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) - - # so - f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_so, native=False))) - f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_so, native=False))) - f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_so, native=False))) - f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_so, native=False))) - f.write('\n') - - # soname - if (debug_soname is not None) and 
os.path.exists( os.path.join(os.path.dirname(debug_so_full), debug_soname) ): - f.write('\tIMPORTED_SONAME_DEBUG "%s"\n' % utils.get_norm_path(debug_soname, native=False)) - - if (release_soname is not None) and os.path.exists( os.path.join(os.path.dirname(release_so_full), release_soname) ): - f.write('\tIMPORTED_SONAME_RELEASE "%s"\n' % utils.get_norm_path(release_soname, native=False)) - - if (relwithdebinfo_soname is not None) and os.path.exists( os.path.join(os.path.dirname(relwithdebinfo_so_full), relwithdebinfo_soname) ): - f.write('\tIMPORTED_SONAME_RELWITHDEBINFO "%s"\n' % utils.get_norm_path(relwithdebinfo_soname, native=False)) - - if (minsizerel_soname is not None) and os.path.exists( os.path.join(os.path.dirname(minsizerel_so_full), minsizerel_soname) ): - f.write('\tIMPORTED_SONAME_MINSIZEREL "%s"\n' % utils.get_norm_path(minsizerel_soname, native=False)) - - f.write(')\n') - - if ('static' in platform_info) and (target != 'dummy'): - - static = platform_info['static'] - - if not self.check_parts_exists(work_base, package, target, static, [('lib', True)]): - errors += 1 - - debug_lib = self.search_library_noexcept(work_base, static, 'lib') - release_lib = self.search_library_noexcept(work_base, static, 'lib') - relwithdebinfo_lib = self.search_library_noexcept(work_base, static, 'lib') - minsizerel_lib = self.search_library_noexcept(work_base, static, 'lib') - - if add_3rdparty_dependencies: - # register target - f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) - - f.write('ADD_LIBRARY(%s STATIC IMPORTED)\n' % target_lower) - f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) - f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) - - # lib - f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) - f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) - f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) - f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) - - f.write(')\n') - - if install_3rdparty_dependencies and (target != 'dummy'): - f.write('cmaki_install_3rdparty(%s)\n' % target_lower) - f.write('\n') - - if self.has_library(platform_info) and (target != 'dummy'): - f.write('endif()\n\n') - - # print includes - if len(includes_set) > 0: - for d in list(set(includes_set)): - f.write('list(APPEND %s_INCLUDE_DIRS ${_DIR}/%s)\n' % (package_upper, d)) - - f.write('\n') - - if len(definitions_set) > 0: - for d in list(set(definitions_set)): - f.write('add_definitions(%s)\n' % d) - f.write('\n') - - if len(system_depends_set) > 0: - f.write('# begin system depends\n') - for sd in list(set(system_depends_set)): - f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, sd)) - f.write('# end system depends\n') - - # if self.get_generate_find_package(): - # f.write('# Depends of %s (%s)\n' % (self.get_package_name(), self.get_version())) - # for dep in self.get_depends_raw(): - # package_name = dep.get_package_name() - # if package_name not in depends_set: - # if dep.have_any_in_target(plat, 'dynamic', compiler_replace_maps): - # f.write('cmaki_find_package(%s)\n' % (package_name)) - # else: - # f.write('# cmaki_find_package(%s) # static package\n' % (package_name)) - # depends_set.add(package_name) - # 
f.write('\n') - - logging.info('----------------------------------------------------') - if self.user_parameters.fast: - logging.debug('skipping for because is in fast mode: "generate_cmakefiles"') - break - - return errors - - - def show_environment_vars(self, env_modified): - package = self.get_package_name() - logging.debug('------- begin print environment variables for compile %s ---------' % package) - for key, value in sorted(env_modified.items()): - logging.debug("%s=%s" % (key, value)) - logging.debug('------- end print environment variables for compile %s -----------' % package) - - - def get_first_environment(self, compiler_replace_maps): - for plat in platforms: - for _, _, _, _, _, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): - return env_modified - return os.environ.copy() - - - def safe_system(self, cmd, compiler_replace_maps): - return utils.safe_system(cmd, env=self.get_first_environment(compiler_replace_maps)) - - - def remove_packages(self): - # remove packages before - for plat in platforms: - prefix_package = os.path.join(self.user_parameters.prefix, '%s.tar.gz' % self.get_workspace(plat)) - prefix_package_cmake = os.path.join(self.user_parameters.prefix, '%s-cmakelib-%s.tar.gz' % (self.get_base_folder(), sys.platform)) - prefix_folder_cmake = os.path.join(self.user_parameters.third_party_dir, self.get_base_folder()) - logging.info("preremoving package %s" % prefix_package) - logging.info("preremoving package cmakefiles %s" % prefix_package_cmake) - logging.info("preremoving folder cmakefiles %s" % prefix_folder_cmake) - utils.tryremove(prefix_package) - utils.tryremove(prefix_package_cmake) - utils.tryremove_dir(prefix_folder_cmake) - - diff --git a/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt deleted file mode 100644 index a7a3475..0000000 --- a/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt +++ /dev/null @@ -1,30 +0,0 @@ -PROJECT(UNITEST_CMAKI_GENERATOR_${CMAKI_PLATFORM}_${CMAKE_BUILD_TYPE} CXX) -cmake_minimum_required(VERSION 3.0) - -include(cmaki) - -get_filename_component(BASEDIR "${CMAKE_CURRENT_LIST_FILE}" PATH) -set(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_BINARY_DIR}) -set(EXECUTABLE_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") -set(LIBRARY_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") - -foreach(PACKAGE_ITER ${FIND_PACKAGES}) - string(TOUPPER ${PACKAGE_ITER} PACKAGE_UPPER) - string(REGEX REPLACE "-" "_" PACKAGE_UPPER ${PACKAGE_UPPER}) - include("${DEPENDS_PATH}/3rdpartyversions/${PACKAGE_ITER}.cmake") - message("find_package in test: ${PACKAGE_UPPER}, version: ${${PACKAGE_UPPER}_REQUIRED_VERSION}") - cmaki_find_package(${PACKAGE_ITER} ${${PACKAGE_UPPER}_REQUIRED_VERSION}) -endforeach() -message("include dirs: ${CMAKI_INCLUDE_DIRS}") -message("libs to link in test: ${CMAKI_LIBRARIES}") - -foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) -endforeach() -add_executable(test_${CMAKI_PLATFORM} ${UNITTEST_PATH}) -target_link_libraries(test_${CMAKI_PLATFORM} ${CMAKI_LIBRARIES}) -install(TARGETS test_${CMAKI_PLATFORM} DESTINATION "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") - -enable_testing() -add_test(NAME test_cmake_${CMAKI_PLATFORM} COMMAND test_${CMAKI_PLATFORM} WORKING_DIRECTORY "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") - diff --git a/node_modules/npm-mas-mas/cmaki_generator/upload.py b/node_modules/npm-mas-mas/cmaki_generator/upload.py deleted file mode 100644 index 034813c..0000000 --- 
a/node_modules/npm-mas-mas/cmaki_generator/upload.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-import logging
-import utils
-from third_party import platforms
-
-
-def upload(node, parameters, compiler_replace_maps):
-
-    if parameters.server is None:
-        logging.warning('parameter --server is mandatory for upload, skipping upload')
-    else:
-        # pack tar.gz binaries
-        for plat in platforms:
-            prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % node.get_workspace(plat))
-            if not os.path.isfile(prefix_package):
-                logging.error('error dont exitsts: {}'.format(prefix_package))
-                return False
-            command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package)
-            node.ret += abs(utils.safe_system(command))
-
-        if node.ret != 0:
-            return False
-
-        # pack cmakefiles
-        if not parameters.no_packing_cmakefiles:
-            for plat in platforms:
-                base_folder = node.get_base_folder()
-                prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (base_folder, plat))
-                if not os.path.isfile(prefix_package_cmake):
-                    logging.error('error dont exitsts: {}'.format(prefix_package_cmake))
-                    return False
-                command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package_cmake)
-                node.ret += abs(utils.safe_system(command))
-
-    return True
diff --git a/node_modules/npm-mas-mas/cmaki_generator/upload_package.py b/node_modules/npm-mas-mas/cmaki_generator/upload_package.py
deleted file mode 100644
index 1d57c34..0000000
--- a/node_modules/npm-mas-mas/cmaki_generator/upload_package.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import os
-import sys
-import logging
-# import urllib2
-import argparse
-import logging
-# import poster
-import requests
-
-logger = logging.getLogger(__name__)
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--url', required=True, dest='url', help='url')
-    parser.add_argument('--filename', required=True, dest='filename', help='filename')
-    parser.add_argument('--field', dest='field', help='field name', default='uploaded')
-    parameters = parser.parse_args()
-
-    if not os.path.exists(parameters.filename):
-        logging.error('dont exists %s' % parameters.filename)
-        sys.exit(1)
-
-    with open(parameters.filename, 'rb') as f:
-        try:
-            response = requests.post(parameters.url, files={parameters.field: f})
-            if response.status_code == 200:
-                sys.exit(0)
-            else:
-                logger.error('Error uploading file {} to {}'.format(parameters.filename, parameters.url))
-                sys.exit(0)
-        except Exception as e:
-            logger.error('Exception uploading file {} to {}'.format(parameters.filename, parameters.url))
-            sys.exit(0)
-
-    # # Register the streaming http handlers with urllib2
-    # poster.streaminghttp.register_openers()
-    #
-    # with open(parameters.filename, "rb") as f:
-    #     datagen, headers = poster.encode.multipart_encode({parameters.field: f})
-    #     # Create the Request object
-    #     request = urllib2.Request(parameters.url, datagen, headers)
-    #     # Actually do the request, and get the response
-    #     handler = urllib2.urlopen(request)
-    #     logging.info( handler.read() )
-    #     if handler.getcode() == 200:
-    #         sys.exit(0)
-    #     else:
-    #         sys.exit(1)
diff --git a/node_modules/npm-mas-mas/cmaki_generator/utils.py b/node_modules/npm-mas-mas/cmaki_generator/utils.py
deleted file mode 100644
index 767d218..0000000
--- a/node_modules/npm-mas-mas/cmaki_generator/utils.py
+++ /dev/null
@@ -1,531 +0,0 @@
-import os
-import re
-import sys
-import shutil
-import logging
-import glob
-import subprocess
-import tarfile
-import zipfile -import time -import contextlib -import hashlib -import yaml -import json -import errno -import multiprocessing -import fnmatch -from requests import get # to make GET request -from distutils.spawn import find_executable -try: - import bz2 - python_has_bz2 = True -except ImportError: - logging.debug('python module bz2 built-in is not available') - python_has_bz2 = False - - -class NotFoundProgram(Exception): - def __init__(self, msg): - self._msg = msg - def __repr__(self): - return "%s" % self._msg - - -def is_windows(): - return sys.platform.startswith("win") - - -def smart_merge(dict1, dict2): - assert(dict1 is not None) - assert(dict2 is not None) - for key, value in dict2.items(): - if isinstance(value, dict): - try: - dict1[key].update(value) - except KeyError: - dict1[key] = value - elif isinstance(value, list): - try: - dict1[key] += value - except KeyError: - dict1[key] = value - else: - dict1[key] = value - return dict1 - - -def apply_replaces(element, dictionary): - if isinstance(element, dict): - new = {} - for k,v in element.items(): - new[k] = apply_replaces(v, dictionary) - return new - elif isinstance(element, list): - new = [] - for e in element: - new.append( apply_replaces(e, dictionary) ) - return new - elif isinstance(element, bool): - return element - elif element is not None: - new_element = str(element) - for k,v in dictionary.items(): - # find in original, not in replaced - if str(element).find(k) != -1: - new_element = new_element.replace(k, v) - return new_element - else: - return None - - -def apply_replaces_vars(element, dictionary): - newdict = {} - for k,v in dictionary.items(): - newdict['$%s' % k] = v - newdict['${%s}' % k] = v - return apply_replaces(element, newdict) - - -def tryremove(filename): - try: - logging.debug('Removing file %s' % (filename)) - os.remove(filename) - except OSError: - pass - - -def _tryremove_dir(directory): - i = 0 - tries = 3 - while os.path.isdir(directory): - try: - shutil.rmtree(directory) - if not os.path.exists(directory): - i = tries + 1 - except OSError: - logging.debug('Fail removing %s. 
Retry %d/%d' % (directory, i + 1, tries)) - if i < tries: - time.sleep(1) - else: - raise Exception("Fail removing %s" % os.path.abspath(directory)) - finally: - i += 1 - - -def tryremove_dir(source): - logging.debug('Removing directory %s' % (source)) - if sys.platform.startswith('win'): - if os.path.isdir(source) and safe_system('rd /s /q %s' % source) != 0: - raise Exception('Fail removing %s' % source) - else: - _tryremove_dir(source) - - -def tryremove_dir_empty(source): - try: - os.rmdir(source) - except OSError as ex: - if ex.errno != errno.ENOTEMPTY: - logging.debug('Removing empty directory %s' % (source)) - - -def download_from_url(url, file_name): - with open(file_name, "wb") as file: - response = get(url) - file.write(response.content) - - -def setup_logging(level, logname): - format_console_log = '%(asctime)s %(levelname)-7s %(message)s' - format_date = '%H-%M:%S' - dirlog = os.path.dirname(logname) - if dirlog != '': - trymkdir(dirlog) - logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - if(len(logging.root.handlers) == 1): - logging.root.removeHandler( logging.root.handlers[0] ) - handler = logging.StreamHandler() - handler.setLevel(level) - handler.setFormatter(logging.Formatter(format_console_log, format_date)) - logger.addHandler(handler) - handler2 = logging.FileHandler(logname) - handler2.setLevel(logging.DEBUG) - handler2.setFormatter(logging.Formatter(format_console_log, format_date)) - logger.addHandler(handler2) - - -def prompt_yes_no(default = False): - # raw_input returns the empty string for "enter" - yes = set(['yes','y', 'ye', '']) - no = set(['no','n']) - - choice = raw_input().lower() - if choice in yes: - return True - elif choice in no: - return False - else: - sys.stdout.write("Please respond with 'yes' or 'no'") - return default - - -def show_element(element, deep = 0): - if isinstance(element, dict): - for k,v in element.items(): - logging.info("%s<%s>" % ('\t'*deep, k)) - show_element(v, deep + 1) - elif isinstance(element, list): - for e in element: - show_element(e, deep + 1) - else: - logging.info('%s%s' % ('\t'*deep, element)) - - - -def rec_glob(rootdir, pattern): - - # logging.info('---> {} [START]'.format(rootdir)) - result = [] - for root, dirs, files in os.walk(rootdir): - # logging.info('---> {}'.format(root)) - for file in files: - # logging.info('---> {}'.format(file)) - if fnmatch.fnmatch(file, pattern): - # logging.info('---> {} [MATCH]'.format(file)) - result.append(os.path.join(root, file)) - return result - - -def trymkdir(directory): - if not os.path.exists( directory ): - os.makedirs( directory ) - - -def move_folder_recursive(source, destiny): - if not os.path.exists(source): - raise Exception('Error in move_folder_recursive: source not exists: %s' % source) - logging.debug('move recursive from {} to {}'.format(source, destiny)) - for archive in os.listdir(source): - # ignore some stuff - if archive.startswith('.git') or archive.startswith('.svn'): - continue - archive2 = os.path.join(source, archive) - destiny2 = os.path.join(destiny, archive) - if(os.path.isdir(archive2)): - move_folder_recursive(archive2, destiny2) - else: - if os.path.isfile(destiny2): - logging.debug('Replacing file %s' % destiny2) - tryremove(destiny2) - # try create destiny directory - trymkdir( os.path.dirname(destiny2) ) - # move file - shutil.move(archive2, destiny2) - - -def copy_folder_recursive(source, destiny): - if not os.path.exists(source): - raise Exception('Error in copy_folder_recursive: source not exists: %s' % source) - for archive 
in os.listdir(source): - # ignore some stuff - if archive.startswith('.git') or archive.startswith('.svn'): - continue - archive2 = os.path.join(source, archive) - destiny2 = os.path.join(destiny, archive) - if(os.path.isdir(archive2)): - copy_folder_recursive(archive2, destiny2) - else: - if os.path.isfile(destiny2): - logging.debug('Replacing file %s' % destiny2) - tryremove(destiny2) - # try create destiny directory - trymkdir( os.path.dirname(destiny2) ) - # copy file (and stat) - shutil.copy2(archive2, destiny2) - - -def extract_file(path, to_directory, environment): - - # convert to absolute - logging.debug('Extract file %s' % path) - path = os.path.abspath(path) - - if path.endswith('.zip'): - opener, mode = zipfile.ZipFile, 'r' - # elif path.endswith('.tar.gz') or path.endswith('.tgz'): - # opener, mode = tarfile.open, 'r:gz' - elif path.endswith('.tar.gz') or path.endswith('.tgz'): - # python have problems with big .tar.gz in linux -_- - if is_windows(): - with working_directory(to_directory): - logging.debug('Using cmake -E tar for package: %s' % path) - ret = safe_system('cmake -E tar zxvf %s' % path, env=environment) - ok = (ret == 0) - # be careful, early return - return ok - else: - with working_directory(to_directory): - logging.debug('Using system tar for package: %s' % path) - ret = safe_system('tar zxvf %s' % path, env=environment) - ok = (ret == 0) - # be careful, early return - return ok - elif path.endswith('.tar.bz2') or path.endswith('.tbz'): - # python have problems with big .tar.bz2 in windows - if is_windows(): - with working_directory(to_directory): - logging.debug('Using cmake -E tar for package: %s' % path) - ret = safe_system('cmake -E tar xvf %s' % path, env=environment) - ok = (ret == 0) - # be careful, early return - return ok - else: - if python_has_bz2: - opener, mode = tarfile.open, 'r:bz2' - else: - logging.warning('Not using python-bz2 module for uncompress: %s in %s' % (path, to_directory)) - with working_directory(to_directory): - logging.debug('Using bunzip2 and tar for package: %s' % path) - ret = safe_system('bunzip2 -c %s | tar xvf -' % path, env=environment) - ok = (ret == 0) - - # be careful, early return - return ok - elif path.endswith('.tar.xz'): - # needd "xz" - with working_directory(to_directory): - ret = safe_system('tar xpvf %s' % path, env=environment) - ok = (ret == 0) - return ok - else: - raise ValueError("Could not extract `%s` as no appropriate extractor is found" % path) - - # create directory if not exists - trymkdir(to_directory) - with working_directory(to_directory): - file = opener(path, mode) - try: - file.extractall() - finally: - file.close() - return True - - -# Copy Paste from run_tests (handler.py) -def detect_ncpus(): - return multiprocessing.cpu_count() - - -def get_norm_path(pathfile, native=True): - if native and is_windows(): - return pathfile.replace('/', '\\') - else: - return pathfile.replace('\\', '/') - - -def get_filename_no_ext(filename): - return os.path.splitext(filename)[0] - - -def get_soname(libfile, env=os.environ.copy()): - - if is_windows(): - logging.error('get_soname is not supported in windows') - return - - cmd = ['objdump', "-p", libfile] - for line in get_stdout(cmd, env, 'objdump'): - if line.find('SONAME') != -1: - return line.split()[1] - raise Exception('No soname detected in %s' % libfile) - - -def get_needed(libfile, env=os.environ.copy()): - - if is_windows(): - logging.error('get_needed is not supported in windows') - return - - cmd = ['objdump', "-p", libfile] - for line in 
get_stdout(cmd, env, 'objdump'): - if line.find('NEEDED') != -1: - yield line.split()[1] - - -def get_real_home(): - if sys.platform.startswith("sun"): - # problems launching subshell in solaris - return os.environ['HOME'] - elif sys.platform.startswith("linux"): - cmd = "REAL_HOME=$(cd $HOME && pwd -P) && echo $REAL_HOME" - for line in get_stdout(cmd): - return line - return os.environ['HOME'] - else: - return os.path.expanduser('~') - - -@contextlib.contextmanager -def working_directory(path): - prev_cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(prev_cwd) - - -def walklevel(some_dir, level=1): - ''' - os.walk() with max level - ''' - some_dir = some_dir.rstrip(os.path.sep) - if not os.path.isdir(some_dir): - logging.error('%s is not folder' % some_dir) - sys.exit(1) - - num_sep = some_dir.count(os.path.sep) - for root, dirs, files in os.walk(some_dir): - yield root, dirs, files - num_sep_this = root.count(os.path.sep) - if num_sep + level <= num_sep_this: - del dirs[:] - - -def get_revision_svn(repo, path_svn='svn', env=os.environ.copy()): - ''' - This command need svn in PATH - ''' - if os.path.exists(repo): - with working_directory(repo): - env_copy = env.copy() - svn_bin = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'bin')) - svn_lib = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'lib')) - env_copy['PATH'] = "%s:%s" % (svn_bin, env_copy['PATH']) - env_copy['LD_LIBRARY_PATH'] = "%s:%s" % (svn_lib, env_copy['LD_LIBRARY_PATH']) - cmd = "%s info" % path_svn - p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines=True, env=env_copy) - data, err = p.communicate() - - # clean stdout - data = [line.strip() for line in data.split('\n') if line.strip()] - - for line in data: - separator = 'Last Changed Rev: ' - if line.startswith(separator): - return int(line[len(separator):]) - else: - separator = 'Revisi.n del .ltimo cambio: ' - if re.match(separator, line) is not None: - return int(line[len(separator):]) - return -1 - - -def verbose(parameters, msg): - if parameters.verbose > 0: - logging.info(msg) - - -def superverbose(parameters, msg): - if parameters.verbose > 1: - logging.info(msg) - - -def hyperverbose(parameters, msg): - if parameters.verbose > 2: - logging.info(msg) - - -def md5sum(filename, blocksize=65536): - hash = hashlib.md5() - with open(filename, "rb") as f: - for block in iter(lambda: f.read(blocksize), b""): - hash.update(block) - return hash.hexdigest() - - -def serialize(pythonDict, fileName): - serialize_json(pythonDict, fileName) - - -def deserialize(fileName): - return deserialize_json(fileName) - - -def serialize_yaml(pythonDict, fileName): - serialiedData = yaml.dump(pythonDict, default_flow_style=True) - with open(fileName, 'wt') as f: - f.write(serialiedData) - - -def deserialize_yaml(fileName): - with open(fileName, 'rt') as f: - stringData = f.read() - return yaml.load(stringData) - - -def serialize_json(pythonDict, fileName): - serialiedData = json.dumps(pythonDict) - with open(fileName, 'wt') as f: - f.write(serialiedData) - - -def deserialize_json(fileName): - with open(fileName, 'rt') as f: - stringData = f.read() - return json.loads(stringData) - - -def get_stdout(cmd, env=os.environ.copy(), program_required=None): - if isinstance(cmd, list): - cmd = ' '.join(cmd) - # logging.debug('launch cmd: %s' % cmd) - - # search executable - ok = True - if program_required is not None: - ok = find_executable(program_required, env['PATH']) - if ok: - p = 
subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) - data, err = p.communicate() - data = [line.strip() for line in data.split('\n') if line.strip()] - for line in data: - # logging.debug('[out cmd] %s' % line) - yield line - else: - raise NotFoundProgram('Not found program %s, for execute: %s' % (program_required, cmd)) - - -def safe_system(cmd, env=None): - if env is None: - env = os.environ.copy() - logging.debug("exec command: %s" % cmd) - - if 'CMAKI_PRINT' in env: - try: - return subprocess.call('{}'.format(cmd), env=env, shell=True) - except OSError as e: - logging.warning(str(e)) - return -1 - else: - p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) - data, err = p.communicate() - data = [line for line in data.split('\n')] - if p.returncode != 0: - logging.error("begin@output: %s" % cmd) - for line in data: - if p.returncode != 0: - logging.warning(line) - else: - logging.debug(line) - if p.returncode != 0: - logging.error("end@output: %s" % cmd) - return p.returncode - - -if __name__ == '__main__': - print(rec_glob('.', '*.yml')) - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml deleted file mode 100644 index cf179bc..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: c -services: docker -os: linux -env: - - IMAGE=linux-x64 - # - IMAGE=windows-x86 - - IMAGE=windows-x64 - # - IMAGE=linux-x86 - - IMAGE=android-arm -# - IMAGE=browser-asmjs -script: - - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/docker.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt deleted file mode 100644 index 5cd8b41..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt +++ /dev/null @@ -1,6 +0,0 @@ -project(cmaki_identifier_project CXX) -cmake_minimum_required(VERSION 3.0) -set(CMAKE_CXX_STANDARD 14) -include_directories(boostorg_predef/include) -enable_testing() -add_subdirectory(tests) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/README.md b/node_modules/npm-mas-mas/cmaki_identifier/README.md deleted file mode 100644 index e49baa2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# identify your platform - -gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_identifier.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_identifier) - -MSVC 2015: [![Build status](https://ci.appveyor.com/api/projects/status/tljl8xip6m8joi86?svg=true)](https://ci.appveyor.com/project/makiolo/cmaki-identifier) - -## travis: -- linux_64_glibc_2.19-gcc_4-debug -- linux_64_glibc_2.19-gcc_4-release -- linux_64_glibc_2.19-clang_3-debug -- linux_64_glibc_2.19-clang_3-release -- macos_64-clang_7-debug -- macos_64-clang_7-release - -## appveyor: -- windows_32-msvc_2015-debug -- windows_32-msvc_2015-release -- windows_64-msvc_2015-debug -- windows_64-msvc_2015-release diff --git a/node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef b/node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef deleted file mode 160000 index a2a5010..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/boostorg_predef +++ /dev/null @@ -1 +0,0 @@ -Subproject commit a2a5010e2824b7740890a3bf463b8c4b8927aaa7 diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh
deleted file mode 100644
index ebffa54..0000000
--- a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-# if [ $# -e 0 ]; then
-#     echo $0: [ERROR], usage: ./cmaki_emulator.sh
-#     exit 1
-# fi
-
-export DIRPROGRAM="$( cd "$( dirname "$1" )" >/dev/null && pwd )"
-export BASENAMEPROGRAM=$(basename "$1")
-export CMAKI_PWD="${CMAKI_PWD:-$(pwd)}"
-export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}"
-export LD_LIBRARY_PATH=$(pwd):$LD_LIBRARY_PATH
-
-if [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x86" ]]; then
-    cd ${DIRPROGRAM}
-    wine ./$BASENAMEPROGRAM "${@:2}"
-elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x64" ]]; then
-    cd ${DIRPROGRAM}
-    wine ./$BASENAMEPROGRAM "${@:2}"
-elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/android-arm" ]]; then
-    cd ${DIRPROGRAM}
-    unset LD_LIBRARY_PATH
-    qemu-arm -L /usr/arm-linux-gnueabi ./$BASENAMEPROGRAM "${@:2}"
-elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv6" ]]; then
-    cd ${DIRPROGRAM}
-    qemu-arm ./$BASENAMEPROGRAM "${@:2}"
-elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv7" ]]; then
-    cd ${DIRPROGRAM}
-    qemu-arm ./$BASENAMEPROGRAM "${@:2}"
-elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/browser-asmjs" ]]; then
-    cd ${DIRPROGRAM}
-    nodejs ./$BASENAMEPROGRAM "${@:2}"
-else
-    $CMAKI_EMULATOR "$1" "${@:2}"
-fi
-
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake
deleted file mode 100644
index 7a50cc9..0000000
--- a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(PLATFORM "")
-set(dirscript ${CMAKE_CURRENT_LIST_DIR})
-IF(WIN32)
-    set(executable cmaki_identifier.exe)
-else()
-    set(executable cmaki_identifier.sh)
-endif()
-execute_process(COMMAND ${dirscript}/${executable}
-    OUTPUT_VARIABLE PLATFORM
-    OUTPUT_STRIP_TRAILING_WHITESPACE)
-MESSAGE("${PLATFORM}")
-
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh b/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh
deleted file mode 100755
index 371107b..0000000
--- a/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-export DIRSCRIPT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-export CC="${CC:-gcc}"
-export CXX="${CXX:-g++}"
-export MODE="${MODE:-Debug}"
-export CMAKI_PWD="${CMAKI_PWD:-$DIRSCRIPT}/.."
-export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}"
-export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}"
-
-if [ -f "cmaki_identifier.exe" ]; then
-    $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier.exe
-else
-    $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier
-fi
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt b/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt
deleted file mode 100644
index 08224be..0000000
--- a/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeCache.txt
+++ /dev/null
@@ -1,113 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: /home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug
-# It was generated by CMake: /usr/local/bin/cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor. -# The syntax for the file is as follows: -# KEY:TYPE=VALUE -# KEY is the name of a variable in the cache. -# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!. -# VALUE is the current value for the KEY. - -######################## -# EXTERNAL cache entries -######################## - -//No help, variable specified on the command line. -CMAKE_BUILD_TYPE:UNINITIALIZED=Debug - -//No help, variable specified on the command line. -CMAKE_CXX_COMPILER:UNINITIALIZED=g++ - -//No help, variable specified on the command line. -CMAKE_C_COMPILER:UNINITIALIZED=gcc - -//Value Computed by CMake. -CMAKE_FIND_PACKAGE_REDIRECTS_DIR:STATIC=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/pkgRedirects - -//No help, variable specified on the command line. -CMAKE_INSTALL_PREFIX:UNINITIALIZED=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/bin - -//No help, variable specified on the command line. -CMAKE_MODULE_PATH:UNINITIALIZED=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki - -//Value Computed by CMake -CMAKE_PROJECT_DESCRIPTION:STATIC= - -//Value Computed by CMake -CMAKE_PROJECT_HOMEPAGE_URL:STATIC= - -//Value Computed by CMake -CMAKE_PROJECT_NAME:STATIC=cmaki_identifier_project - -//The CMake toolchain file -CMAKE_TOOLCHAIN_FILE:FILEPATH=no cross compile - -//No help, variable specified on the command line. -COVERAGE:UNINITIALIZED=FALSE - -//No help, variable specified on the command line. -FIRST_ERROR:UNINITIALIZED=1 - -//No help, variable specified on the command line. -NPP_CACHE:UNINITIALIZED=TRUE - -//No help, variable specified on the command line. -TESTS_VALGRIND:UNINITIALIZED=FALSE - -//No help, variable specified on the command line. -WITH_CONAN:UNINITIALIZED=0 - -//Value Computed by CMake -cmaki_identifier_project_BINARY_DIR:STATIC=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug - -//Value Computed by CMake -cmaki_identifier_project_IS_TOP_LEVEL:STATIC=ON - -//Value Computed by CMake -cmaki_identifier_project_SOURCE_DIR:STATIC=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier - - -######################## -# INTERNAL cache entries -######################## - -//This is the directory where this CMakeCache.txt was created -CMAKE_CACHEFILE_DIR:INTERNAL=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug -//Major version of cmake used to create the current loaded cache -CMAKE_CACHE_MAJOR_VERSION:INTERNAL=3 -//Minor version of cmake used to create the current loaded cache -CMAKE_CACHE_MINOR_VERSION:INTERNAL=31 -//Patch version of cmake used to create the current loaded cache -CMAKE_CACHE_PATCH_VERSION:INTERNAL=6 -//Path to CMake executable. -CMAKE_COMMAND:INTERNAL=/usr/local/bin/cmake -//Path to cpack program executable. -CMAKE_CPACK_COMMAND:INTERNAL=/usr/local/bin/cpack -//Path to ctest program executable. -CMAKE_CTEST_COMMAND:INTERNAL=/usr/local/bin/ctest -//Path to cache edit program executable. -CMAKE_EDIT_COMMAND:INTERNAL=/usr/local/bin/ccmake -//Name of external makefile project generator. -CMAKE_EXTRA_GENERATOR:INTERNAL= -//Name of generator. -CMAKE_GENERATOR:INTERNAL=Unix Makefiles -//Generator instance identifier. 
-CMAKE_GENERATOR_INSTANCE:INTERNAL= -//Name of generator platform. -CMAKE_GENERATOR_PLATFORM:INTERNAL= -//Name of generator toolset. -CMAKE_GENERATOR_TOOLSET:INTERNAL= -//Source directory with the top level CMakeLists.txt file for this -// project -CMAKE_HOME_DIRECTORY:INTERNAL=/home/runner/work/design-patterns-cpp14/design-patterns-cpp14/node_modules/npm-mas-mas/cmaki_identifier -//number of local generators -CMAKE_NUMBER_OF_MAKEFILES:INTERNAL=1 -//Platform information initialized -CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1 -//Path to CMake installation. -CMAKE_ROOT:INTERNAL=/usr/local/share/cmake-3.31 -//uname command -CMAKE_UNAME:INTERNAL=/usr/bin/uname - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache b/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache deleted file mode 100644 index 3dccd73..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/gcc/Debug/CMakeFiles/cmake.check_cache +++ /dev/null @@ -1 +0,0 @@ -# This file is generated by cmake for dependency checking of the CMakeCache.txt file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki deleted file mode 120000 index 1e97214..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/cmaki +++ /dev/null @@ -1 +0,0 @@ -../npm-mas-mas/cmaki_scripts/cmaki.js \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which deleted file mode 120000 index 6f8415e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.bin/node-which +++ /dev/null @@ -1 +0,0 @@ -../which/bin/node-which \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json deleted file mode 100644 index a3f6d56..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/.package-lock.json +++ /dev/null @@ -1,471 +0,0 @@ -{ - "name": "cmaki_identifier", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": 
"sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastq": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", - "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "license": "Apache-2.0", - "engines": 
{ - "node": ">=10.17.0" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true, - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/npm-mas-mas": { - "version": "0.0.1", - "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", - "dev": true, - "license": "MIT", - "dependencies": { - "shelljs": ">=0.8.5" - }, - "bin": { - "cmaki": "cmaki_scripts/cmaki.js" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": 
"sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - 
"node_modules/shelljs": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", - "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "execa": "^5.1.1", - "fast-glob": "^3.3.2" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md deleted file mode 100644 index e0b218b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/README.md +++ /dev/null @@ -1,171 +0,0 @@ -# @nodelib/fs.scandir - -> List files and directories inside the specified directory. - -## :bulb: Highlights - -The package is aimed at obtaining information about entries in the directory. - -* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). -* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode). -* :link: Can safely work with broken symbolic links. - -## Install - -```console -npm install @nodelib/fs.scandir -``` - -## Usage - -```ts -import * as fsScandir from '@nodelib/fs.scandir'; - -fsScandir.scandir('path', (error, stats) => { /* … */ }); -``` - -## API - -### .scandir(path, [optionsOrSettings], callback) - -Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path with standard callback-style. - -```ts -fsScandir.scandir('path', (error, entries) => { /* … */ }); -fsScandir.scandir('path', {}, (error, entries) => { /* … */ }); -fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ }); -``` - -### .scandirSync(path, [optionsOrSettings]) - -Returns an array of plain objects ([`Entry`](#entry)) with information about entry for provided path. - -```ts -const entries = fsScandir.scandirSync('path'); -const entries = fsScandir.scandirSync('path', {}); -const entries = fsScandir.scandirSync(('path', new fsScandir.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsScandir.Settings({ followSymbolicLinks: false }); - -const entries = fsScandir.scandirSync('path', settings); -``` - -## Entry - -* `name` — The name of the entry (`unknown.txt`). -* `path` — The path of the entry relative to call directory (`root/unknown.txt`). -* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10 will be emulated by [`DirentFromStats`](./src/utils/fs.ts) class. -* `stats` (optional) — An instance of `fs.Stats` class. - -For example, the `scandir` call for `tools` directory with one directory inside: - -```ts -{ - dirent: Dirent { name: 'typedoc', /* … */ }, - name: 'typedoc', - path: 'tools/typedoc' -} -``` - -## Options - -### stats - -* Type: `boolean` -* Default: `false` - -Adds an instance of `fs.Stats` class to the [`Entry`](#entry). - -> :book: Always use `fs.readdir` without the `withFileTypes` option. ??TODO?? - -### followSymbolicLinks - -* Type: `boolean` -* Default: `false` - -Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. 
- -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`. - -### `pathSegmentSeparator` - -* Type: `string` -* Default: `path.sep` - -By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. - -### `fs` - -* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; - readdir?: typeof fs.readdir; - readdirSync?: typeof fs.readdirSync; -} - -const settings = new fsScandir.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## `old` and `modern` mode - -This package has two modes that are used depending on the environment and parameters of use. - -### old - -* Node.js below `10.10` or when the `stats` option is enabled - -When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links). - -### modern - -* Node.js 10.10+ and the `stats` option is disabled - -In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present. - -This mode makes fewer calls to the file system. It's faster. - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
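The `@nodelib/fs.scandir` README removed above documents the callback, sync, and `Settings` entry points one at a time. As a consolidated illustration, here is a minimal, hedged sketch of that same API; the `./tools` directory and the logging are illustrative only and not part of the package.

```ts
import * as fsScandir from '@nodelib/fs.scandir';

// Pre-created Settings instance, as the README recommends for frequent calls.
const settings = new fsScandir.Settings({
    stats: true,               // attach an fs.Stats instance to each entry
    followSymbolicLinks: true, // resolve symlinked entries with fs.stat
});

// Callback style: each entry carries `name`, `path`, `dirent` and (here) `stats`.
fsScandir.scandir('./tools', settings, (error, entries) => {
    if (error !== null) {
        console.error('scandir failed:', error.message);
        return;
    }
    for (const entry of entries) {
        console.log(entry.path, entry.dirent.isDirectory(), entry.stats?.size);
    }
});

// Synchronous style reusing the same settings.
const syncEntries = fsScandir.scandirSync('./tools', settings);
console.log(syncEntries.map((entry) => entry.name));
```

Note that enabling `stats` forces the "old" mode described in the README, so each entry costs an extra `fs.stat`/`fs.lstat` call.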
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts deleted file mode 100644 index 827f1db..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type * as fsStat from '@nodelib/fs.stat'; -import type { Dirent, ErrnoException } from '../types'; -export interface ReaddirAsynchronousMethod { - (filepath: string, options: { - withFileTypes: true; - }, callback: (error: ErrnoException | null, files: Dirent[]) => void): void; - (filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void; -} -export interface ReaddirSynchronousMethod { - (filepath: string, options: { - withFileTypes: true; - }): Dirent[]; - (filepath: string): string[]; -} -export declare type FileSystemAdapter = fsStat.FileSystemAdapter & { - readdir: ReaddirAsynchronousMethod; - readdirSync: ReaddirSynchronousMethod; -}; -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js deleted file mode 100644 index f0fe022..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/adapters/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts deleted file mode 100644 index 33f1749..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/** - * IS `true` for Node.js 10.10 and greater. - */ -export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js deleted file mode 100644 index 7e3d441..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/constants.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0; -const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.'); -if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) { - throw new Error(`Unexpected behavior. 
The 'process.versions.node' variable has invalid value: ${process.versions.node}`); -} -const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10); -const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10); -const SUPPORTED_MAJOR_VERSION = 10; -const SUPPORTED_MINOR_VERSION = 10; -const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION; -const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION; -/** - * IS `true` for Node.js 10.10 and greater. - */ -exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts deleted file mode 100644 index b9da83e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import type { Dirent, Entry } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function scandir(path: string, callback: AsyncCallback): void; -declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace scandir { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[]; -export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js deleted file mode 100644 index 99c70d3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/index.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.scandirSync = exports.scandir = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function scandir(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - async.read(path, getSettings(), optionsOrSettingsOrCallback); - return; - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} -exports.scandir = scandir; -function scandirSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.scandirSync = scandirSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts deleted file mode 100644 index 5829676..0000000 --- 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -/// -import type Settings from '../settings'; -import type { Entry } from '../types'; -export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void; -export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void; -export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js deleted file mode 100644 index e8e2f0a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/async.js +++ /dev/null @@ -1,104 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const rpl = require("run-parallel"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings, callback) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - readdirWithFileTypes(directory, settings, callback); - return; - } - readdir(directory, settings, callback); -} -exports.read = read; -function readdirWithFileTypes(directory, settings, callback) { - settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => { - if (readdirError !== null) { - callFailureCallback(callback, readdirError); - return; - } - const entries = dirents.map((dirent) => ({ - dirent, - name: dirent.name, - path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - })); - if (!settings.followSymbolicLinks) { - callSuccessCallback(callback, entries); - return; - } - const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings)); - rpl(tasks, (rplError, rplEntries) => { - if (rplError !== null) { - callFailureCallback(callback, rplError); - return; - } - callSuccessCallback(callback, rplEntries); - }); - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function makeRplTaskEntry(entry, settings) { - return (done) => { - if (!entry.dirent.isSymbolicLink()) { - done(null, entry); - return; - } - settings.fs.stat(entry.path, (statError, stats) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - done(statError); - return; - } - done(null, entry); - return; - } - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - done(null, entry); - }); - }; -} -function readdir(directory, settings, callback) { - settings.fs.readdir(directory, (readdirError, names) => { - if (readdirError !== null) { - callFailureCallback(callback, readdirError); - return; - } - const tasks = names.map((name) => { - const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - return (done) => { - fsStat.stat(path, settings.fsStatSettings, (error, stats) => { - if (error !== null) { - done(error); - return; - } - const entry = { - name, - path, - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - done(null, entry); - }); - }; 
- }); - rpl(tasks, (rplError, entries) => { - if (rplError !== null) { - callFailureCallback(callback, rplError); - return; - } - callSuccessCallback(callback, entries); - }); - }); -} -exports.readdir = readdir; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts deleted file mode 100644 index 2b4d08b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js deleted file mode 100644 index 8724cb5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/common.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = void 0; -function joinPathSegments(a, b, separator) { - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). - */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts deleted file mode 100644 index e05c8f0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type Settings from '../settings'; -import type { Entry } from '../types'; -export declare function read(directory: string, settings: Settings): Entry[]; -export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[]; -export declare function readdir(directory: string, settings: Settings): Entry[]; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js deleted file mode 100644 index 146db34..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/providers/sync.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.readdir = exports.readdirWithFileTypes = exports.read = void 0; -const fsStat = require("@nodelib/fs.stat"); -const constants_1 = require("../constants"); -const utils = require("../utils"); -const common = require("./common"); -function read(directory, settings) { - if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) { - return readdirWithFileTypes(directory, settings); - } - return readdir(directory, settings); -} -exports.read = read; -function readdirWithFileTypes(directory, settings) { - const dirents = settings.fs.readdirSync(directory, { withFileTypes: true }); - return dirents.map((dirent) => { - const entry = { - dirent, - name: dirent.name, - 
path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator) - }; - if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) { - try { - const stats = settings.fs.statSync(entry.path); - entry.dirent = utils.fs.createDirentFromStats(entry.name, stats); - } - catch (error) { - if (settings.throwErrorOnBrokenSymbolicLink) { - throw error; - } - } - } - return entry; - }); -} -exports.readdirWithFileTypes = readdirWithFileTypes; -function readdir(directory, settings) { - const names = settings.fs.readdirSync(directory); - return names.map((name) => { - const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator); - const stats = fsStat.statSync(entryPath, settings.fsStatSettings); - const entry = { - name, - path: entryPath, - dirent: utils.fs.createDirentFromStats(name, stats) - }; - if (settings.stats) { - entry.stats = stats; - } - return entry; - }); -} -exports.readdir = readdir; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts deleted file mode 100644 index a0db115..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fs from './adapters/fs'; -export interface Options { - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly followSymbolicLinks: boolean; - readonly fs: fs.FileSystemAdapter; - readonly pathSegmentSeparator: string; - readonly stats: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly fsStatSettings: fsStat.Settings; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js deleted file mode 100644 index 15a3e8c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/settings.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.stats = this._getValue(this._options.stats, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - this.fsStatSettings = new fsStat.Settings({ - followSymbolicLink: this.followSymbolicLinks, - fs: this.fs, - throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts deleted file mode 100644 index f326c5e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -/// -import type * as fs from 'fs'; -export interface Entry { - dirent: Dirent; - name: string; - path: string; - stats?: Stats; -} -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; -export interface Dirent { - isBlockDevice: () => boolean; - isCharacterDevice: () => boolean; - isDirectory: () => boolean; - isFIFO: () => boolean; - isFile: () => boolean; - isSocket: () => boolean; - isSymbolicLink: () => boolean; - name: string; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts deleted file mode 100644 index bb863f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Dirent, Stats } from '../types'; -export declare function createDirentFromStats(name: string, stats: Stats): Dirent; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js deleted file mode 100644 index ace7c74..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts deleted file mode 100644 index 1b41954..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import * as fs from './fs'; -export { fs }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js deleted file mode 100644 index f5de129..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/out/utils/index.js +++ /dev/null @@ -1,5 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.fs = void 0; -const fs = require("./fs"); -exports.fs = fs; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json deleted file mode 100644 index d3a8924..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.scandir/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nodelib/fs.scandir", - "version": "2.1.5", - "description": "List files and directories inside the specified directory", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "scandir", - "readdir", - "dirent" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4", - "@types/run-parallel": "^1.1.0" - }, - "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
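The compiled `adapters/fs.js` and `settings.js` for `@nodelib/fs.scandir` shown above merge any user-supplied methods over the built-in `fs` module (`createFileSystemAdapter` falls back to `FILE_SYSTEM_ADAPTER` for everything not overridden). A small sketch of that hook, assuming a hypothetical `countingLstat` wrapper used purely for illustration:

```ts
import * as fs from 'fs';
import * as fsScandir from '@nodelib/fs.scandir';

// Hypothetical adapter method: count lstat calls, then delegate to the real fs.lstat.
let lstatCalls = 0;
const countingLstat = (
    path: string,
    callback: (error: NodeJS.ErrnoException | null, stats: fs.Stats) => void,
): void => {
    lstatCalls += 1;
    fs.lstat(path, callback);
};

const settings = new fsScandir.Settings({
    fs: { lstat: countingLstat }, // methods not listed here fall back to the built-in fs module
    stats: true,                  // forces the stat-per-entry code path, so lstat is actually exercised
});

fsScandir.scandir('.', settings, (error, entries) => {
    if (error !== null) {
        throw error;
    }
    console.log(`${entries.length} entries, ${lstatCalls} lstat calls`);
});
```

The same `fs` option shape is accepted by `@nodelib/fs.stat` (whose adapter is shown below), since `fs.scandir` forwards its adapter when it constructs `fsStatSettings`.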
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md deleted file mode 100644 index 686f047..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# @nodelib/fs.stat - -> Get the status of a file with some features. - -## :bulb: Highlights - -Wrapper around standard method `fs.lstat` and `fs.stat` with some features. - -* :beginner: Normally follows symbolic link. -* :gear: Can safely work with broken symbolic link. - -## Install - -```console -npm install @nodelib/fs.stat -``` - -## Usage - -```ts -import * as fsStat from '@nodelib/fs.stat'; - -fsStat.stat('path', (error, stats) => { /* … */ }); -``` - -## API - -### .stat(path, [optionsOrSettings], callback) - -Returns an instance of `fs.Stats` class for provided path with standard callback-style. - -```ts -fsStat.stat('path', (error, stats) => { /* … */ }); -fsStat.stat('path', {}, (error, stats) => { /* … */ }); -fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ }); -``` - -### .statSync(path, [optionsOrSettings]) - -Returns an instance of `fs.Stats` class for provided path. - -```ts -const stats = fsStat.stat('path'); -const stats = fsStat.stat('path', {}); -const stats = fsStat.stat('path', new fsStat.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settings) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsStat.Settings({ followSymbolicLink: false }); - -const stats = fsStat.stat('path', settings); -``` - -## Options - -### `followSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Follow symbolic link or not. Call `fs.stat` on symbolic link if `true`. - -### `markSymbolicLink` - -* Type: `boolean` -* Default: `false` - -Mark symbolic link by setting the return value of `isSymbolicLink` function to always `true` (even after `fs.stat`). - -> :book: Can be used if you want to know what is hidden behind a symbolic link, but still continue to know that it is a symbolic link. - -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -### `fs` - -* Type: [`FileSystemAdapter`](./src/adapters/fs.ts) -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; -} - -const settings = new fsStat.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
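For symmetry with the scandir sketch, here is a hedged example of the `@nodelib/fs.stat` API documented in the README above; `some/link` is a placeholder path. The sync snippets in that README call `fsStat.stat`, but the `.statSync` heading and the typings below name the synchronous entry point `statSync`, which is what this sketch uses.

```ts
import * as fsStat from '@nodelib/fs.stat';

// Reused Settings instance, as the README recommends for frequent calls.
const settings = new fsStat.Settings({
    followSymbolicLink: true,              // resolve the link target with fs.stat
    markSymbolicLink: true,                // keep isSymbolicLink() === true even after following
    throwErrorOnBrokenSymbolicLink: false, // fall back to the lstat result instead of throwing
});

// Callback style.
fsStat.stat('some/link', settings, (error, stats) => {
    if (error !== null) {
        console.error('stat failed:', error.message);
        return;
    }
    console.log(stats.isSymbolicLink(), stats.size);
});

// Synchronous style.
const syncStats = fsStat.statSync('some/link', settings);
console.log(syncStats.isFile());
```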
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts deleted file mode 100644 index 3af759c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -/// -import * as fs from 'fs'; -import type { ErrnoException } from '../types'; -export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void; -export declare type StatSynchronousMethod = (path: string) => fs.Stats; -export interface FileSystemAdapter { - lstat: StatAsynchronousMethod; - stat: StatAsynchronousMethod; - lstatSync: StatSynchronousMethod; - statSync: StatSynchronousMethod; -} -export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export declare function createFileSystemAdapter(fsMethods?: Partial): FileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js deleted file mode 100644 index 8dc08c8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/adapters/fs.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -exports.FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - stat: fs.stat, - lstatSync: fs.lstatSync, - statSync: fs.statSync -}; -function createFileSystemAdapter(fsMethods) { - if (fsMethods === undefined) { - return exports.FILE_SYSTEM_ADAPTER; - } - return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods); -} -exports.createFileSystemAdapter = createFileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts deleted file mode 100644 index f95db99..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs'; -import * as async from './providers/async'; -import Settings, { Options } from './settings'; -import type { Stats } from './types'; -declare type AsyncCallback = async.AsyncCallback; -declare function stat(path: string, callback: AsyncCallback): void; -declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace stat { - function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats; -export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js deleted file mode 100644 index b23f751..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/index.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); 
-exports.statSync = exports.stat = exports.Settings = void 0; -const async = require("./providers/async"); -const sync = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function stat(path, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - async.read(path, getSettings(), optionsOrSettingsOrCallback); - return; - } - async.read(path, getSettings(optionsOrSettingsOrCallback), callback); -} -exports.stat = stat; -function statSync(path, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - return sync.read(path, settings); -} -exports.statSync = statSync; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts deleted file mode 100644 index 85423ce..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type Settings from '../settings'; -import type { ErrnoException, Stats } from '../types'; -export declare type AsyncCallback = (error: ErrnoException, stats: Stats) => void; -export declare function read(path: string, settings: Settings, callback: AsyncCallback): void; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js deleted file mode 100644 index 983ff0e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/async.js +++ /dev/null @@ -1,36 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings, callback) { - settings.fs.lstat(path, (lstatError, lstat) => { - if (lstatError !== null) { - callFailureCallback(callback, lstatError); - return; - } - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - callSuccessCallback(callback, lstat); - return; - } - settings.fs.stat(path, (statError, stat) => { - if (statError !== null) { - if (settings.throwErrorOnBrokenSymbolicLink) { - callFailureCallback(callback, statError); - return; - } - callSuccessCallback(callback, lstat); - return; - } - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - callSuccessCallback(callback, stat); - }); - }); -} -exports.read = read; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, result) { - callback(null, result); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts deleted file mode 100644 index 428c3d7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type Settings from '../settings'; -import type { Stats } from '../types'; -export declare function read(path: string, settings: Settings): Stats; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js deleted file mode 100644 index 1521c36..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/providers/sync.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; -function read(path, settings) { - const lstat = settings.fs.lstatSync(path); - if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { - return lstat; - } - try { - const stat = settings.fs.statSync(path); - if (settings.markSymbolicLink) { - stat.isSymbolicLink = () => true; - } - return stat; - } - catch (error) { - if (!settings.throwErrorOnBrokenSymbolicLink) { - return lstat; - } - throw error; - } -} -exports.read = read; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts deleted file mode 100644 index f4b3d44..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import * as fs from './adapters/fs'; -export interface Options { - followSymbolicLink?: boolean; - fs?: Partial; - markSymbolicLink?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly followSymbolicLink: boolean; - readonly fs: fs.FileSystemAdapter; - readonly markSymbolicLink: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js deleted file mode 100644 index 111ec09..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/settings.js +++ /dev/null @@ -1,16 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = require("./adapters/fs"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); - this.fs = fs.createFileSystemAdapter(this._options.fs); - this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? 
option : value; - } -} -exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts deleted file mode 100644 index 74c08ed..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -import type * as fs from 'fs'; -export declare type Stats = fs.Stats; -export declare type ErrnoException = NodeJS.ErrnoException; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json deleted file mode 100644 index f2540c2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.stat/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "@nodelib/fs.stat", - "version": "2.0.5", - "description": "Get the status of a file with some features", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.stat", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "stat" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4" - }, - "gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562" -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md deleted file mode 100644 index 6ccc08d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# @nodelib/fs.walk - -> A library for efficiently walking a directory recursively. - -## :bulb: Highlights - -* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional). -* :rocket: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type for performance reasons. See [`old` and `modern` mode](https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode). -* :gear: Built-in directories/files and error filtering system. -* :link: Can safely work with broken symbolic links. - -## Install - -```console -npm install @nodelib/fs.walk -``` - -## Usage - -```ts -import * as fsWalk from '@nodelib/fs.walk'; - -fsWalk.walk('path', (error, entries) => { /* … */ }); -``` - -## API - -### .walk(path, [optionsOrSettings], callback) - -Reads the directory recursively and asynchronously. Requires a callback function. - -> :book: If you want to use the Promise API, use `util.promisify`. - -```ts -fsWalk.walk('path', (error, entries) => { /* … */ }); -fsWalk.walk('path', {}, (error, entries) => { /* … */ }); -fsWalk.walk('path', new fsWalk.Settings(), (error, entries) => { /* … */ }); -``` - -### .walkStream(path, [optionsOrSettings]) - -Reads the directory recursively and asynchronously. [Readable Stream](https://nodejs.org/dist/latest-v12.x/docs/api/stream.html#stream_readable_streams) is used as a provider. - -```ts -const stream = fsWalk.walkStream('path'); -const stream = fsWalk.walkStream('path', {}); -const stream = fsWalk.walkStream('path', new fsWalk.Settings()); -``` - -### .walkSync(path, [optionsOrSettings]) - -Reads the directory recursively and synchronously. Returns an array of entries. - -```ts -const entries = fsWalk.walkSync('path'); -const entries = fsWalk.walkSync('path', {}); -const entries = fsWalk.walkSync('path', new fsWalk.Settings()); -``` - -#### path - -* Required: `true` -* Type: `string | Buffer | URL` - -A path to a file. If a URL is provided, it must use the `file:` protocol. - -#### optionsOrSettings - -* Required: `false` -* Type: `Options | Settings` -* Default: An instance of `Settings` class - -An [`Options`](#options) object or an instance of [`Settings`](#settings) class. - -> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class. - -### Settings([options]) - -A class of full settings of the package. - -```ts -const settings = new fsWalk.Settings({ followSymbolicLinks: true }); - -const entries = fsWalk.walkSync('path', settings); -``` - -## Entry - -* `name` — The name of the entry (`unknown.txt`). -* `path` — The path of the entry relative to call directory (`root/unknown.txt`). -* `dirent` — An instance of [`fs.Dirent`](./src/types/index.ts) class. -* [`stats`] — An instance of `fs.Stats` class. 
- -## Options - -### basePath - -* Type: `string` -* Default: `undefined` - -By default, all paths are built relative to the root path. You can use this option to set custom root path. - -In the example below we read the files from the `root` directory, but in the results the root path will be `custom`. - -```ts -fsWalk.walkSync('root'); // → ['root/file.txt'] -fsWalk.walkSync('root', { basePath: 'custom' }); // → ['custom/file.txt'] -``` - -### concurrency - -* Type: `number` -* Default: `Infinity` - -The maximum number of concurrent calls to `fs.readdir`. - -> :book: The higher the number, the higher performance and the load on the File System. If you want to read in quiet mode, set the value to `4 * os.cpus().length` (4 is default size of [thread pool work scheduling](http://docs.libuv.org/en/v1.x/threadpool.html#thread-pool-work-scheduling)). - -### deepFilter - -* Type: [`DeepFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that indicates whether the directory will be read deep or not. - -```ts -// Skip all directories that starts with `node_modules` -const filter: DeepFilterFunction = (entry) => !entry.path.startsWith('node_modules'); -``` - -### entryFilter - -* Type: [`EntryFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that indicates whether the entry will be included to results or not. - -```ts -// Exclude all `.js` files from results -const filter: EntryFilterFunction = (entry) => !entry.name.endsWith('.js'); -``` - -### errorFilter - -* Type: [`ErrorFilterFunction`](./src/settings.ts) -* Default: `undefined` - -A function that allows you to skip errors that occur when reading directories. - -For example, you can skip `ENOENT` errors if required: - -```ts -// Skip all ENOENT errors -const filter: ErrorFilterFunction = (error) => error.code == 'ENOENT'; -``` - -### stats - -* Type: `boolean` -* Default: `false` - -Adds an instance of `fs.Stats` class to the [`Entry`](#entry). - -> :book: Always use `fs.readdir` with additional `fs.lstat/fs.stat` calls to determine the entry type. - -### followSymbolicLinks - -* Type: `boolean` -* Default: `false` - -Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`. - -### `throwErrorOnBrokenSymbolicLink` - -* Type: `boolean` -* Default: `true` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -### `pathSegmentSeparator` - -* Type: `string` -* Default: `path.sep` - -By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead. - -### `fs` - -* Type: `FileSystemAdapter` -* Default: A default FS methods - -By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own. - -```ts -interface FileSystemAdapter { - lstat: typeof fs.lstat; - stat: typeof fs.stat; - lstatSync: typeof fs.lstatSync; - statSync: typeof fs.statSync; - readdir: typeof fs.readdir; - readdirSync: typeof fs.readdirSync; -} - -const settings = new fsWalk.Settings({ - fs: { lstat: fakeLstat } -}); -``` - -## Changelog - -See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
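The `@nodelib/fs.walk` README removed above documents its filter options one at a time; a minimal sketch combining them is shown below (the walked directory, filter predicates, and file extensions are illustrative assumptions, not taken from this repository).

```ts
import * as fsWalk from '@nodelib/fs.walk';

const settings = new fsWalk.Settings({
  // Skip node_modules subtrees entirely.
  deepFilter: (entry) => !entry.path.includes('node_modules'),
  // Keep only TypeScript sources in the results.
  entryFilter: (entry) => entry.name.endsWith('.ts'),
  // Ignore entries that disappear while the walk is in progress.
  errorFilter: (error) => error.code === 'ENOENT',
  stats: false,
});

fsWalk.walk('.', settings, (error, entries) => {
  if (error !== null) {
    console.error(error);
    return;
  }
  console.log(entries.map((entry) => entry.path));
});

// Synchronous and stream variants documented in the removed README:
// const entries = fsWalk.walkSync('.', settings);
// const stream = fsWalk.walkStream('.', settings);
```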
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts deleted file mode 100644 index 8864c7b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import type { Readable } from 'stream'; -import type { Dirent, FileSystemAdapter } from '@nodelib/fs.scandir'; -import { AsyncCallback } from './providers/async'; -import Settings, { DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction, Options } from './settings'; -import type { Entry } from './types'; -declare function walk(directory: string, callback: AsyncCallback): void; -declare function walk(directory: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void; -declare namespace walk { - function __promisify__(directory: string, optionsOrSettings?: Options | Settings): Promise; -} -declare function walkSync(directory: string, optionsOrSettings?: Options | Settings): Entry[]; -declare function walkStream(directory: string, optionsOrSettings?: Options | Settings): Readable; -export { walk, walkSync, walkStream, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, Options, DeepFilterFunction, EntryFilterFunction, ErrorFilterFunction }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js deleted file mode 100644 index 1520787..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/index.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Settings = exports.walkStream = exports.walkSync = exports.walk = void 0; -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -exports.Settings = settings_1.default; -function walk(directory, optionsOrSettingsOrCallback, callback) { - if (typeof optionsOrSettingsOrCallback === 'function') { - new async_1.default(directory, getSettings()).read(optionsOrSettingsOrCallback); - return; - } - new async_1.default(directory, getSettings(optionsOrSettingsOrCallback)).read(callback); -} -exports.walk = walk; -function walkSync(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new sync_1.default(directory, settings); - return provider.read(); -} -exports.walkSync = walkSync; -function walkStream(directory, optionsOrSettings) { - const settings = getSettings(optionsOrSettings); - const provider = new stream_1.default(directory, settings); - return provider.read(); -} -exports.walkStream = walkStream; -function getSettings(settingsOrOptions = {}) { - if (settingsOrOptions instanceof settings_1.default) { - return settingsOrOptions; - } - return new settings_1.default(settingsOrOptions); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts deleted file mode 100644 index 0f6717d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import AsyncReader from '../readers/async'; -import type Settings from '../settings'; 
-import type { Entry, Errno } from '../types'; -export declare type AsyncCallback = (error: Errno, entries: Entry[]) => void; -export default class AsyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: AsyncReader; - private readonly _storage; - constructor(_root: string, _settings: Settings); - read(callback: AsyncCallback): void; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js deleted file mode 100644 index 51d3be5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/async.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -class AsyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._storage = []; - } - read(callback) { - this._reader.onError((error) => { - callFailureCallback(callback, error); - }); - this._reader.onEntry((entry) => { - this._storage.push(entry); - }); - this._reader.onEnd(() => { - callSuccessCallback(callback, this._storage); - }); - this._reader.read(); - } -} -exports.default = AsyncProvider; -function callFailureCallback(callback, error) { - callback(error); -} -function callSuccessCallback(callback, entries) { - callback(null, entries); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts deleted file mode 100644 index 874f60c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import AsyncProvider from './async'; -import StreamProvider from './stream'; -import SyncProvider from './sync'; -export { AsyncProvider, StreamProvider, SyncProvider }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js deleted file mode 100644 index 4c2529c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/index.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SyncProvider = exports.StreamProvider = exports.AsyncProvider = void 0; -const async_1 = require("./async"); -exports.AsyncProvider = async_1.default; -const stream_1 = require("./stream"); -exports.StreamProvider = stream_1.default; -const sync_1 = require("./sync"); -exports.SyncProvider = sync_1.default; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts deleted file mode 100644 index 294185f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -/// -import { Readable } from 'stream'; -import AsyncReader from '../readers/async'; -import type Settings from '../settings'; -export default class StreamProvider { - private readonly _root; - private readonly _settings; - protected readonly 
_reader: AsyncReader; - protected readonly _stream: Readable; - constructor(_root: string, _settings: Settings); - read(): Readable; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js deleted file mode 100644 index 51298b0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/stream.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const async_1 = require("../readers/async"); -class StreamProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new async_1.default(this._root, this._settings); - this._stream = new stream_1.Readable({ - objectMode: true, - read: () => { }, - destroy: () => { - if (!this._reader.isDestroyed) { - this._reader.destroy(); - } - } - }); - } - read() { - this._reader.onError((error) => { - this._stream.emit('error', error); - }); - this._reader.onEntry((entry) => { - this._stream.push(entry); - }); - this._reader.onEnd(() => { - this._stream.push(null); - }); - this._reader.read(); - return this._stream; - } -} -exports.default = StreamProvider; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts deleted file mode 100644 index 551c42e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import SyncReader from '../readers/sync'; -import type Settings from '../settings'; -import type { Entry } from '../types'; -export default class SyncProvider { - private readonly _root; - private readonly _settings; - protected readonly _reader: SyncReader; - constructor(_root: string, _settings: Settings); - read(): Entry[]; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js deleted file mode 100644 index faab6ca..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/providers/sync.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -class SyncProvider { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._reader = new sync_1.default(this._root, this._settings); - } - read() { - return this._reader.read(); - } -} -exports.default = SyncProvider; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts deleted file mode 100644 index 9acf4e6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -/// -import { EventEmitter } from 'events'; -import * as fsScandir from '@nodelib/fs.scandir'; -import type Settings from '../settings'; -import type { Entry, Errno } from '../types'; -import Reader from './reader'; -declare type EntryEventCallback = (entry: Entry) => void; -declare type ErrorEventCallback = (error: Errno) => void; 
-declare type EndEventCallback = () => void; -export default class AsyncReader extends Reader { - protected readonly _settings: Settings; - protected readonly _scandir: typeof fsScandir.scandir; - protected readonly _emitter: EventEmitter; - private readonly _queue; - private _isFatalError; - private _isDestroyed; - constructor(_root: string, _settings: Settings); - read(): EventEmitter; - get isDestroyed(): boolean; - destroy(): void; - onEntry(callback: EntryEventCallback): void; - onError(callback: ErrorEventCallback): void; - onEnd(callback: EndEventCallback): void; - private _pushToQueue; - private _worker; - private _handleError; - private _handleEntry; - private _emitEntry; -} -export {}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js deleted file mode 100644 index ebe8dd5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/async.js +++ /dev/null @@ -1,97 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const events_1 = require("events"); -const fsScandir = require("@nodelib/fs.scandir"); -const fastq = require("fastq"); -const common = require("./common"); -const reader_1 = require("./reader"); -class AsyncReader extends reader_1.default { - constructor(_root, _settings) { - super(_root, _settings); - this._settings = _settings; - this._scandir = fsScandir.scandir; - this._emitter = new events_1.EventEmitter(); - this._queue = fastq(this._worker.bind(this), this._settings.concurrency); - this._isFatalError = false; - this._isDestroyed = false; - this._queue.drain = () => { - if (!this._isFatalError) { - this._emitter.emit('end'); - } - }; - } - read() { - this._isFatalError = false; - this._isDestroyed = false; - setImmediate(() => { - this._pushToQueue(this._root, this._settings.basePath); - }); - return this._emitter; - } - get isDestroyed() { - return this._isDestroyed; - } - destroy() { - if (this._isDestroyed) { - throw new Error('The reader is already destroyed'); - } - this._isDestroyed = true; - this._queue.killAndDrain(); - } - onEntry(callback) { - this._emitter.on('entry', callback); - } - onError(callback) { - this._emitter.once('error', callback); - } - onEnd(callback) { - this._emitter.once('end', callback); - } - _pushToQueue(directory, base) { - const queueItem = { directory, base }; - this._queue.push(queueItem, (error) => { - if (error !== null) { - this._handleError(error); - } - }); - } - _worker(item, done) { - this._scandir(item.directory, this._settings.fsScandirSettings, (error, entries) => { - if (error !== null) { - done(error, undefined); - return; - } - for (const entry of entries) { - this._handleEntry(entry, item.base); - } - done(null, undefined); - }); - } - _handleError(error) { - if (this._isDestroyed || !common.isFatalError(this._settings, error)) { - return; - } - this._isFatalError = true; - this._isDestroyed = true; - this._emitter.emit('error', error); - } - _handleEntry(entry, base) { - if (this._isDestroyed || this._isFatalError) { - return; - } - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._emitEntry(entry); - } - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - 
this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); - } - } - _emitEntry(entry) { - this._emitter.emit('entry', entry); - } -} -exports.default = AsyncReader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts deleted file mode 100644 index 5985f97..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { FilterFunction } from '../settings'; -import type Settings from '../settings'; -import type { Errno } from '../types'; -export declare function isFatalError(settings: Settings, error: Errno): boolean; -export declare function isAppliedFilter(filter: FilterFunction | null, value: T): boolean; -export declare function replacePathSegmentSeparator(filepath: string, separator: string): string; -export declare function joinPathSegments(a: string, b: string, separator: string): string; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js deleted file mode 100644 index a93572f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/common.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.joinPathSegments = exports.replacePathSegmentSeparator = exports.isAppliedFilter = exports.isFatalError = void 0; -function isFatalError(settings, error) { - if (settings.errorFilter === null) { - return true; - } - return !settings.errorFilter(error); -} -exports.isFatalError = isFatalError; -function isAppliedFilter(filter, value) { - return filter === null || filter(value); -} -exports.isAppliedFilter = isAppliedFilter; -function replacePathSegmentSeparator(filepath, separator) { - return filepath.split(/[/\\]/).join(separator); -} -exports.replacePathSegmentSeparator = replacePathSegmentSeparator; -function joinPathSegments(a, b, separator) { - if (a === '') { - return b; - } - /** - * The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`). 
- */ - if (a.endsWith(separator)) { - return a + b; - } - return a + separator + b; -} -exports.joinPathSegments = joinPathSegments; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts deleted file mode 100644 index e1f383b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type Settings from '../settings'; -export default class Reader { - protected readonly _root: string; - protected readonly _settings: Settings; - constructor(_root: string, _settings: Settings); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js deleted file mode 100644 index 782f07c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/reader.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const common = require("./common"); -class Reader { - constructor(_root, _settings) { - this._root = _root; - this._settings = _settings; - this._root = common.replacePathSegmentSeparator(_root, _settings.pathSegmentSeparator); - } -} -exports.default = Reader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts deleted file mode 100644 index af41033..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import type { Entry } from '../types'; -import Reader from './reader'; -export default class SyncReader extends Reader { - protected readonly _scandir: typeof fsScandir.scandirSync; - private readonly _storage; - private readonly _queue; - read(): Entry[]; - private _pushToQueue; - private _handleQueue; - private _handleDirectory; - private _handleError; - private _handleEntry; - private _pushToStorage; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js deleted file mode 100644 index 9a8d5a6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/readers/sync.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsScandir = require("@nodelib/fs.scandir"); -const common = require("./common"); -const reader_1 = require("./reader"); -class SyncReader extends reader_1.default { - constructor() { - super(...arguments); - this._scandir = fsScandir.scandirSync; - this._storage = []; - this._queue = new Set(); - } - read() { - this._pushToQueue(this._root, this._settings.basePath); - this._handleQueue(); - return this._storage; - } - _pushToQueue(directory, base) { - this._queue.add({ directory, base }); - } - _handleQueue() { - for (const item of this._queue.values()) { - this._handleDirectory(item.directory, item.base); - } - } - _handleDirectory(directory, base) { - try { - const entries = this._scandir(directory, this._settings.fsScandirSettings); - for (const entry of entries) { - 
this._handleEntry(entry, base); - } - } - catch (error) { - this._handleError(error); - } - } - _handleError(error) { - if (!common.isFatalError(this._settings, error)) { - return; - } - throw error; - } - _handleEntry(entry, base) { - const fullpath = entry.path; - if (base !== undefined) { - entry.path = common.joinPathSegments(base, entry.name, this._settings.pathSegmentSeparator); - } - if (common.isAppliedFilter(this._settings.entryFilter, entry)) { - this._pushToStorage(entry); - } - if (entry.dirent.isDirectory() && common.isAppliedFilter(this._settings.deepFilter, entry)) { - this._pushToQueue(fullpath, base === undefined ? undefined : entry.path); - } - } - _pushToStorage(entry) { - this._storage.push(entry); - } -} -exports.default = SyncReader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts deleted file mode 100644 index d1c4b45..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as fsScandir from '@nodelib/fs.scandir'; -import type { Entry, Errno } from './types'; -export declare type FilterFunction = (value: T) => boolean; -export declare type DeepFilterFunction = FilterFunction; -export declare type EntryFilterFunction = FilterFunction; -export declare type ErrorFilterFunction = FilterFunction; -export interface Options { - basePath?: string; - concurrency?: number; - deepFilter?: DeepFilterFunction; - entryFilter?: EntryFilterFunction; - errorFilter?: ErrorFilterFunction; - followSymbolicLinks?: boolean; - fs?: Partial; - pathSegmentSeparator?: string; - stats?: boolean; - throwErrorOnBrokenSymbolicLink?: boolean; -} -export default class Settings { - private readonly _options; - readonly basePath?: string; - readonly concurrency: number; - readonly deepFilter: DeepFilterFunction | null; - readonly entryFilter: EntryFilterFunction | null; - readonly errorFilter: ErrorFilterFunction | null; - readonly pathSegmentSeparator: string; - readonly fsScandirSettings: fsScandir.Settings; - constructor(_options?: Options); - private _getValue; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js deleted file mode 100644 index d7a85c8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/settings.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsScandir = require("@nodelib/fs.scandir"); -class Settings { - constructor(_options = {}) { - this._options = _options; - this.basePath = this._getValue(this._options.basePath, undefined); - this.concurrency = this._getValue(this._options.concurrency, Number.POSITIVE_INFINITY); - this.deepFilter = this._getValue(this._options.deepFilter, null); - this.entryFilter = this._getValue(this._options.entryFilter, null); - this.errorFilter = this._getValue(this._options.errorFilter, null); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep); - this.fsScandirSettings = new fsScandir.Settings({ - followSymbolicLinks: this._options.followSymbolicLinks, - fs: this._options.fs, - pathSegmentSeparator: this._options.pathSegmentSeparator, - stats: this._options.stats, - throwErrorOnBrokenSymbolicLink: 
this._options.throwErrorOnBrokenSymbolicLink - }); - } - _getValue(option, value) { - return option !== null && option !== void 0 ? option : value; - } -} -exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts deleted file mode 100644 index 6ee9bd3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/// -import type * as scandir from '@nodelib/fs.scandir'; -export declare type Entry = scandir.Entry; -export declare type Errno = NodeJS.ErrnoException; -export interface QueueItem { - directory: string; - base?: string; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json deleted file mode 100644 index 86bfce4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/@nodelib/fs.walk/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nodelib/fs.walk", - "version": "1.2.8", - "description": "A library for efficiently walking a directory recursively", - "license": "MIT", - "repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.walk", - "keywords": [ - "NodeLib", - "fs", - "FileSystem", - "file system", - "walk", - "scanner", - "crawler" - ], - "engines": { - "node": ">= 8" - }, - "files": [ - "out/**", - "!out/**/*.map", - "!out/**/*.spec.*", - "!out/**/tests/**" - ], - "main": "out/index.js", - "typings": "out/index.d.ts", - "scripts": { - "clean": "rimraf {tsconfig.tsbuildinfo,out}", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc -b .", - "compile:watch": "tsc -p . --watch --sourceMap", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile:watch" - }, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "devDependencies": { - "@nodelib/fs.macchiato": "1.0.4" - }, - "gitHead": "1e5bad48565da2b06b8600e744324ea240bf49d8" -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md deleted file mode 100644 index f59dd60..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/README.md +++ /dev/null @@ -1,586 +0,0 @@ -# braces [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/braces.svg?style=flat)](https://www.npmjs.com/package/braces) [![NPM monthly downloads](https://img.shields.io/npm/dm/braces.svg?style=flat)](https://npmjs.org/package/braces) [![NPM total downloads](https://img.shields.io/npm/dt/braces.svg?style=flat)](https://npmjs.org/package/braces) [![Linux Build Status](https://img.shields.io/travis/micromatch/braces.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/braces) - -> Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save braces -``` - -## v3.0.0 Released!! - -See the [changelog](CHANGELOG.md) for details. - -## Why use braces? - -Brace patterns make globs more powerful by adding the ability to match specific ranges and sequences of characters. - -- **Accurate** - complete support for the [Bash 4.3 Brace Expansion](www.gnu.org/software/bash/) specification (passes all of the Bash braces tests) -- **[fast and performant](#benchmarks)** - Starts fast, runs fast and [scales well](#performance) as patterns increase in complexity. -- **Organized code base** - The parser and compiler are easy to maintain and update when edge cases crop up. -- **Well-tested** - Thousands of test assertions, and passes all of the Bash, minimatch, and [brace-expansion](https://github.com/juliangruber/brace-expansion) unit tests (as of the date this was written). -- **Safer** - You shouldn't have to worry about users defining aggressive or malicious brace patterns that can break your application. 
Braces takes measures to prevent malicious regex that can be used for DDoS attacks (see [catastrophic backtracking](https://www.regular-expressions.info/catastrophic.html)). -- [Supports lists](#lists) - (aka "sets") `a/{b,c}/d` => `['a/b/d', 'a/c/d']` -- [Supports sequences](#sequences) - (aka "ranges") `{01..03}` => `['01', '02', '03']` -- [Supports steps](#steps) - (aka "increments") `{2..10..2}` => `['2', '4', '6', '8', '10']` -- [Supports escaping](#escaping) - To prevent evaluation of special characters. - -## Usage - -The main export is a function that takes one or more brace `patterns` and `options`. - -```js -const braces = require('braces'); -// braces(patterns[, options]); - -console.log(braces(['{01..05}', '{a..e}'])); -//=> ['(0[1-5])', '([a-e])'] - -console.log(braces(['{01..05}', '{a..e}'], { expand: true })); -//=> ['01', '02', '03', '04', '05', 'a', 'b', 'c', 'd', 'e'] -``` - -### Brace Expansion vs. Compilation - -By default, brace patterns are compiled into strings that are optimized for creating regular expressions and matching. - -**Compiled** - -```js -console.log(braces('a/{x,y,z}/b')); -//=> ['a/(x|y|z)/b'] -console.log(braces(['a/{01..20}/b', 'a/{1..5}/b'])); -//=> [ 'a/(0[1-9]|1[0-9]|20)/b', 'a/([1-5])/b' ] -``` - -**Expanded** - -Enable brace expansion by setting the `expand` option to true, or by using [braces.expand()](#expand) (returns an array similar to what you'd expect from Bash, or `echo {1..5}`, or [minimatch](https://github.com/isaacs/minimatch)): - -```js -console.log(braces('a/{x,y,z}/b', { expand: true })); -//=> ['a/x/b', 'a/y/b', 'a/z/b'] - -console.log(braces.expand('{01..10}')); -//=> ['01','02','03','04','05','06','07','08','09','10'] -``` - -### Lists - -Expand lists (like Bash "sets"): - -```js -console.log(braces('a/{foo,bar,baz}/*.js')); -//=> ['a/(foo|bar|baz)/*.js'] - -console.log(braces.expand('a/{foo,bar,baz}/*.js')); -//=> ['a/foo/*.js', 'a/bar/*.js', 'a/baz/*.js'] -``` - -### Sequences - -Expand ranges of characters (like Bash "sequences"): - -```js -console.log(braces.expand('{1..3}')); // ['1', '2', '3'] -console.log(braces.expand('a/{1..3}/b')); // ['a/1/b', 'a/2/b', 'a/3/b'] -console.log(braces('{a..c}', { expand: true })); // ['a', 'b', 'c'] -console.log(braces('foo/{a..c}', { expand: true })); // ['foo/a', 'foo/b', 'foo/c'] - -// supports zero-padded ranges -console.log(braces('a/{01..03}/b')); //=> ['a/(0[1-3])/b'] -console.log(braces('a/{001..300}/b')); //=> ['a/(0{2}[1-9]|0[1-9][0-9]|[12][0-9]{2}|300)/b'] -``` - -See [fill-range](https://github.com/jonschlinkert/fill-range) for all available range-expansion options. - -### Steppped ranges - -Steps, or increments, may be used with ranges: - -```js -console.log(braces.expand('{2..10..2}')); -//=> ['2', '4', '6', '8', '10'] - -console.log(braces('{2..10..2}')); -//=> ['(2|4|6|8|10)'] -``` - -When the [.optimize](#optimize) method is used, or [options.optimize](#optionsoptimize) is set to true, sequences are passed to [to-regex-range](https://github.com/jonschlinkert/to-regex-range) for expansion. - -### Nesting - -Brace patterns may be nested. The results of each expanded string are not sorted, and left to right order is preserved. 
- -**"Expanded" braces** - -```js -console.log(braces.expand('a{b,c,/{x,y}}/e')); -//=> ['ab/e', 'ac/e', 'a/x/e', 'a/y/e'] - -console.log(braces.expand('a/{x,{1..5},y}/c')); -//=> ['a/x/c', 'a/1/c', 'a/2/c', 'a/3/c', 'a/4/c', 'a/5/c', 'a/y/c'] -``` - -**"Optimized" braces** - -```js -console.log(braces('a{b,c,/{x,y}}/e')); -//=> ['a(b|c|/(x|y))/e'] - -console.log(braces('a/{x,{1..5},y}/c')); -//=> ['a/(x|([1-5])|y)/c'] -``` - -### Escaping - -**Escaping braces** - -A brace pattern will not be expanded or evaluted if _either the opening or closing brace is escaped_: - -```js -console.log(braces.expand('a\\{d,c,b}e')); -//=> ['a{d,c,b}e'] - -console.log(braces.expand('a{d,c,b\\}e')); -//=> ['a{d,c,b}e'] -``` - -**Escaping commas** - -Commas inside braces may also be escaped: - -```js -console.log(braces.expand('a{b\\,c}d')); -//=> ['a{b,c}d'] - -console.log(braces.expand('a{d\\,c,b}e')); -//=> ['ad,ce', 'abe'] -``` - -**Single items** - -Following bash conventions, a brace pattern is also not expanded when it contains a single character: - -```js -console.log(braces.expand('a{b}c')); -//=> ['a{b}c'] -``` - -## Options - -### options.maxLength - -**Type**: `Number` - -**Default**: `10,000` - -**Description**: Limit the length of the input string. Useful when the input string is generated or your application allows users to pass a string, et cetera. - -```js -console.log(braces('a/{b,c}/d', { maxLength: 3 })); //=> throws an error -``` - -### options.expand - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Generate an "expanded" brace pattern (alternatively you can use the `braces.expand()` method, which does the same thing). - -```js -console.log(braces('a/{b,c}/d', { expand: true })); -//=> [ 'a/b/d', 'a/c/d' ] -``` - -### options.nodupes - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Remove duplicates from the returned array. - -### options.rangeLimit - -**Type**: `Number` - -**Default**: `1000` - -**Description**: To prevent malicious patterns from being passed by users, an error is thrown when `braces.expand()` is used or `options.expand` is true and the generated range will exceed the `rangeLimit`. - -You can customize `options.rangeLimit` or set it to `Inifinity` to disable this altogether. - -**Examples** - -```js -// pattern exceeds the "rangeLimit", so it's optimized automatically -console.log(braces.expand('{1..1000}')); -//=> ['([1-9]|[1-9][0-9]{1,2}|1000)'] - -// pattern does not exceed "rangeLimit", so it's NOT optimized -console.log(braces.expand('{1..100}')); -//=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '100'] -``` - -### options.transform - -**Type**: `Function` - -**Default**: `undefined` - -**Description**: Customize range expansion. - -**Example: Transforming non-numeric values** - -```js -const alpha = braces.expand('x/{a..e}/y', { - transform(value, index) { - // When non-numeric values are passed, "value" is a character code. 
- return 'foo/' + String.fromCharCode(value) + '-' + index; - }, -}); -console.log(alpha); -//=> [ 'x/foo/a-0/y', 'x/foo/b-1/y', 'x/foo/c-2/y', 'x/foo/d-3/y', 'x/foo/e-4/y' ] -``` - -**Example: Transforming numeric values** - -```js -const numeric = braces.expand('{1..5}', { - transform(value) { - // when numeric values are passed, "value" is a number - return 'foo/' + value * 2; - }, -}); -console.log(numeric); -//=> [ 'foo/2', 'foo/4', 'foo/6', 'foo/8', 'foo/10' ] -``` - -### options.quantifiers - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: In regular expressions, quanitifiers can be used to specify how many times a token can be repeated. For example, `a{1,3}` will match the letter `a` one to three times. - -Unfortunately, regex quantifiers happen to share the same syntax as [Bash lists](#lists) - -The `quantifiers` option tells braces to detect when [regex quantifiers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#quantifiers) are defined in the given pattern, and not to try to expand them as lists. - -**Examples** - -```js -const braces = require('braces'); -console.log(braces('a/b{1,3}/{x,y,z}')); -//=> [ 'a/b(1|3)/(x|y|z)' ] -console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true })); -//=> [ 'a/b{1,3}/(x|y|z)' ] -console.log(braces('a/b{1,3}/{x,y,z}', { quantifiers: true, expand: true })); -//=> [ 'a/b{1,3}/x', 'a/b{1,3}/y', 'a/b{1,3}/z' ] -``` - -### options.keepEscaping - -**Type**: `Boolean` - -**Default**: `undefined` - -**Description**: Do not strip backslashes that were used for escaping from the result. - -## What is "brace expansion"? - -Brace expansion is a type of parameter expansion that was made popular by unix shells for generating lists of strings, as well as regex-like matching when used alongside wildcards (globs). - -In addition to "expansion", braces are also used for matching. In other words: - -- [brace expansion](#brace-expansion) is for generating new lists -- [brace matching](#brace-matching) is for filtering existing lists - -
-More about brace expansion (click to expand) - -There are two main types of brace expansion: - -1. **lists**: which are defined using comma-separated values inside curly braces: `{a,b,c}` -2. **sequences**: which are defined using a starting value and an ending value, separated by two dots: `a{1..3}b`. Optionally, a third argument may be passed to define a "step" or increment to use: `a{1..100..10}b`. These are also sometimes referred to as "ranges". - -Here are some example brace patterns to illustrate how they work: - -**Sets** - -``` -{a,b,c} => a b c -{a,b,c}{1,2} => a1 a2 b1 b2 c1 c2 -``` - -**Sequences** - -``` -{1..9} => 1 2 3 4 5 6 7 8 9 -{4..-4} => 4 3 2 1 0 -1 -2 -3 -4 -{1..20..3} => 1 4 7 10 13 16 19 -{a..j} => a b c d e f g h i j -{j..a} => j i h g f e d c b a -{a..z..3} => a d g j m p s v y -``` - -**Combination** - -Sets and sequences can be mixed together or used along with any other strings. - -``` -{a,b,c}{1..3} => a1 a2 a3 b1 b2 b3 c1 c2 c3 -foo/{a,b,c}/bar => foo/a/bar foo/b/bar foo/c/bar -``` - -The fact that braces can be "expanded" from relatively simple patterns makes them ideal for quickly generating test fixtures, file paths, and similar use cases. - -## Brace matching - -In addition to _expansion_, brace patterns are also useful for performing regular-expression-like matching. - -For example, the pattern `foo/{1..3}/bar` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -``` - -But not: - -``` -baz/1/qux -baz/2/qux -baz/3/qux -``` - -Braces can also be combined with [glob patterns](https://github.com/jonschlinkert/micromatch) to perform more advanced wildcard matching. For example, the pattern `*/{1..3}/*` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -baz/1/qux -baz/2/qux -baz/3/qux -``` - -## Brace matching pitfalls - -Although brace patterns offer a user-friendly way of matching ranges or sets of strings, there are also some major disadvantages and potential risks you should be aware of. - -### tldr - -**"brace bombs"** - -- brace expansion can eat up a huge amount of processing resources -- as brace patterns increase _linearly in size_, the system resources required to expand the pattern increase exponentially -- users can accidentally (or intentially) exhaust your system's resources resulting in the equivalent of a DoS attack (bonus: no programming knowledge is required!) - -For a more detailed explanation with examples, see the [geometric complexity](#geometric-complexity) section. - -### The solution - -Jump to the [performance section](#performance) to see how Braces solves this problem in comparison to other libraries. - -### Geometric complexity - -At minimum, brace patterns with sets limited to two elements have quadradic or `O(n^2)` complexity. But the complexity of the algorithm increases exponentially as the number of sets, _and elements per set_, increases, which is `O(n^c)`. 
-
-For example, the following sets demonstrate quadratic (`O(n^2)`) complexity:
-
-```
-{1,2}{3,4} => (2X2) => 13 14 23 24
-{1,2}{3,4}{5,6} => (2X2X2) => 135 136 145 146 235 236 245 246
-```
-
-But add an element to a set, and we get an n-fold Cartesian product with `O(n^c)` complexity:
-
-```
-{1,2,3}{4,5,6}{7,8,9} => (3X3X3) => 147 148 149 157 158 159 167 168 169 247 248
-                                    249 257 258 259 267 268 269 347 348 349 357
-                                    358 359 367 368 369
-```
-
-Now, imagine how this complexity grows given that each element is an n-tuple:
-
-```
-{1..100}{1..100} => (100X100) => 10,000 elements (38.4 kB)
-{1..100}{1..100}{1..100} => (100X100X100) => 1,000,000 elements (5.76 MB)
-```
-
-Although these examples are clearly contrived, they demonstrate how brace patterns can quickly grow out of control.
-
-**More information**
-
-Interested in learning more about brace expansion?
-
-- [linuxjournal/bash-brace-expansion](http://www.linuxjournal.com/content/bash-brace-expansion)
-- [rosettacode/Brace_expansion](https://rosettacode.org/wiki/Brace_expansion)
-- [cartesian product](https://en.wikipedia.org/wiki/Cartesian_product)
-
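-Because the size of an expansion is just the product of the individual set and range sizes, it can be estimated cheaply before deciding whether to expand at all. The sketch below is illustrative only; it is not part of the braces API, and it assumes simple, non-nested patterns made of comma lists and numeric ranges:
-
-```js
-// Rough size estimate for simple, non-nested patterns such as
-// '{1..100}{1..100}' or 'foo/{a,b,c}/bar'. Illustrative only.
-function estimateExpansionSize(pattern) {
-  let total = 1;
-  for (const [, body] of pattern.matchAll(/\{([^{}]+)\}/g)) {
-    const range = body.match(/^(-?\d+)\.\.(-?\d+)(?:\.\.(-?\d+))?$/);
-    if (range) {
-      // numeric sequence: count the steps between start and end
-      const start = Number(range[1]);
-      const end = Number(range[2]);
-      const step = Math.abs(Number(range[3])) || 1;
-      total *= Math.floor(Math.abs(end - start) / step) + 1;
-    } else {
-      // comma list: one result per element
-      total *= body.split(',').length;
-    }
-  }
-  return total;
-}
-
-console.log(estimateExpansionSize('foo/{a,b,c}/bar'));          //=> 3
-console.log(estimateExpansionSize('{1..100}{1..100}'));         //=> 10000
-console.log(estimateExpansionSize('{1..100}{1..100}{1..100}')); //=> 1000000
-```
-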
-
-## Performance
-
-Braces is not only screaming fast, it's also more accurate than other brace expansion libraries.
-
-### Better algorithms
-
-Fortunately, there is a solution to the ["brace bomb" problem](#brace-matching-pitfalls): _don't expand brace patterns into an array when they're used for matching_.
-
-Instead, convert the pattern into an optimized regular expression. This is easier said than done, and braces is the only library that does this currently.
-
-**The proof is in the numbers**
-
-Minimatch gets exponentially slower as patterns increase in complexity; braces does not. The following results were generated using `braces()` and `minimatch.braceExpand()`, respectively.
-
-| **Pattern** | **braces** | **[minimatch][]** |
-| --------------------------- | ------------------- | ---------------------------- |
-| `{1..9007199254740991}`[^1] | `298 B` (5ms 459μs) | N/A (freezes) |
-| `{1..1000000000000000}` | `41 B` (1ms 15μs) | N/A (freezes) |
-| `{1..100000000000000}` | `40 B` (890μs) | N/A (freezes) |
-| `{1..10000000000000}` | `39 B` (2ms 49μs) | N/A (freezes) |
-| `{1..1000000000000}` | `38 B` (608μs) | N/A (freezes) |
-| `{1..100000000000}` | `37 B` (397μs) | N/A (freezes) |
-| `{1..10000000000}` | `35 B` (983μs) | N/A (freezes) |
-| `{1..1000000000}` | `34 B` (798μs) | N/A (freezes) |
-| `{1..100000000}` | `33 B` (733μs) | N/A (freezes) |
-| `{1..10000000}` | `32 B` (5ms 632μs) | `78.89 MB` (16s 388ms 569μs) |
-| `{1..1000000}` | `31 B` (1ms 381μs) | `6.89 MB` (1s 496ms 887μs) |
-| `{1..100000}` | `30 B` (950μs) | `588.89 kB` (146ms 921μs) |
-| `{1..10000}` | `29 B` (1ms 114μs) | `48.89 kB` (14ms 187μs) |
-| `{1..1000}` | `28 B` (760μs) | `3.89 kB` (1ms 453μs) |
-| `{1..100}` | `22 B` (345μs) | `291 B` (196μs) |
-| `{1..10}` | `10 B` (533μs) | `20 B` (37μs) |
-| `{1..3}` | `7 B` (190μs) | `5 B` (27μs) |
-
-### Faster algorithms
-
-When you need expansion, braces is still much faster.
-
-_(the following results were generated using `braces.expand()` and `minimatch.braceExpand()`, respectively)_
-
-| **Pattern** | **braces** | **[minimatch][]** |
-| --------------- | --------------------------- | ---------------------------- |
-| `{1..10000000}` | `78.89 MB` (2s 698ms 642μs) | `78.89 MB` (18s 601ms 974μs) |
-| `{1..1000000}` | `6.89 MB` (458ms 576μs) | `6.89 MB` (1s 491ms 621μs) |
-| `{1..100000}` | `588.89 kB` (20ms 728μs) | `588.89 kB` (156ms 919μs) |
-| `{1..10000}` | `48.89 kB` (2ms 202μs) | `48.89 kB` (13ms 641μs) |
-| `{1..1000}` | `3.89 kB` (1ms 796μs) | `3.89 kB` (1ms 958μs) |
-| `{1..100}` | `291 B` (424μs) | `291 B` (211μs) |
-| `{1..10}` | `20 B` (487μs) | `20 B` (72μs) |
-| `{1..3}` | `5 B` (166μs) | `5 B` (27μs) |
-
-If you'd like to run these comparisons yourself, see [test/support/generate.js](test/support/generate.js).
-
-## Benchmarks
-
-### Running benchmarks
-
-Install dev dependencies:
-
-```bash
-npm i -d && npm run benchmark
-```
-
-### Latest results
-
-Braces is more accurate, without sacrificing performance.
- -```bash -● expand - range (expanded) - braces x 53,167 ops/sec ±0.12% (102 runs sampled) - minimatch x 11,378 ops/sec ±0.10% (102 runs sampled) -● expand - range (optimized for regex) - braces x 373,442 ops/sec ±0.04% (100 runs sampled) - minimatch x 3,262 ops/sec ±0.18% (100 runs sampled) -● expand - nested ranges (expanded) - braces x 33,921 ops/sec ±0.09% (99 runs sampled) - minimatch x 10,855 ops/sec ±0.28% (100 runs sampled) -● expand - nested ranges (optimized for regex) - braces x 287,479 ops/sec ±0.52% (98 runs sampled) - minimatch x 3,219 ops/sec ±0.28% (101 runs sampled) -● expand - set (expanded) - braces x 238,243 ops/sec ±0.19% (97 runs sampled) - minimatch x 538,268 ops/sec ±0.31% (96 runs sampled) -● expand - set (optimized for regex) - braces x 321,844 ops/sec ±0.10% (97 runs sampled) - minimatch x 140,600 ops/sec ±0.15% (100 runs sampled) -● expand - nested sets (expanded) - braces x 165,371 ops/sec ±0.42% (96 runs sampled) - minimatch x 337,720 ops/sec ±0.28% (100 runs sampled) -● expand - nested sets (optimized for regex) - braces x 242,948 ops/sec ±0.12% (99 runs sampled) - minimatch x 87,403 ops/sec ±0.79% (96 runs sampled) -``` - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Contributors - -| **Commits** | **Contributor** | -| ----------- | ------------------------------------------------------------- | -| 197 | [jonschlinkert](https://github.com/jonschlinkert) | -| 4 | [doowb](https://github.com/doowb) | -| 1 | [es128](https://github.com/es128) | -| 1 | [eush77](https://github.com/eush77) | -| 1 | [hemanth](https://github.com/hemanth) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -- [GitHub Profile](https://github.com/jonschlinkert) -- [Twitter Profile](https://twitter.com/jonschlinkert) -- [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - ---- - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js deleted file mode 100644 index d222c13..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/index.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict'; - -const stringify = require('./lib/stringify'); -const compile = require('./lib/compile'); -const expand = require('./lib/expand'); -const parse = require('./lib/parse'); - -/** - * Expand the given pattern or create a regex-compatible string. - * - * ```js - * const braces = require('braces'); - * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)'] - * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c'] - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {String} - * @api public - */ - -const braces = (input, options = {}) => { - let output = []; - - if (Array.isArray(input)) { - for (const pattern of input) { - const result = braces.create(pattern, options); - if (Array.isArray(result)) { - output.push(...result); - } else { - output.push(result); - } - } - } else { - output = [].concat(braces.create(input, options)); - } - - if (options && options.expand === true && options.nodupes === true) { - output = [...new Set(output)]; - } - return output; -}; - -/** - * Parse the given `str` with the given `options`. - * - * ```js - * // braces.parse(pattern, [, options]); - * const ast = braces.parse('a/{b,c}/d'); - * console.log(ast); - * ``` - * @param {String} pattern Brace pattern to parse - * @param {Object} options - * @return {Object} Returns an AST - * @api public - */ - -braces.parse = (input, options = {}) => parse(input, options); - -/** - * Creates a braces string from an AST, or an AST node. - * - * ```js - * const braces = require('braces'); - * let ast = braces.parse('foo/{a,b}/bar'); - * console.log(stringify(ast.nodes[2])); //=> '{a,b}' - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.stringify = (input, options = {}) => { - if (typeof input === 'string') { - return stringify(braces.parse(input, options), options); - } - return stringify(input, options); -}; - -/** - * Compiles a brace pattern into a regex-compatible, optimized string. - * This method is called by the main [braces](#braces) function by default. 
- * - * ```js - * const braces = require('braces'); - * console.log(braces.compile('a/{b,c}/d')); - * //=> ['a/(b|c)/d'] - * ``` - * @param {String} `input` Brace pattern or AST. - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.compile = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - return compile(input, options); -}; - -/** - * Expands a brace pattern into an array. This method is called by the - * main [braces](#braces) function when `options.expand` is true. Before - * using this method it's recommended that you read the [performance notes](#performance)) - * and advantages of using [.compile](#compile) instead. - * - * ```js - * const braces = require('braces'); - * console.log(braces.expand('a/{b,c}/d')); - * //=> ['a/b/d', 'a/c/d']; - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.expand = (input, options = {}) => { - if (typeof input === 'string') { - input = braces.parse(input, options); - } - - let result = expand(input, options); - - // filter out empty strings if specified - if (options.noempty === true) { - result = result.filter(Boolean); - } - - // filter out duplicates if specified - if (options.nodupes === true) { - result = [...new Set(result)]; - } - - return result; -}; - -/** - * Processes a brace pattern and returns either an expanded array - * (if `options.expand` is true), a highly optimized regex-compatible string. - * This method is called by the main [braces](#braces) function. - * - * ```js - * const braces = require('braces'); - * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}')) - * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)' - * ``` - * @param {String} `pattern` Brace pattern - * @param {Object} `options` - * @return {Array} Returns an array of expanded values. - * @api public - */ - -braces.create = (input, options = {}) => { - if (input === '' || input.length < 3) { - return [input]; - } - - return options.expand !== true - ? braces.compile(input, options) - : braces.expand(input, options); -}; - -/** - * Expose "braces" - */ - -module.exports = braces; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js deleted file mode 100644 index dce69be..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/compile.js +++ /dev/null @@ -1,60 +0,0 @@ -'use strict'; - -const fill = require('fill-range'); -const utils = require('./utils'); - -const compile = (ast, options = {}) => { - const walk = (node, parent = {}) => { - const invalidBlock = utils.isInvalidBrace(parent); - const invalidNode = node.invalid === true && options.escapeInvalid === true; - const invalid = invalidBlock === true || invalidNode === true; - const prefix = options.escapeInvalid === true ? '\\' : ''; - let output = ''; - - if (node.isOpen === true) { - return prefix + node.value; - } - - if (node.isClose === true) { - console.log('node.isClose', prefix, node.value); - return prefix + node.value; - } - - if (node.type === 'open') { - return invalid ? prefix + node.value : '('; - } - - if (node.type === 'close') { - return invalid ? prefix + node.value : ')'; - } - - if (node.type === 'comma') { - return node.prev.type === 'comma' ? '' : invalid ? 
node.value : '|'; - } - - if (node.value) { - return node.value; - } - - if (node.nodes && node.ranges > 0) { - const args = utils.reduce(node.nodes); - const range = fill(...args, { ...options, wrap: false, toRegex: true, strictZeros: true }); - - if (range.length !== 0) { - return args.length > 1 && range.length > 1 ? `(${range})` : range; - } - } - - if (node.nodes) { - for (const child of node.nodes) { - output += walk(child, node); - } - } - - return output; - }; - - return walk(ast); -}; - -module.exports = compile; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js deleted file mode 100644 index 2bb3b88..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/constants.js +++ /dev/null @@ -1,57 +0,0 @@ -'use strict'; - -module.exports = { - MAX_LENGTH: 10000, - - // Digits - CHAR_0: '0', /* 0 */ - CHAR_9: '9', /* 9 */ - - // Alphabet chars. - CHAR_UPPERCASE_A: 'A', /* A */ - CHAR_LOWERCASE_A: 'a', /* a */ - CHAR_UPPERCASE_Z: 'Z', /* Z */ - CHAR_LOWERCASE_Z: 'z', /* z */ - - CHAR_LEFT_PARENTHESES: '(', /* ( */ - CHAR_RIGHT_PARENTHESES: ')', /* ) */ - - CHAR_ASTERISK: '*', /* * */ - - // Non-alphabetic chars. - CHAR_AMPERSAND: '&', /* & */ - CHAR_AT: '@', /* @ */ - CHAR_BACKSLASH: '\\', /* \ */ - CHAR_BACKTICK: '`', /* ` */ - CHAR_CARRIAGE_RETURN: '\r', /* \r */ - CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */ - CHAR_COLON: ':', /* : */ - CHAR_COMMA: ',', /* , */ - CHAR_DOLLAR: '$', /* . */ - CHAR_DOT: '.', /* . */ - CHAR_DOUBLE_QUOTE: '"', /* " */ - CHAR_EQUAL: '=', /* = */ - CHAR_EXCLAMATION_MARK: '!', /* ! */ - CHAR_FORM_FEED: '\f', /* \f */ - CHAR_FORWARD_SLASH: '/', /* / */ - CHAR_HASH: '#', /* # */ - CHAR_HYPHEN_MINUS: '-', /* - */ - CHAR_LEFT_ANGLE_BRACKET: '<', /* < */ - CHAR_LEFT_CURLY_BRACE: '{', /* { */ - CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */ - CHAR_LINE_FEED: '\n', /* \n */ - CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */ - CHAR_PERCENT: '%', /* % */ - CHAR_PLUS: '+', /* + */ - CHAR_QUESTION_MARK: '?', /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */ - CHAR_RIGHT_CURLY_BRACE: '}', /* } */ - CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */ - CHAR_SEMICOLON: ';', /* ; */ - CHAR_SINGLE_QUOTE: '\'', /* ' */ - CHAR_SPACE: ' ', /* */ - CHAR_TAB: '\t', /* \t */ - CHAR_UNDERSCORE: '_', /* _ */ - CHAR_VERTICAL_LINE: '|', /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */ -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js deleted file mode 100644 index 35b2c41..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/expand.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict'; - -const fill = require('fill-range'); -const stringify = require('./stringify'); -const utils = require('./utils'); - -const append = (queue = '', stash = '', enclose = false) => { - const result = []; - - queue = [].concat(queue); - stash = [].concat(stash); - - if (!stash.length) return queue; - if (!queue.length) { - return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash; - } - - for (const item of queue) { - if (Array.isArray(item)) { - for (const value of item) { - result.push(append(value, stash, enclose)); - } - } else { - for (let ele of stash) { - if (enclose === true && typeof ele === 'string') ele = `{${ele}}`; - result.push(Array.isArray(ele) ? 
append(item, ele, enclose) : item + ele); - } - } - } - return utils.flatten(result); -}; - -const expand = (ast, options = {}) => { - const rangeLimit = options.rangeLimit === undefined ? 1000 : options.rangeLimit; - - const walk = (node, parent = {}) => { - node.queue = []; - - let p = parent; - let q = parent.queue; - - while (p.type !== 'brace' && p.type !== 'root' && p.parent) { - p = p.parent; - q = p.queue; - } - - if (node.invalid || node.dollar) { - q.push(append(q.pop(), stringify(node, options))); - return; - } - - if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) { - q.push(append(q.pop(), ['{}'])); - return; - } - - if (node.nodes && node.ranges > 0) { - const args = utils.reduce(node.nodes); - - if (utils.exceedsLimit(...args, options.step, rangeLimit)) { - throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.'); - } - - let range = fill(...args, options); - if (range.length === 0) { - range = stringify(node, options); - } - - q.push(append(q.pop(), range)); - node.nodes = []; - return; - } - - const enclose = utils.encloseBrace(node); - let queue = node.queue; - let block = node; - - while (block.type !== 'brace' && block.type !== 'root' && block.parent) { - block = block.parent; - queue = block.queue; - } - - for (let i = 0; i < node.nodes.length; i++) { - const child = node.nodes[i]; - - if (child.type === 'comma' && node.type === 'brace') { - if (i === 1) queue.push(''); - queue.push(''); - continue; - } - - if (child.type === 'close') { - q.push(append(q.pop(), queue, enclose)); - continue; - } - - if (child.value && child.type !== 'open') { - queue.push(append(queue.pop(), child.value)); - continue; - } - - if (child.nodes) { - walk(child, node); - } - } - - return queue; - }; - - return utils.flatten(walk(ast)); -}; - -module.exports = expand; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js deleted file mode 100644 index 3a6988e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/parse.js +++ /dev/null @@ -1,331 +0,0 @@ -'use strict'; - -const stringify = require('./stringify'); - -/** - * Constants - */ - -const { - MAX_LENGTH, - CHAR_BACKSLASH, /* \ */ - CHAR_BACKTICK, /* ` */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_RIGHT_SQUARE_BRACKET, /* ] */ - CHAR_DOUBLE_QUOTE, /* " */ - CHAR_SINGLE_QUOTE, /* ' */ - CHAR_NO_BREAK_SPACE, - CHAR_ZERO_WIDTH_NOBREAK_SPACE -} = require('./constants'); - -/** - * parse - */ - -const parse = (input, options = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } - - const opts = options || {}; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - if (input.length > max) { - throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`); - } - - const ast = { type: 'root', input, nodes: [] }; - const stack = [ast]; - let block = ast; - let prev = ast; - let brackets = 0; - const length = input.length; - let index = 0; - let depth = 0; - let value; - - /** - * Helpers - */ - - const advance = () => input[index++]; - const push = node => { - if (node.type === 'text' && prev.type === 'dot') { - prev.type = 'text'; - } - - if (prev && prev.type === 'text' && node.type === 'text') { - prev.value += node.value; - return; - } - - block.nodes.push(node); - node.parent = block; - node.prev = prev; - prev = node; - return node; - }; - - push({ type: 'bos' }); - - while (index < length) { - block = stack[stack.length - 1]; - value = advance(); - - /** - * Invalid chars - */ - - if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) { - continue; - } - - /** - * Escaped chars - */ - - if (value === CHAR_BACKSLASH) { - push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() }); - continue; - } - - /** - * Right square bracket (literal): ']' - */ - - if (value === CHAR_RIGHT_SQUARE_BRACKET) { - push({ type: 'text', value: '\\' + value }); - continue; - } - - /** - * Left square bracket: '[' - */ - - if (value === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - - let next; - - while (index < length && (next = advance())) { - value += next; - - if (next === CHAR_LEFT_SQUARE_BRACKET) { - brackets++; - continue; - } - - if (next === CHAR_BACKSLASH) { - value += advance(); - continue; - } - - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - brackets--; - - if (brackets === 0) { - break; - } - } - } - - push({ type: 'text', value }); - continue; - } - - /** - * Parentheses - */ - - if (value === CHAR_LEFT_PARENTHESES) { - block = push({ type: 'paren', nodes: [] }); - stack.push(block); - push({ type: 'text', value }); - continue; - } - - if (value === CHAR_RIGHT_PARENTHESES) { - if (block.type !== 'paren') { - push({ type: 'text', value }); - continue; - } - block = stack.pop(); - push({ type: 'text', value }); - block = stack[stack.length - 1]; - continue; - } - - /** - * Quotes: '|"|` - */ - - if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) { - const open = value; - let next; - - if (options.keepQuotes !== true) { - value = ''; - } - - while (index < length && (next = advance())) { - if (next === CHAR_BACKSLASH) { - value += next + advance(); - continue; - } - - if (next === open) { - if (options.keepQuotes === true) value += next; - break; - } - - value += next; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Left curly brace: '{' - */ - - if (value === CHAR_LEFT_CURLY_BRACE) { - depth++; - - const dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true; - const brace = { - type: 'brace', - open: true, - close: false, - dollar, - depth, - commas: 0, - ranges: 0, - nodes: [] - }; - - block = push(brace); - stack.push(block); - push({ type: 'open', value }); - continue; - } - - /** - * Right curly brace: '}' - */ - - if (value === CHAR_RIGHT_CURLY_BRACE) { - if (block.type !== 'brace') { - push({ type: 'text', value }); - continue; - } - - const type = 'close'; - block = stack.pop(); - block.close = true; - - push({ type, value }); - depth--; - - block = stack[stack.length - 1]; - continue; - } - - /** - * Comma: ',' - */ - - if (value === CHAR_COMMA && depth 
> 0) { - if (block.ranges > 0) { - block.ranges = 0; - const open = block.nodes.shift(); - block.nodes = [open, { type: 'text', value: stringify(block) }]; - } - - push({ type: 'comma', value }); - block.commas++; - continue; - } - - /** - * Dot: '.' - */ - - if (value === CHAR_DOT && depth > 0 && block.commas === 0) { - const siblings = block.nodes; - - if (depth === 0 || siblings.length === 0) { - push({ type: 'text', value }); - continue; - } - - if (prev.type === 'dot') { - block.range = []; - prev.value += value; - prev.type = 'range'; - - if (block.nodes.length !== 3 && block.nodes.length !== 5) { - block.invalid = true; - block.ranges = 0; - prev.type = 'text'; - continue; - } - - block.ranges++; - block.args = []; - continue; - } - - if (prev.type === 'range') { - siblings.pop(); - - const before = siblings[siblings.length - 1]; - before.value += prev.value + value; - prev = before; - block.ranges--; - continue; - } - - push({ type: 'dot', value }); - continue; - } - - /** - * Text - */ - - push({ type: 'text', value }); - } - - // Mark imbalanced braces and brackets as invalid - do { - block = stack.pop(); - - if (block.type !== 'root') { - block.nodes.forEach(node => { - if (!node.nodes) { - if (node.type === 'open') node.isOpen = true; - if (node.type === 'close') node.isClose = true; - if (!node.nodes) node.type = 'text'; - node.invalid = true; - } - }); - - // get the location of the block on parent.nodes (block's siblings) - const parent = stack[stack.length - 1]; - const index = parent.nodes.indexOf(block); - // replace the (invalid) block with it's nodes - parent.nodes.splice(index, 1, ...block.nodes); - } - } while (stack.length > 0); - - push({ type: 'eos' }); - return ast; -}; - -module.exports = parse; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js deleted file mode 100644 index 8bcf872..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/stringify.js +++ /dev/null @@ -1,32 +0,0 @@ -'use strict'; - -const utils = require('./utils'); - -module.exports = (ast, options = {}) => { - const stringify = (node, parent = {}) => { - const invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent); - const invalidNode = node.invalid === true && options.escapeInvalid === true; - let output = ''; - - if (node.value) { - if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) { - return '\\' + node.value; - } - return node.value; - } - - if (node.value) { - return node.value; - } - - if (node.nodes) { - for (const child of node.nodes) { - output += stringify(child); - } - } - return output; - }; - - return stringify(ast); -}; - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js deleted file mode 100644 index d19311f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/lib/utils.js +++ /dev/null @@ -1,122 +0,0 @@ -'use strict'; - -exports.isInteger = num => { - if (typeof num === 'number') { - return Number.isInteger(num); - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isInteger(Number(num)); - } - return false; -}; - -/** - * Find a node of the given type - */ - -exports.find = (node, type) => node.nodes.find(node => node.type === type); - -/** - * Find a node of the given type - */ - -exports.exceedsLimit = (min, max, step = 1, limit) => { - if (limit 
=== false) return false; - if (!exports.isInteger(min) || !exports.isInteger(max)) return false; - return ((Number(max) - Number(min)) / Number(step)) >= limit; -}; - -/** - * Escape the given node with '\\' before node.value - */ - -exports.escapeNode = (block, n = 0, type) => { - const node = block.nodes[n]; - if (!node) return; - - if ((type && node.type === type) || node.type === 'open' || node.type === 'close') { - if (node.escaped !== true) { - node.value = '\\' + node.value; - node.escaped = true; - } - } -}; - -/** - * Returns true if the given brace node should be enclosed in literal braces - */ - -exports.encloseBrace = node => { - if (node.type !== 'brace') return false; - if ((node.commas >> 0 + node.ranges >> 0) === 0) { - node.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a brace node is invalid. - */ - -exports.isInvalidBrace = block => { - if (block.type !== 'brace') return false; - if (block.invalid === true || block.dollar) return true; - if ((block.commas >> 0 + block.ranges >> 0) === 0) { - block.invalid = true; - return true; - } - if (block.open !== true || block.close !== true) { - block.invalid = true; - return true; - } - return false; -}; - -/** - * Returns true if a node is an open or close node - */ - -exports.isOpenOrClose = node => { - if (node.type === 'open' || node.type === 'close') { - return true; - } - return node.open === true || node.close === true; -}; - -/** - * Reduce an array of text nodes. - */ - -exports.reduce = nodes => nodes.reduce((acc, node) => { - if (node.type === 'text') acc.push(node.value); - if (node.type === 'range') node.type = 'text'; - return acc; -}, []); - -/** - * Flatten an array - */ - -exports.flatten = (...args) => { - const result = []; - - const flat = arr => { - for (let i = 0; i < arr.length; i++) { - const ele = arr[i]; - - if (Array.isArray(ele)) { - flat(ele); - continue; - } - - if (ele !== undefined) { - result.push(ele); - } - } - return result; - }; - - flat(args); - return result; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json deleted file mode 100644 index c3c056e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/braces/package.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "name": "braces", - "description": "Bash-like brace expansion, implemented in JavaScript. 
Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.", - "version": "3.0.3", - "homepage": "https://github.com/micromatch/braces", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Brian Woodward (https://twitter.com/doowb)", - "Elan Shanker (https://github.com/es128)", - "Eugene Sharygin (https://github.com/eush77)", - "hemanth.hm (http://h3manth.com)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "micromatch/braces", - "bugs": { - "url": "https://github.com/micromatch/braces/issues" - }, - "license": "MIT", - "files": [ - "index.js", - "lib" - ], - "main": "index.js", - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "mocha", - "benchmark": "node benchmark" - }, - "dependencies": { - "fill-range": "^7.1.1" - }, - "devDependencies": { - "ansi-colors": "^3.2.4", - "bash-path": "^2.0.1", - "gulp-format-md": "^2.0.0", - "mocha": "^6.1.1" - }, - "keywords": [ - "alpha", - "alphabetical", - "bash", - "brace", - "braces", - "expand", - "expansion", - "filepath", - "fill", - "fs", - "glob", - "globbing", - "letter", - "match", - "matches", - "matching", - "number", - "numerical", - "path", - "range", - "ranges", - "sh" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "lint": { - "reflinks": true - }, - "plugins": [ - "gulp-format-md" - ] - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE deleted file mode 100644 index 8407b9a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Made With MOXY Lda - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md
deleted file mode 100644
index 1ed9252..0000000
--- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/README.md
+++ /dev/null
@@ -1,89 +0,0 @@
-# cross-spawn
-
-[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][ci-image]][ci-url] [![Build status][appveyor-image]][appveyor-url]
-
-[npm-url]:https://npmjs.org/package/cross-spawn
-[downloads-image]:https://img.shields.io/npm/dm/cross-spawn.svg
-[npm-image]:https://img.shields.io/npm/v/cross-spawn.svg
-[ci-url]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml
-[ci-image]:https://github.com/moxystudio/node-cross-spawn/actions/workflows/ci.yaml/badge.svg
-[appveyor-url]:https://ci.appveyor.com/project/satazor/node-cross-spawn
-[appveyor-image]:https://img.shields.io/appveyor/ci/satazor/node-cross-spawn/master.svg
-
-A cross-platform solution to node's spawn and spawnSync.
-
-## Installation
-
-Node.js version 8 and up:
-`$ npm install cross-spawn`
-
-Node.js version 7 and under:
-`$ npm install cross-spawn@6`
-
-## Why
-
-Node has issues when using spawn on Windows:
-
-- It ignores [PATHEXT](https://github.com/joyent/node/issues/2318)
-- It does not support [shebangs](https://en.wikipedia.org/wiki/Shebang_(Unix))
-- Has problems running commands with [spaces](https://github.com/nodejs/node/issues/7367)
-- Has problems running commands with posix relative paths (e.g.: `./my-folder/my-executable`)
-- Has an [issue](https://github.com/moxystudio/node-cross-spawn/issues/82) with command shims (files in `node_modules/.bin/`), where arguments with quotes and parentheses would result in an [invalid syntax error](https://github.com/moxystudio/node-cross-spawn/blob/e77b8f22a416db46b6196767bcd35601d7e11d54/test/index.test.js#L149)
-- No `options.shell` support on node `<v4.8`
-
-While `cross-spawn` handles shebangs on Windows, its support is limited: it only supports `#!/usr/bin/env <program>` where `<program>` must not contain any arguments.
-If you would like to have the shebang support improved, feel free to contribute via a pull-request.
-
-Remember to always test your code on Windows!
-
-
-## Tests
-
-`$ npm test`
-`$ npm test -- --watch` during development
-
-
-## License
-
-Released under the [MIT License](https://www.opensource.org/licenses/mit-license.php).
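-For reference, `cross-spawn` keeps the same call signatures as node's `spawn` and `spawnSync` (see `index.js` below), so switching is a drop-in change. A minimal usage sketch (the `npm list` invocation is just an arbitrary example command):
-
-```js
-const spawn = require('cross-spawn');
-
-// Asynchronous: same signature as child_process.spawn
-const child = spawn('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
-child.on('exit', code => console.log(`exited with code ${code}`));
-
-// Synchronous: same signature as child_process.spawnSync
-const result = spawn.sync('npm', ['list', '-g', '--depth', '0'], { stdio: 'inherit' });
-console.log(result.status);
-```
-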
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js deleted file mode 100644 index 5509742..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/index.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -const cp = require('child_process'); -const parse = require('./lib/parse'); -const enoent = require('./lib/enoent'); - -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); - - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); - - return spawned; -} - -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); - - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); - - return result; -} - -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; - -module.exports._parse = parse; -module.exports._enoent = enoent; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js deleted file mode 100644 index da33471..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/enoent.js +++ /dev/null @@ -1,59 +0,0 @@ -'use strict'; - -const isWin = process.platform === 'win32'; - -function notFoundError(original, syscall) { - return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { - code: 'ENOENT', - errno: 'ENOENT', - syscall: `${syscall} ${original.command}`, - path: original.command, - spawnargs: original.args, - }); -} - -function hookChildProcess(cp, parsed) { - if (!isWin) { - return; - } - - const originalEmit = cp.emit; - - cp.emit = function (name, arg1) { - // If emitting "exit" event and exit code is 1, we need to check if - // the command exists and emit an "error" instead - // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 - if (name === 'exit') { - const err = verifyENOENT(arg1, parsed); - - if (err) { - return originalEmit.call(cp, 'error', err); - } - } - - return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params - }; -} - -function verifyENOENT(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawn'); - } - - return null; -} - -function verifyENOENTSync(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawnSync'); - } - - return null; -} - -module.exports = { - hookChildProcess, - verifyENOENT, - verifyENOENTSync, - notFoundError, -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js deleted file mode 100644 index 0129d74..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/parse.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; - -const 
path = require('path'); -const resolveCommand = require('./util/resolveCommand'); -const escape = require('./util/escape'); -const readShebang = require('./util/readShebang'); - -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; - -function detectShebang(parsed) { - parsed.file = resolveCommand(parsed); - - const shebang = parsed.file && readShebang(parsed.file); - - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; - - return resolveCommand(parsed); - } - - return parsed.file; -} - -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } - - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); - - // We don't need a shell if the command filename is an executable - const needsShell = !isExecutableRegExp.test(commandFile); - - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to double escape them - const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); - - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = path.normalize(parsed.command); - - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - - const shellCommand = [parsed.command].concat(parsed.args).join(' '); - - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } - - return parsed; -} - -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } - - args = args ? args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original - - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; - - // Delegate further parsing to shell or non-shell - return options.shell ? 
parsed : parseNonShell(parsed); -} - -module.exports = parse; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js deleted file mode 100644 index 7bf2905..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/escape.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; - -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - return arg; -} - -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; - - // Algorithm below is based on https://qntm.org/cmd - // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input - // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information - - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"'); - - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1'); - - // All other backslashes occur literally - - // Quote the whole thing: - arg = `"${arg}"`; - - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } - - return arg; -} - -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js deleted file mode 100644 index 5e83733..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/readShebang.js +++ /dev/null @@ -1,23 +0,0 @@ -'use strict'; - -const fs = require('fs'); -const shebangCommand = require('shebang-command'); - -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - const buffer = Buffer.alloc(size); - - let fd; - - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } - - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); -} - -module.exports = readShebang; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js deleted file mode 100644 index 7972455..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/lib/util/resolveCommand.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; - -const path = require('path'); -const which = require('which'); -const getPathKey = require('path-key'); - -function resolveCommandAttempt(parsed, withoutPathExt) { - const env = parsed.options.env || process.env; - const cwd = process.cwd(); - const hasCustomCwd = parsed.options.cwd != null; - // Worker threads do not have process.chdir() - const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled; - - // If a custom `cwd` was specified, we 
need to change the process cwd - // because `which` will do stat calls but does not support a custom cwd - if (shouldSwitchCwd) { - try { - process.chdir(parsed.options.cwd); - } catch (err) { - /* Empty */ - } - } - - let resolved; - - try { - resolved = which.sync(parsed.command, { - path: env[getPathKey({ env })], - pathExt: withoutPathExt ? path.delimiter : undefined, - }); - } catch (e) { - /* Empty */ - } finally { - if (shouldSwitchCwd) { - process.chdir(cwd); - } - } - - // If we successfully resolved, ensure that an absolute path is returned - // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it - if (resolved) { - resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); - } - - return resolved; -} - -function resolveCommand(parsed) { - return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); -} - -module.exports = resolveCommand; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json deleted file mode 100644 index 24b2eb4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/cross-spawn/package.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "name": "cross-spawn", - "version": "7.0.6", - "description": "Cross platform child_process#spawn and child_process#spawnSync", - "keywords": [ - "spawn", - "spawnSync", - "windows", - "cross-platform", - "path-ext", - "shebang", - "cmd", - "execute" - ], - "author": "André Cruz ", - "homepage": "https://github.com/moxystudio/node-cross-spawn", - "repository": { - "type": "git", - "url": "git@github.com:moxystudio/node-cross-spawn.git" - }, - "license": "MIT", - "main": "index.js", - "files": [ - "lib" - ], - "scripts": { - "lint": "eslint .", - "test": "jest --env node --coverage", - "prerelease": "npm t && npm run lint", - "release": "standard-version", - "postrelease": "git push --follow-tags origin HEAD && npm publish" - }, - "husky": { - "hooks": { - "commit-msg": "commitlint -E HUSKY_GIT_PARAMS", - "pre-commit": "lint-staged" - } - }, - "lint-staged": { - "*.js": [ - "eslint --fix", - "git add" - ] - }, - "commitlint": { - "extends": [ - "@commitlint/config-conventional" - ] - }, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "devDependencies": { - "@commitlint/cli": "^8.1.0", - "@commitlint/config-conventional": "^8.1.0", - "babel-core": "^6.26.3", - "babel-jest": "^24.9.0", - "babel-preset-moxy": "^3.1.0", - "eslint": "^5.16.0", - "eslint-config-moxy": "^7.1.0", - "husky": "^3.0.5", - "jest": "^24.9.0", - "lint-staged": "^9.2.5", - "mkdirp": "^0.5.1", - "rimraf": "^3.0.0", - "standard-version": "^9.5.0" - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts deleted file mode 100644 index 417d535..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.d.ts +++ /dev/null @@ -1,564 +0,0 @@ -/// -import {ChildProcess} from 'child_process'; -import {Stream, Readable as ReadableStream} from 'stream'; - -declare namespace execa { - type StdioOption = - | 'pipe' - | 'ipc' - | 'ignore' - | 'inherit' - | Stream - | number - | undefined; - - interface CommonOptions { - /** - Kill the spawned process when the parent process exits unless either: - - the spawned process is 
[`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) - - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit - - @default true - */ - readonly cleanup?: boolean; - - /** - Prefer locally installed binaries when looking for a binary to execute. - - If you `$ npm install foo`, you can then `execa('foo')`. - - @default false - */ - readonly preferLocal?: boolean; - - /** - Preferred path to find locally installed binaries in (use with `preferLocal`). - - @default process.cwd() - */ - readonly localDir?: string; - - /** - Path to the Node.js executable to use in child processes. - - This can be either an absolute path or a path relative to the `cwd` option. - - Requires `preferLocal` to be `true`. - - For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. - - @default process.execPath - */ - readonly execPath?: string; - - /** - Buffer the output from the spawned process. When set to `false`, you must read the output of `stdout` and `stderr` (or `all` if the `all` option is `true`). Otherwise the returned promise will not be resolved/rejected. - - If the spawned process fails, `error.stdout`, `error.stderr`, and `error.all` will contain the buffered data. - - @default true - */ - readonly buffer?: boolean; - - /** - Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - - @default 'pipe' - */ - readonly stdin?: StdioOption; - - /** - Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - - @default 'pipe' - */ - readonly stdout?: StdioOption; - - /** - Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - - @default 'pipe' - */ - readonly stderr?: StdioOption; - - /** - Setting this to `false` resolves the promise with the error instead of rejecting it. - - @default true - */ - readonly reject?: boolean; - - /** - Add an `.all` property on the promise and the resolved value. The property contains the output of the process with `stdout` and `stderr` interleaved. - - @default false - */ - readonly all?: boolean; - - /** - Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. - - @default true - */ - readonly stripFinalNewline?: boolean; - - /** - Set to `false` if you don't want to extend the environment variables when providing the `env` property. - - @default true - */ - readonly extendEnv?: boolean; - - /** - Current working directory of the child process. - - @default process.cwd() - */ - readonly cwd?: string; - - /** - Environment key-value pairs. Extends automatically from `process.env`. Set `extendEnv` to `false` if you don't want this. - - @default process.env - */ - readonly env?: NodeJS.ProcessEnv; - - /** - Explicitly set the value of `argv[0]` sent to the child process. This will be set to `command` or `file` if not specified. - */ - readonly argv0?: string; - - /** - Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. - - @default 'pipe' - */ - readonly stdio?: 'pipe' | 'ignore' | 'inherit' | readonly StdioOption[]; - - /** - Specify the kind of serialization used for sending messages between processes when using the `stdio: 'ipc'` option or `execa.node()`: - - `json`: Uses `JSON.stringify()` and `JSON.parse()`. 
- - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) - - Requires Node.js `13.2.0` or later. - - [More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) - - @default 'json' - */ - readonly serialization?: 'json' | 'advanced'; - - /** - Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). - - @default false - */ - readonly detached?: boolean; - - /** - Sets the user identity of the process. - */ - readonly uid?: number; - - /** - Sets the group identity of the process. - */ - readonly gid?: number; - - /** - If `true`, runs `command` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. - - We recommend against using this option since it is: - - not cross-platform, encouraging shell-specific syntax. - - slower, because of the additional shell interpretation. - - unsafe, potentially allowing command injection. - - @default false - */ - readonly shell?: boolean | string; - - /** - Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. - - @default 'utf8' - */ - readonly encoding?: EncodingType; - - /** - If `timeout` is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than `timeout` milliseconds. - - @default 0 - */ - readonly timeout?: number; - - /** - Largest amount of data in bytes allowed on `stdout` or `stderr`. Default: 100 MB. - - @default 100_000_000 - */ - readonly maxBuffer?: number; - - /** - Signal value to be used when the spawned process will be killed. - - @default 'SIGTERM' - */ - readonly killSignal?: string | number; - - /** - If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. - - @default false - */ - readonly windowsVerbatimArguments?: boolean; - - /** - On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. - - @default true - */ - readonly windowsHide?: boolean; - } - - interface Options extends CommonOptions { - /** - Write some input to the `stdin` of your binary. - */ - readonly input?: string | Buffer | ReadableStream; - } - - interface SyncOptions extends CommonOptions { - /** - Write some input to the `stdin` of your binary. - */ - readonly input?: string | Buffer; - } - - interface NodeOptions extends Options { - /** - The Node.js executable to use. - - @default process.execPath - */ - readonly nodePath?: string; - - /** - List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. - - @default process.execArgv - */ - readonly nodeOptions?: string[]; - } - - interface ExecaReturnBase { - /** - The file and arguments that were run, for logging purposes. - - This is not escaped and should not be executed directly as a process, including using `execa()` or `execa.command()`. - */ - command: string; - - /** - Same as `command` but escaped. - - This is meant to be copy and pasted into a shell, for debugging purposes. 
- Since the escaping is fairly basic, this should not be executed directly as a process, including using `execa()` or `execa.command()`. - */ - escapedCommand: string; - - /** - The numeric exit code of the process that was run. - */ - exitCode: number; - - /** - The output of the process on stdout. - */ - stdout: StdoutStderrType; - - /** - The output of the process on stderr. - */ - stderr: StdoutStderrType; - - /** - Whether the process failed to run. - */ - failed: boolean; - - /** - Whether the process timed out. - */ - timedOut: boolean; - - /** - Whether the process was killed. - */ - killed: boolean; - - /** - The name of the signal that was used to terminate the process. For example, `SIGFPE`. - - If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. - */ - signal?: string; - - /** - A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. - - If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. - */ - signalDescription?: string; - } - - interface ExecaSyncReturnValue - extends ExecaReturnBase { - } - - /** - Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. - - The child process fails when: - - its exit code is not `0` - - it was killed with a signal - - timing out - - being canceled - - there's not enough memory or there are already too many child processes - */ - interface ExecaReturnValue - extends ExecaSyncReturnValue { - /** - The output of the process with `stdout` and `stderr` interleaved. - - This is `undefined` if either: - - the `all` option is `false` (default value) - - `execa.sync()` was used - */ - all?: StdoutErrorType; - - /** - Whether the process was canceled. - */ - isCanceled: boolean; - } - - interface ExecaSyncError - extends Error, - ExecaReturnBase { - /** - Error message when the child process failed to run. In addition to the underlying error message, it also contains some information related to why the child process errored. - - The child process stderr then stdout are appended to the end, separated with newlines and not interleaved. - */ - message: string; - - /** - This is the same as the `message` property except it does not include the child process stdout/stderr. - */ - shortMessage: string; - - /** - Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. - - This is `undefined` unless the child process exited due to an `error` event or a timeout. - */ - originalMessage?: string; - } - - interface ExecaError - extends ExecaSyncError { - /** - The output of the process with `stdout` and `stderr` interleaved. - - This is `undefined` if either: - - the `all` option is `false` (default value) - - `execa.sync()` was used - */ - all?: StdoutErrorType; - - /** - Whether the process was canceled. - */ - isCanceled: boolean; - } - - interface KillOptions { - /** - Milliseconds to wait for the child process to terminate before sending `SIGKILL`. - - Can be disabled with `false`. 
- - @default 5000 - */ - forceKillAfterTimeout?: number | false; - } - - interface ExecaChildPromise { - /** - Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). - - This is `undefined` if either: - - the `all` option is `false` (the default value) - - both `stdout` and `stderr` options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) - */ - all?: ReadableStream; - - catch( - onRejected?: (reason: ExecaError) => ResultType | PromiseLike - ): Promise | ResultType>; - - /** - Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal), except if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. - */ - kill(signal?: string, options?: KillOptions): void; - - /** - Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. - */ - cancel(): void; - } - - type ExecaChildProcess = ChildProcess & - ExecaChildPromise & - Promise>; -} - -declare const execa: { - /** - Execute a file. - - Think of this as a mix of `child_process.execFile` and `child_process.spawn`. - - @param file - The program/script to execute. - @param arguments - Arguments to pass to `file` on execution. - @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. - - @example - ``` - import execa = require('execa'); - - (async () => { - const {stdout} = await execa('echo', ['unicorns']); - console.log(stdout); - //=> 'unicorns' - - // Cancelling a spawned process - - const subprocess = execa('node'); - - setTimeout(() => { - subprocess.cancel() - }, 1000); - - try { - await subprocess; - } catch (error) { - console.log(subprocess.killed); // true - console.log(error.isCanceled); // true - } - })(); - - // Pipe the child process stdout to the current stdout - execa('echo', ['unicorns']).stdout.pipe(process.stdout); - ``` - */ - ( - file: string, - arguments?: readonly string[], - options?: execa.Options - ): execa.ExecaChildProcess; - ( - file: string, - arguments?: readonly string[], - options?: execa.Options - ): execa.ExecaChildProcess; - (file: string, options?: execa.Options): execa.ExecaChildProcess; - (file: string, options?: execa.Options): execa.ExecaChildProcess< - Buffer - >; - - /** - Execute a file synchronously. - - This method throws an `Error` if the command fails. - - @param file - The program/script to execute. - @param arguments - Arguments to pass to `file` on execution. - @returns A result `Object` with `stdout` and `stderr` properties. 
- */ - sync( - file: string, - arguments?: readonly string[], - options?: execa.SyncOptions - ): execa.ExecaSyncReturnValue; - sync( - file: string, - arguments?: readonly string[], - options?: execa.SyncOptions - ): execa.ExecaSyncReturnValue; - sync(file: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; - sync( - file: string, - options?: execa.SyncOptions - ): execa.ExecaSyncReturnValue; - - /** - Same as `execa()` except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. - - If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. - - The `shell` option must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. - - @param command - The program/script to execute and its arguments. - @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. - - @example - ``` - import execa = require('execa'); - - (async () => { - const {stdout} = await execa.command('echo unicorns'); - console.log(stdout); - //=> 'unicorns' - })(); - ``` - */ - command(command: string, options?: execa.Options): execa.ExecaChildProcess; - command(command: string, options?: execa.Options): execa.ExecaChildProcess; - - /** - Same as `execa.command()` but synchronous. - - @param command - The program/script to execute and its arguments. - @returns A result `Object` with `stdout` and `stderr` properties. - */ - commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; - commandSync(command: string, options?: execa.SyncOptions): execa.ExecaSyncReturnValue; - - /** - Execute a Node.js script as a child process. - - Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): - - the current Node version and options are used. This can be overridden using the `nodePath` and `nodeArguments` options. - - the `shell` option cannot be used - - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) - - @param scriptPath - Node.js script to execute. - @param arguments - Arguments to pass to `scriptPath` on execution. - @returns A [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess), which is enhanced to also be a `Promise` for a result `Object` with `stdout` and `stderr` properties. 
- */ - node( - scriptPath: string, - arguments?: readonly string[], - options?: execa.NodeOptions - ): execa.ExecaChildProcess; - node( - scriptPath: string, - arguments?: readonly string[], - options?: execa.Options - ): execa.ExecaChildProcess; - node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; - node(scriptPath: string, options?: execa.Options): execa.ExecaChildProcess; -}; - -export = execa; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js deleted file mode 100644 index 6fc9f12..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/index.js +++ /dev/null @@ -1,268 +0,0 @@ -'use strict'; -const path = require('path'); -const childProcess = require('child_process'); -const crossSpawn = require('cross-spawn'); -const stripFinalNewline = require('strip-final-newline'); -const npmRunPath = require('npm-run-path'); -const onetime = require('onetime'); -const makeError = require('./lib/error'); -const normalizeStdio = require('./lib/stdio'); -const {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} = require('./lib/kill'); -const {handleInput, getSpawnedResult, makeAllStream, validateInputSync} = require('./lib/stream'); -const {mergePromise, getSpawnedPromise} = require('./lib/promise'); -const {joinCommand, parseCommand, getEscapedCommand} = require('./lib/command'); - -const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100; - -const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) => { - const env = extendEnv ? {...process.env, ...envOption} : envOption; - - if (preferLocal) { - return npmRunPath.env({env, cwd: localDir, execPath}); - } - - return env; -}; - -const handleArguments = (file, args, options = {}) => { - const parsed = crossSpawn._parse(file, args, options); - file = parsed.command; - args = parsed.args; - options = parsed.options; - - options = { - maxBuffer: DEFAULT_MAX_BUFFER, - buffer: true, - stripFinalNewline: true, - extendEnv: true, - preferLocal: false, - localDir: options.cwd || process.cwd(), - execPath: process.execPath, - encoding: 'utf8', - reject: true, - cleanup: true, - all: false, - windowsHide: true, - ...options - }; - - options.env = getEnv(options); - - options.stdio = normalizeStdio(options); - - if (process.platform === 'win32' && path.basename(file, '.exe') === 'cmd') { - // #116 - args.unshift('/q'); - } - - return {file, args, options, parsed}; -}; - -const handleOutput = (options, value, error) => { - if (typeof value !== 'string' && !Buffer.isBuffer(value)) { - // When `execa.sync()` errors, we normalize it to '' to mimic `execa()` - return error === undefined ? 
undefined : ''; - } - - if (options.stripFinalNewline) { - return stripFinalNewline(value); - } - - return value; -}; - -const execa = (file, args, options) => { - const parsed = handleArguments(file, args, options); - const command = joinCommand(file, args); - const escapedCommand = getEscapedCommand(file, args); - - validateTimeout(parsed.options); - - let spawned; - try { - spawned = childProcess.spawn(parsed.file, parsed.args, parsed.options); - } catch (error) { - // Ensure the returned error is always both a promise and a child process - const dummySpawned = new childProcess.ChildProcess(); - const errorPromise = Promise.reject(makeError({ - error, - stdout: '', - stderr: '', - all: '', - command, - escapedCommand, - parsed, - timedOut: false, - isCanceled: false, - killed: false - })); - return mergePromise(dummySpawned, errorPromise); - } - - const spawnedPromise = getSpawnedPromise(spawned); - const timedPromise = setupTimeout(spawned, parsed.options, spawnedPromise); - const processDone = setExitHandler(spawned, parsed.options, timedPromise); - - const context = {isCanceled: false}; - - spawned.kill = spawnedKill.bind(null, spawned.kill.bind(spawned)); - spawned.cancel = spawnedCancel.bind(null, spawned, context); - - const handlePromise = async () => { - const [{error, exitCode, signal, timedOut}, stdoutResult, stderrResult, allResult] = await getSpawnedResult(spawned, parsed.options, processDone); - const stdout = handleOutput(parsed.options, stdoutResult); - const stderr = handleOutput(parsed.options, stderrResult); - const all = handleOutput(parsed.options, allResult); - - if (error || exitCode !== 0 || signal !== null) { - const returnedError = makeError({ - error, - exitCode, - signal, - stdout, - stderr, - all, - command, - escapedCommand, - parsed, - timedOut, - isCanceled: context.isCanceled, - killed: spawned.killed - }); - - if (!parsed.options.reject) { - return returnedError; - } - - throw returnedError; - } - - return { - command, - escapedCommand, - exitCode: 0, - stdout, - stderr, - all, - failed: false, - timedOut: false, - isCanceled: false, - killed: false - }; - }; - - const handlePromiseOnce = onetime(handlePromise); - - handleInput(spawned, parsed.options.input); - - spawned.all = makeAllStream(spawned, parsed.options); - - return mergePromise(spawned, handlePromiseOnce); -}; - -module.exports = execa; - -module.exports.sync = (file, args, options) => { - const parsed = handleArguments(file, args, options); - const command = joinCommand(file, args); - const escapedCommand = getEscapedCommand(file, args); - - validateInputSync(parsed.options); - - let result; - try { - result = childProcess.spawnSync(parsed.file, parsed.args, parsed.options); - } catch (error) { - throw makeError({ - error, - stdout: '', - stderr: '', - all: '', - command, - escapedCommand, - parsed, - timedOut: false, - isCanceled: false, - killed: false - }); - } - - const stdout = handleOutput(parsed.options, result.stdout, result.error); - const stderr = handleOutput(parsed.options, result.stderr, result.error); - - if (result.error || result.status !== 0 || result.signal !== null) { - const error = makeError({ - stdout, - stderr, - error: result.error, - signal: result.signal, - exitCode: result.status, - command, - escapedCommand, - parsed, - timedOut: result.error && result.error.code === 'ETIMEDOUT', - isCanceled: false, - killed: result.signal !== null - }); - - if (!parsed.options.reject) { - return error; - } - - throw error; - } - - return { - command, - escapedCommand, - 
exitCode: 0, - stdout, - stderr, - failed: false, - timedOut: false, - isCanceled: false, - killed: false - }; -}; - -module.exports.command = (command, options) => { - const [file, ...args] = parseCommand(command); - return execa(file, args, options); -}; - -module.exports.commandSync = (command, options) => { - const [file, ...args] = parseCommand(command); - return execa.sync(file, args, options); -}; - -module.exports.node = (scriptPath, args, options = {}) => { - if (args && !Array.isArray(args) && typeof args === 'object') { - options = args; - args = []; - } - - const stdio = normalizeStdio.node(options); - const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect')); - - const { - nodePath = process.execPath, - nodeOptions = defaultExecArgv - } = options; - - return execa( - nodePath, - [ - ...nodeOptions, - scriptPath, - ...(Array.isArray(args) ? args : []) - ], - { - ...options, - stdin: undefined, - stdout: undefined, - stderr: undefined, - stdio, - shell: false - } - ); -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js deleted file mode 100644 index 859b006..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/command.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -const normalizeArgs = (file, args = []) => { - if (!Array.isArray(args)) { - return [file]; - } - - return [file, ...args]; -}; - -const NO_ESCAPE_REGEXP = /^[\w.-]+$/; -const DOUBLE_QUOTES_REGEXP = /"/g; - -const escapeArg = arg => { - if (typeof arg !== 'string' || NO_ESCAPE_REGEXP.test(arg)) { - return arg; - } - - return `"${arg.replace(DOUBLE_QUOTES_REGEXP, '\\"')}"`; -}; - -const joinCommand = (file, args) => { - return normalizeArgs(file, args).join(' '); -}; - -const getEscapedCommand = (file, args) => { - return normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' '); -}; - -const SPACES_REGEXP = / +/g; - -// Handle `execa.command()` -const parseCommand = command => { - const tokens = []; - for (const token of command.trim().split(SPACES_REGEXP)) { - // Allow spaces to be escaped by a backslash if not meant as a delimiter - const previousToken = tokens[tokens.length - 1]; - if (previousToken && previousToken.endsWith('\\')) { - // Merge previous token with current one - tokens[tokens.length - 1] = `${previousToken.slice(0, -1)} ${token}`; - } else { - tokens.push(token); - } - } - - return tokens; -}; - -module.exports = { - joinCommand, - getEscapedCommand, - parseCommand -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js deleted file mode 100644 index 4214467..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/error.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict'; -const {signalsByName} = require('human-signals'); - -const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => { - if (timedOut) { - return `timed out after ${timeout} milliseconds`; - } - - if (isCanceled) { - return 'was canceled'; - } - - if (errorCode !== undefined) { - return `failed with ${errorCode}`; - } - - if (signal !== undefined) { - return `was killed with ${signal} (${signalDescription})`; - } - - if (exitCode !== undefined) { - return `failed with exit code ${exitCode}`; - } - - return 'failed'; -}; - -const makeError = ({ - stdout, - stderr, - all, - error, - 
signal, - exitCode, - command, - escapedCommand, - timedOut, - isCanceled, - killed, - parsed: {options: {timeout}} -}) => { - // `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`. - // We normalize them to `undefined` - exitCode = exitCode === null ? undefined : exitCode; - signal = signal === null ? undefined : signal; - const signalDescription = signal === undefined ? undefined : signalsByName[signal].description; - - const errorCode = error && error.code; - - const prefix = getErrorPrefix({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}); - const execaMessage = `Command ${prefix}: ${command}`; - const isError = Object.prototype.toString.call(error) === '[object Error]'; - const shortMessage = isError ? `${execaMessage}\n${error.message}` : execaMessage; - const message = [shortMessage, stderr, stdout].filter(Boolean).join('\n'); - - if (isError) { - error.originalMessage = error.message; - error.message = message; - } else { - error = new Error(message); - } - - error.shortMessage = shortMessage; - error.command = command; - error.escapedCommand = escapedCommand; - error.exitCode = exitCode; - error.signal = signal; - error.signalDescription = signalDescription; - error.stdout = stdout; - error.stderr = stderr; - - if (all !== undefined) { - error.all = all; - } - - if ('bufferedData' in error) { - delete error.bufferedData; - } - - error.failed = true; - error.timedOut = Boolean(timedOut); - error.isCanceled = isCanceled; - error.killed = killed && !timedOut; - - return error; -}; - -module.exports = makeError; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js deleted file mode 100644 index 287a142..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/kill.js +++ /dev/null @@ -1,115 +0,0 @@ -'use strict'; -const os = require('os'); -const onExit = require('signal-exit'); - -const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5; - -// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior -const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => { - const killResult = kill(signal); - setKillTimeout(kill, signal, options, killResult); - return killResult; -}; - -const setKillTimeout = (kill, signal, options, killResult) => { - if (!shouldForceKill(signal, options, killResult)) { - return; - } - - const timeout = getForceKillAfterTimeout(options); - const t = setTimeout(() => { - kill('SIGKILL'); - }, timeout); - - // Guarded because there's no `.unref()` when `execa` is used in the renderer - // process in Electron. This cannot be tested since we don't run tests in - // Electron. 
- // istanbul ignore else - if (t.unref) { - t.unref(); - } -}; - -const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => { - return isSigterm(signal) && forceKillAfterTimeout !== false && killResult; -}; - -const isSigterm = signal => { - return signal === os.constants.signals.SIGTERM || - (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM'); -}; - -const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => { - if (forceKillAfterTimeout === true) { - return DEFAULT_FORCE_KILL_TIMEOUT; - } - - if (!Number.isFinite(forceKillAfterTimeout) || forceKillAfterTimeout < 0) { - throw new TypeError(`Expected the \`forceKillAfterTimeout\` option to be a non-negative integer, got \`${forceKillAfterTimeout}\` (${typeof forceKillAfterTimeout})`); - } - - return forceKillAfterTimeout; -}; - -// `childProcess.cancel()` -const spawnedCancel = (spawned, context) => { - const killResult = spawned.kill(); - - if (killResult) { - context.isCanceled = true; - } -}; - -const timeoutKill = (spawned, signal, reject) => { - spawned.kill(signal); - reject(Object.assign(new Error('Timed out'), {timedOut: true, signal})); -}; - -// `timeout` option handling -const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => { - if (timeout === 0 || timeout === undefined) { - return spawnedPromise; - } - - let timeoutId; - const timeoutPromise = new Promise((resolve, reject) => { - timeoutId = setTimeout(() => { - timeoutKill(spawned, killSignal, reject); - }, timeout); - }); - - const safeSpawnedPromise = spawnedPromise.finally(() => { - clearTimeout(timeoutId); - }); - - return Promise.race([timeoutPromise, safeSpawnedPromise]); -}; - -const validateTimeout = ({timeout}) => { - if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) { - throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`); - } -}; - -// `cleanup` option handling -const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => { - if (!cleanup || detached) { - return timedPromise; - } - - const removeExitHandler = onExit(() => { - spawned.kill(); - }); - - return timedPromise.finally(() => { - removeExitHandler(); - }); -}; - -module.exports = { - spawnedKill, - spawnedCancel, - setupTimeout, - validateTimeout, - setExitHandler -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js deleted file mode 100644 index bd9d523..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/promise.js +++ /dev/null @@ -1,46 +0,0 @@ -'use strict'; - -const nativePromisePrototype = (async () => {})().constructor.prototype; -const descriptors = ['then', 'catch', 'finally'].map(property => [ - property, - Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property) -]); - -// The return value is a mixin of `childProcess` and `Promise` -const mergePromise = (spawned, promise) => { - for (const [property, descriptor] of descriptors) { - // Starting the main `promise` is deferred to avoid consuming streams - const value = typeof promise === 'function' ? 
- (...args) => Reflect.apply(descriptor.value, promise(), args) : - descriptor.value.bind(promise); - - Reflect.defineProperty(spawned, property, {...descriptor, value}); - } - - return spawned; -}; - -// Use promises instead of `child_process` events -const getSpawnedPromise = spawned => { - return new Promise((resolve, reject) => { - spawned.on('exit', (exitCode, signal) => { - resolve({exitCode, signal}); - }); - - spawned.on('error', error => { - reject(error); - }); - - if (spawned.stdin) { - spawned.stdin.on('error', error => { - reject(error); - }); - } - }); -}; - -module.exports = { - mergePromise, - getSpawnedPromise -}; - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js deleted file mode 100644 index 45129ed..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stdio.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -const aliases = ['stdin', 'stdout', 'stderr']; - -const hasAlias = options => aliases.some(alias => options[alias] !== undefined); - -const normalizeStdio = options => { - if (!options) { - return; - } - - const {stdio} = options; - - if (stdio === undefined) { - return aliases.map(alias => options[alias]); - } - - if (hasAlias(options)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${aliases.map(alias => `\`${alias}\``).join(', ')}`); - } - - if (typeof stdio === 'string') { - return stdio; - } - - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); - } - - const length = Math.max(stdio.length, aliases.length); - return Array.from({length}, (value, index) => stdio[index]); -}; - -module.exports = normalizeStdio; - -// `ipc` is pushed unless it is already present -module.exports.node = options => { - const stdio = normalizeStdio(options); - - if (stdio === 'ipc') { - return 'ipc'; - } - - if (stdio === undefined || typeof stdio === 'string') { - return [stdio, stdio, stdio, 'ipc']; - } - - if (stdio.includes('ipc')) { - return stdio; - } - - return [...stdio, 'ipc']; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js deleted file mode 100644 index d445dd4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/lib/stream.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict'; -const isStream = require('is-stream'); -const getStream = require('get-stream'); -const mergeStream = require('merge-stream'); - -// `input` option -const handleInput = (spawned, input) => { - // Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852 - // @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0 - if (input === undefined || spawned.stdin === undefined) { - return; - } - - if (isStream(input)) { - input.pipe(spawned.stdin); - } else { - spawned.stdin.end(input); - } -}; - -// `all` interleaves `stdout` and `stderr` -const makeAllStream = (spawned, {all}) => { - if (!all || (!spawned.stdout && !spawned.stderr)) { - return; - } - - const mixed = mergeStream(); - - if (spawned.stdout) { - mixed.add(spawned.stdout); - } - - if (spawned.stderr) { - mixed.add(spawned.stderr); - } - - return mixed; -}; - -// On failure, `result.stdout|stderr|all` should contain the currently buffered stream -const getBufferedData = async (stream, 
streamPromise) => { - if (!stream) { - return; - } - - stream.destroy(); - - try { - return await streamPromise; - } catch (error) { - return error.bufferedData; - } -}; - -const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => { - if (!stream || !buffer) { - return; - } - - if (encoding) { - return getStream(stream, {encoding, maxBuffer}); - } - - return getStream.buffer(stream, {maxBuffer}); -}; - -// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all) -const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => { - const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer}); - const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer}); - const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2}); - - try { - return await Promise.all([processDone, stdoutPromise, stderrPromise, allPromise]); - } catch (error) { - return Promise.all([ - {error, signal: error.signal, timedOut: error.timedOut}, - getBufferedData(stdout, stdoutPromise), - getBufferedData(stderr, stderrPromise), - getBufferedData(all, allPromise) - ]); - } -}; - -const validateInputSync = ({input}) => { - if (isStream(input)) { - throw new TypeError('The `input` option cannot be a stream in sync mode'); - } -}; - -module.exports = { - handleInput, - makeAllStream, - getSpawnedResult, - validateInputSync -}; - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
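For context, the removed `lib/stream.js` builds the interleaved `all` output by adding both child streams to one merged stream. The following is only a conceptual sketch of that behavior, not code from the patch; it assumes `merge-stream` is installed and the spawned command is illustrative:

```js
const {spawn} = require('child_process');
const mergeStream = require('merge-stream');

// Both child streams feed a single merged stream, so output can be read
// in arrival order, similar to what makeAllStream() produces.
const child = spawn('node', ['-e', 'console.log("out"); console.error("err");']);

const all = mergeStream(child.stdout, child.stderr);
all.pipe(process.stdout); // prints 'out' and 'err' in the order they arrive
```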
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json deleted file mode 100644 index 22556f2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "execa", - "version": "5.1.1", - "description": "Process execution for humans", - "license": "MIT", - "repository": "sindresorhus/execa", - "funding": "https://github.com/sindresorhus/execa?sponsor=1", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=10" - }, - "scripts": { - "test": "xo && nyc ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts", - "lib" - ], - "keywords": [ - "exec", - "child", - "process", - "execute", - "fork", - "execfile", - "spawn", - "file", - "shell", - "bin", - "binary", - "binaries", - "npm", - "path", - "local" - ], - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "devDependencies": { - "@types/node": "^14.14.10", - "ava": "^2.4.0", - "get-node": "^11.0.1", - "is-running": "^2.1.0", - "nyc": "^15.1.0", - "p-event": "^4.2.0", - "tempfile": "^3.0.0", - "tsd": "^0.13.1", - "xo": "^0.35.0" - }, - "nyc": { - "reporter": [ - "text", - "lcov" - ], - "exclude": [ - "**/fixtures/**", - "**/test.js", - "**/test/**" - ] - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md deleted file mode 100644 index 843edbc..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/execa/readme.md +++ /dev/null @@ -1,663 +0,0 @@ - -
- -[![Coverage Status](https://codecov.io/gh/sindresorhus/execa/branch/main/graph/badge.svg)](https://codecov.io/gh/sindresorhus/execa) - -> Process execution for humans - -## Why - -This package improves [`child_process`](https://nodejs.org/api/child_process.html) methods with: - -- Promise interface. -- [Strips the final newline](#stripfinalnewline) from the output so you don't have to do `stdout.trim()`. -- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform. -- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why) -- Higher max buffer. 100 MB instead of 200 KB. -- [Executes locally installed binaries by name.](#preferlocal) -- [Cleans up spawned processes when the parent process dies.](#cleanup) -- [Get interleaved output](#all) from `stdout` and `stderr` similar to what is printed on the terminal. [*(Async only)*](#execasyncfile-arguments-options) -- [Can specify file and arguments as a single string without a shell](#execacommandcommand-options) -- More descriptive errors. - -## Install - -``` -$ npm install execa -``` - -## Usage - -```js -const execa = require('execa'); - -(async () => { - const {stdout} = await execa('echo', ['unicorns']); - console.log(stdout); - //=> 'unicorns' -})(); -``` - -### Pipe the child process stdout to the parent - -```js -const execa = require('execa'); - -execa('echo', ['unicorns']).stdout.pipe(process.stdout); -``` - -### Handling Errors - -```js -const execa = require('execa'); - -(async () => { - // Catching an error - try { - await execa('unknown', ['command']); - } catch (error) { - console.log(error); - /* - { - message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', - errno: -2, - code: 'ENOENT', - syscall: 'spawn unknown', - path: 'unknown', - spawnargs: ['command'], - originalMessage: 'spawn unknown ENOENT', - shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT', - command: 'unknown command', - escapedCommand: 'unknown command', - stdout: '', - stderr: '', - all: '', - failed: true, - timedOut: false, - isCanceled: false, - killed: false - } - */ - } - -})(); -``` - -### Cancelling a spawned process - -```js -const execa = require('execa'); - -(async () => { - const subprocess = execa('node'); - - setTimeout(() => { - subprocess.cancel(); - }, 1000); - - try { - await subprocess; - } catch (error) { - console.log(subprocess.killed); // true - console.log(error.isCanceled); // true - } -})() -``` - -### Catching an error with the sync method - -```js -try { - execa.sync('unknown', ['command']); -} catch (error) { - console.log(error); - /* - { - message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', - errno: -2, - code: 'ENOENT', - syscall: 'spawnSync unknown', - path: 'unknown', - spawnargs: ['command'], - originalMessage: 'spawnSync unknown ENOENT', - shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT', - command: 'unknown command', - escapedCommand: 'unknown command', - stdout: '', - stderr: '', - all: '', - failed: true, - timedOut: false, - isCanceled: false, - killed: false - } - */ -} -``` - -### Kill a process - -Using SIGTERM, and after 2 seconds, kill it with SIGKILL. - -```js -const subprocess = execa('node'); - -setTimeout(() => { - subprocess.kill('SIGTERM', { - forceKillAfterTimeout: 2000 - }); -}, 1000); -``` - -## API - -### execa(file, arguments, options?) - -Execute a file. 
Think of this as a mix of [`child_process.execFile()`](https://nodejs.org/api/child_process.html#child_process_child_process_execfile_file_args_options_callback) and [`child_process.spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). - -No escaping/quoting is needed. - -Unless the [`shell`](#shell) option is used, no shell interpreter (Bash, `cmd.exe`, etc.) is used, so shell features such as variables substitution (`echo $PATH`) are not allowed. - -Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) which: - - is also a `Promise` resolving or rejecting with a [`childProcessResult`](#childProcessResult). - - exposes the following additional methods and properties. - -#### kill(signal?, options?) - -Same as the original [`child_process#kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal) except: if `signal` is `SIGTERM` (the default value) and the child process is not terminated after 5 seconds, force it by sending `SIGKILL`. - -##### options.forceKillAfterTimeout - -Type: `number | false`\ -Default: `5000` - -Milliseconds to wait for the child process to terminate before sending `SIGKILL`. - -Can be disabled with `false`. - -#### cancel() - -Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`. - -#### all - -Type: `ReadableStream | undefined` - -Stream combining/interleaving [`stdout`](https://nodejs.org/api/child_process.html#child_process_subprocess_stdout) and [`stderr`](https://nodejs.org/api/child_process.html#child_process_subprocess_stderr). - -This is `undefined` if either: - - the [`all` option](#all-2) is `false` (the default value) - - both [`stdout`](#stdout-1) and [`stderr`](#stderr-1) options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio) - -### execa.sync(file, arguments?, options?) - -Execute a file synchronously. - -Returns or throws a [`childProcessResult`](#childProcessResult). - -### execa.command(command, options?) - -Same as [`execa()`](#execafile-arguments-options) except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`. - -If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed. - -The [`shell` option](#shell) must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`. - -### execa.commandSync(command, options?) - -Same as [`execa.command()`](#execacommand-command-options) but synchronous. - -Returns or throws a [`childProcessResult`](#childProcessResult). - -### execa.node(scriptPath, arguments?, options?) - -Execute a Node.js script as a child process. 
- -Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)): - - the current Node version and options are used. This can be overridden using the [`nodePath`](#nodepath-for-node-only) and [`nodeOptions`](#nodeoptions-for-node-only) options. - - the [`shell`](#shell) option cannot be used - - an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio) - -### childProcessResult - -Type: `object` - -Result of a child process execution. On success this is a plain object. On failure this is also an `Error` instance. - -The child process [fails](#failed) when: -- its [exit code](#exitcode) is not `0` -- it was [killed](#killed) with a [signal](#signal) -- [timing out](#timedout) -- [being canceled](#iscanceled) -- there's not enough memory or there are already too many child processes - -#### command - -Type: `string` - -The file and arguments that were run, for logging purposes. - -This is not escaped and should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). - -#### escapedCommand - -Type: `string` - -Same as [`command`](#command) but escaped. - -This is meant to be copy and pasted into a shell, for debugging purposes. -Since the escaping is fairly basic, this should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options). - -#### exitCode - -Type: `number` - -The numeric exit code of the process that was run. - -#### stdout - -Type: `string | Buffer` - -The output of the process on stdout. - -#### stderr - -Type: `string | Buffer` - -The output of the process on stderr. - -#### all - -Type: `string | Buffer | undefined` - -The output of the process with `stdout` and `stderr` interleaved. - -This is `undefined` if either: - - the [`all` option](#all-2) is `false` (the default value) - - `execa.sync()` was used - -#### failed - -Type: `boolean` - -Whether the process failed to run. - -#### timedOut - -Type: `boolean` - -Whether the process timed out. - -#### isCanceled - -Type: `boolean` - -Whether the process was canceled. - -#### killed - -Type: `boolean` - -Whether the process was killed. - -#### signal - -Type: `string | undefined` - -The name of the signal that was used to terminate the process. For example, `SIGFPE`. - -If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. - -#### signalDescription - -Type: `string | undefined` - -A human-friendly description of the signal that was used to terminate the process. For example, `Floating point arithmetic error`. - -If a signal terminated the process, this property is defined and included in the error message. Otherwise it is `undefined`. It is also `undefined` when the signal is very uncommon which should seldomly happen. - -#### message - -Type: `string` - -Error message when the child process failed to run. In addition to the [underlying error message](#originalMessage), it also contains some information related to why the child process errored. - -The child process [stderr](#stderr) then [stdout](#stdout) are appended to the end, separated with newlines and not interleaved. 
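As a quick illustration of the result properties above, here is a minimal sketch (not from the original readme); it assumes a Unix-like system where the `false` command exists and exits with code 1:

```js
const execa = require('execa');

(async () => {
	try {
		// `false` exits with a non-zero code, so the promise rejects.
		await execa('false');
	} catch (error) {
		console.log(error.failed);     // true
		console.log(error.exitCode);   // 1
		console.log(error.timedOut);   // false
		console.log(error.isCanceled); // false
		console.log(error.message);    // includes 'Command failed with exit code 1: false'
	}
})();
```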
- -#### shortMessage - -Type: `string` - -This is the same as the [`message` property](#message) except it does not include the child process stdout/stderr. - -#### originalMessage - -Type: `string | undefined` - -Original error message. This is the same as the `message` property except it includes neither the child process stdout/stderr nor some additional information added by Execa. - -This is `undefined` unless the child process exited due to an `error` event or a timeout. - -### options - -Type: `object` - -#### cleanup - -Type: `boolean`\ -Default: `true` - -Kill the spawned process when the parent process exits unless either: - - the spawned process is [`detached`](https://nodejs.org/api/child_process.html#child_process_options_detached) - - the parent process is terminated abruptly, for example, with `SIGKILL` as opposed to `SIGTERM` or a normal exit - -#### preferLocal - -Type: `boolean`\ -Default: `false` - -Prefer locally installed binaries when looking for a binary to execute.\ -If you `$ npm install foo`, you can then `execa('foo')`. - -#### localDir - -Type: `string`\ -Default: `process.cwd()` - -Preferred path to find locally installed binaries in (use with `preferLocal`). - -#### execPath - -Type: `string`\ -Default: `process.execPath` (Current Node.js executable) - -Path to the Node.js executable to use in child processes. - -This can be either an absolute path or a path relative to the [`cwd` option](#cwd). - -Requires [`preferLocal`](#preferlocal) to be `true`. - -For example, this can be used together with [`get-node`](https://github.com/ehmicky/get-node) to run a specific Node.js version in a child process. - -#### buffer - -Type: `boolean`\ -Default: `true` - -Buffer the output from the spawned process. When set to `false`, you must read the output of [`stdout`](#stdout-1) and [`stderr`](#stderr-1) (or [`all`](#all) if the [`all`](#all-2) option is `true`). Otherwise the returned promise will not be resolved/rejected. - -If the spawned process fails, [`error.stdout`](#stdout), [`error.stderr`](#stderr), and [`error.all`](#all) will contain the buffered data. - -#### input - -Type: `string | Buffer | stream.Readable` - -Write some input to the `stdin` of your binary.\ -Streams are not allowed when using the synchronous methods. - -#### stdin - -Type: `string | number | Stream | undefined`\ -Default: `pipe` - -Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - -#### stdout - -Type: `string | number | Stream | undefined`\ -Default: `pipe` - -Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - -#### stderr - -Type: `string | number | Stream | undefined`\ -Default: `pipe` - -Same options as [`stdio`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio). - -#### all - -Type: `boolean`\ -Default: `false` - -Add an `.all` property on the [promise](#all) and the [resolved value](#all-1). The property contains the output of the process with `stdout` and `stderr` interleaved. - -#### reject - -Type: `boolean`\ -Default: `true` - -Setting this to `false` resolves the promise with the error instead of rejecting it. - -#### stripFinalNewline - -Type: `boolean`\ -Default: `true` - -Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from the output. 
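To show how a few of the options above interact, here is a small sketch (not from the original readme) combining `all`, `reject`, and the default `stripFinalNewline`; the command is illustrative:

```js
const execa = require('execa');

(async () => {
	// `all: true` adds the interleaved output; `reject: false` resolves with the
	// result object instead of rejecting when the command fails.
	const result = await execa('node', ['--version'], {all: true, reject: false});

	console.log(result.failed); // false
	console.log(result.all);    // e.g. 'v14.17.0' (final newline stripped by default)
})();
```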
- -#### extendEnv - -Type: `boolean`\ -Default: `true` - -Set to `false` if you don't want to extend the environment variables when providing the `env` property. - ---- - -Execa also accepts the below options which are the same as the options for [`child_process#spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options)/[`child_process#exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) - -#### cwd - -Type: `string`\ -Default: `process.cwd()` - -Current working directory of the child process. - -#### env - -Type: `object`\ -Default: `process.env` - -Environment key-value pairs. Extends automatically from `process.env`. Set [`extendEnv`](#extendenv) to `false` if you don't want this. - -#### argv0 - -Type: `string` - -Explicitly set the value of `argv[0]` sent to the child process. This will be set to `file` if not specified. - -#### stdio - -Type: `string | string[]`\ -Default: `pipe` - -Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_stdio) configuration. - -#### serialization - -Type: `string`\ -Default: `'json'` - -Specify the kind of serialization used for sending messages between processes when using the [`stdio: 'ipc'`](#stdio) option or [`execa.node()`](#execanodescriptpath-arguments-options): - - `json`: Uses `JSON.stringify()` and `JSON.parse()`. - - `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value) - -Requires Node.js `13.2.0` or later. - -[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization) - -#### detached - -Type: `boolean` - -Prepare child to run independently of its parent process. Specific behavior [depends on the platform](https://nodejs.org/api/child_process.html#child_process_options_detached). - -#### uid - -Type: `number` - -Sets the user identity of the process. - -#### gid - -Type: `number` - -Sets the group identity of the process. - -#### shell - -Type: `boolean | string`\ -Default: `false` - -If `true`, runs `file` inside of a shell. Uses `/bin/sh` on UNIX and `cmd.exe` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX or `/d /s /c` on Windows. - -We recommend against using this option since it is: -- not cross-platform, encouraging shell-specific syntax. -- slower, because of the additional shell interpretation. -- unsafe, potentially allowing command injection. - -#### encoding - -Type: `string | null`\ -Default: `utf8` - -Specify the character encoding used to decode the `stdout` and `stderr` output. If set to `null`, then `stdout` and `stderr` will be a `Buffer` instead of a string. - -#### timeout - -Type: `number`\ -Default: `0` - -If timeout is greater than `0`, the parent will send the signal identified by the `killSignal` property (the default is `SIGTERM`) if the child runs longer than timeout milliseconds. - -#### maxBuffer - -Type: `number`\ -Default: `100_000_000` (100 MB) - -Largest amount of data in bytes allowed on `stdout` or `stderr`. - -#### killSignal - -Type: `string | number`\ -Default: `SIGTERM` - -Signal value to be used when the spawned process will be killed. - -#### windowsVerbatimArguments - -Type: `boolean`\ -Default: `false` - -If `true`, no quoting or escaping of arguments is done on Windows. Ignored on other platforms. This is set to `true` automatically when the `shell` option is `true`. 
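These spawn-related options can be combined freely. A minimal sketch with illustrative values (not from the original readme), assuming a Unix-like system where `/tmp` exists:

```js
const execa = require('execa');

(async () => {
	const {stdout} = await execa('node', ['-e', 'console.log(process.env.GREETING, process.cwd())'], {
		cwd: '/tmp',              // working directory of the child process
		env: {GREETING: 'hello'}, // merged into process.env since extendEnv defaults to true
		timeout: 5000,            // send the default killSignal (SIGTERM) after 5 seconds
		encoding: 'utf8'          // stdout/stderr returned as strings (the default)
	});

	console.log(stdout); // e.g. 'hello /tmp'
})();
```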
- -#### windowsHide - -Type: `boolean`\ -Default: `true` - -On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows. - -#### nodePath *(For `.node()` only)* - -Type: `string`\ -Default: [`process.execPath`](https://nodejs.org/api/process.html#process_process_execpath) - -Node.js executable used to create the child process. - -#### nodeOptions *(For `.node()` only)* - -Type: `string[]`\ -Default: [`process.execArgv`](https://nodejs.org/api/process.html#process_process_execargv) - -List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the Node.js executable. - -## Tips - -### Retry on error - -Gracefully handle failures by using automatic retries and exponential backoff with the [`p-retry`](https://github.com/sindresorhus/p-retry) package: - -```js -const pRetry = require('p-retry'); - -const run = async () => { - const results = await execa('curl', ['-sSL', 'https://sindresorhus.com/unicorn']); - return results; -}; - -(async () => { - console.log(await pRetry(run, {retries: 5})); -})(); -``` - -### Save and pipe output from a child process - -Let's say you want to show the output of a child process in real-time while also saving it to a variable. - -```js -const execa = require('execa'); - -const subprocess = execa('echo', ['foo']); -subprocess.stdout.pipe(process.stdout); - -(async () => { - const {stdout} = await subprocess; - console.log('child output:', stdout); -})(); -``` - -### Redirect output to a file - -```js -const execa = require('execa'); - -const subprocess = execa('echo', ['foo']) -subprocess.stdout.pipe(fs.createWriteStream('stdout.txt')) -``` - -### Redirect input from a file - -```js -const execa = require('execa'); - -const subprocess = execa('cat') -fs.createReadStream('stdin.txt').pipe(subprocess.stdin) -``` - -### Execute the current package's binary - -```js -const {getBinPathSync} = require('get-bin-path'); - -const binPath = getBinPathSync(); -const subprocess = execa(binPath); -``` - -`execa` can be combined with [`get-bin-path`](https://github.com/ehmicky/get-bin-path) to test the current package's binary. As opposed to hard-coding the path to the binary, this validates that the `package.json` `bin` field is correctly set up. - -## Related - -- [gulp-execa](https://github.com/ehmicky/gulp-execa) - Gulp plugin for `execa` -- [nvexeca](https://github.com/ehmicky/nvexeca) - Run `execa` using any Node.js version -- [sudo-prompt](https://github.com/jorangreef/sudo-prompt) - Run commands with elevated privileges. - -## Maintainers - -- [Sindre Sorhus](https://github.com/sindresorhus) -- [@ehmicky](https://github.com/ehmicky) - ---- - -
- Get professional support for this package with a Tidelift subscription
- Tidelift helps make open source sustainable for maintainers while giving companies
- assurances about security, maintenance, and licensing for their dependencies.
-
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE deleted file mode 100644 index 65a9994..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Denis Malinochkin - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md deleted file mode 100644 index 1d7843a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/README.md +++ /dev/null @@ -1,830 +0,0 @@ -# fast-glob - -> It's a very fast and efficient [glob][glob_definition] library for [Node.js][node_js]. - -This package provides methods for traversing the file system and returning pathnames that matched a defined set of a specified pattern according to the rules used by the Unix Bash shell with some simplifications, meanwhile results are returned in **arbitrary order**. Quick, simple, effective. - -## Table of Contents - -
-Details - -* [Highlights](#highlights) -* [Old and modern mode](#old-and-modern-mode) -* [Pattern syntax](#pattern-syntax) - * [Basic syntax](#basic-syntax) - * [Advanced syntax](#advanced-syntax) -* [Installation](#installation) -* [API](#api) - * [Asynchronous](#asynchronous) - * [Synchronous](#synchronous) - * [Stream](#stream) - * [patterns](#patterns) - * [[options]](#options) - * [Helpers](#helpers) - * [generateTasks](#generatetaskspatterns-options) - * [isDynamicPattern](#isdynamicpatternpattern-options) - * [escapePath](#escapepathpath) - * [convertPathToPattern](#convertpathtopatternpath) -* [Options](#options-3) - * [Common](#common) - * [concurrency](#concurrency) - * [cwd](#cwd) - * [deep](#deep) - * [followSymbolicLinks](#followsymboliclinks) - * [fs](#fs) - * [ignore](#ignore) - * [suppressErrors](#suppresserrors) - * [throwErrorOnBrokenSymbolicLink](#throwerroronbrokensymboliclink) - * [Output control](#output-control) - * [absolute](#absolute) - * [markDirectories](#markdirectories) - * [objectMode](#objectmode) - * [onlyDirectories](#onlydirectories) - * [onlyFiles](#onlyfiles) - * [stats](#stats) - * [unique](#unique) - * [Matching control](#matching-control) - * [braceExpansion](#braceexpansion) - * [caseSensitiveMatch](#casesensitivematch) - * [dot](#dot) - * [extglob](#extglob) - * [globstar](#globstar) - * [baseNameMatch](#basenamematch) -* [FAQ](#faq) - * [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) - * [How to write patterns on Windows?](#how-to-write-patterns-on-windows) - * [Why are parentheses match wrong?](#why-are-parentheses-match-wrong) - * [How to exclude directory from reading?](#how-to-exclude-directory-from-reading) - * [How to use UNC path?](#how-to-use-unc-path) - * [Compatible with `node-glob`?](#compatible-with-node-glob) -* [Benchmarks](#benchmarks) - * [Server](#server) - * [Nettop](#nettop) -* [Changelog](#changelog) -* [License](#license) - -
- -## Highlights - -* Fast. Probably the fastest. -* Supports multiple and negative patterns. -* Synchronous, Promise and Stream API. -* Object mode. Can return more than just strings. -* Error-tolerant. - -## Old and modern mode - -This package works in two modes, depending on the environment in which it is used. - -* **Old mode**. Node.js below 10.10 or when the [`stats`](#stats) option is *enabled*. -* **Modern mode**. Node.js 10.10+ and the [`stats`](#stats) option is *disabled*. - -The modern mode is faster. Learn more about the [internal mechanism][nodelib_fs_scandir_old_and_modern_modern]. - -## Pattern syntax - -> :warning: Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. - -There is more than one form of syntax: basic and advanced. Below is a brief overview of the supported features. Also pay attention to our [FAQ](#faq). - -> :book: This package uses [`micromatch`][micromatch] as a library for pattern matching. - -### Basic syntax - -* An asterisk (`*`) — matches everything except slashes (path separators), hidden files (names starting with `.`). -* A double star or globstar (`**`) — matches zero or more directories. -* Question mark (`?`) – matches any single character except slashes (path separators). -* Sequence (`[seq]`) — matches any character in sequence. - -> :book: A few additional words about the [basic matching behavior][picomatch_matching_behavior]. - -Some examples: - -* `src/**/*.js` — matches all files in the `src` directory (any level of nesting) that have the `.js` extension. -* `src/*.??` — matches all files in the `src` directory (only first level of nesting) that have a two-character extension. -* `file-[01].js` — matches files: `file-0.js`, `file-1.js`. - -### Advanced syntax - -* [Escapes characters][micromatch_backslashes] (`\\`) — matching special characters (`$^*+?()[]`) as literals. -* [POSIX character classes][picomatch_posix_brackets] (`[[:digit:]]`). -* [Extended globs][micromatch_extglobs] (`?(pattern-list)`). -* [Bash style brace expansions][micromatch_braces] (`{}`). -* [Regexp character classes][micromatch_regex_character_classes] (`[1-5]`). -* [Regex groups][regular_expressions_brackets] (`(a|b)`). - -> :book: A few additional words about the [advanced matching behavior][micromatch_extended_globbing]. - -Some examples: - -* `src/**/*.{css,scss}` — matches all files in the `src` directory (any level of nesting) that have the `.css` or `.scss` extension. -* `file-[[:digit:]].js` — matches files: `file-0.js`, `file-1.js`, …, `file-9.js`. -* `file-{1..3}.js` — matches files: `file-1.js`, `file-2.js`, `file-3.js`. -* `file-(1|2)` — matches files: `file-1.js`, `file-2.js`. - -## Installation - -```console -npm install fast-glob -``` - -## API - -### Asynchronous - -```js -fg(patterns, [options]) -fg.async(patterns, [options]) -fg.glob(patterns, [options]) -``` - -Returns a `Promise` with an array of matching entries. - -```js -const fg = require('fast-glob'); - -const entries = await fg(['.editorconfig', '**/index.js'], { dot: true }); - -// ['.editorconfig', 'services/index.js'] -``` - -### Synchronous - -```js -fg.sync(patterns, [options]) -fg.globSync(patterns, [options]) -``` - -Returns an array of matching entries. 
- -```js -const fg = require('fast-glob'); - -const entries = fg.sync(['.editorconfig', '**/index.js'], { dot: true }); - -// ['.editorconfig', 'services/index.js'] -``` - -### Stream - -```js -fg.stream(patterns, [options]) -fg.globStream(patterns, [options]) -``` - -Returns a [`ReadableStream`][node_js_stream_readable_streams] when the `data` event will be emitted with matching entry. - -```js -const fg = require('fast-glob'); - -const stream = fg.stream(['.editorconfig', '**/index.js'], { dot: true }); - -for await (const entry of stream) { - // .editorconfig - // services/index.js -} -``` - -#### patterns - -* Required: `true` -* Type: `string | string[]` - -Any correct pattern(s). - -> :1234: [Pattern syntax](#pattern-syntax) -> -> :warning: This package does not respect the order of patterns. First, all the negative patterns are applied, and only then the positive patterns. If you want to get a certain order of records, use sorting or split calls. - -#### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -### Helpers - -#### `generateTasks(patterns, [options])` - -Returns the internal representation of patterns ([`Task`](./src/managers/tasks.ts) is a combining patterns by base directory). - -```js -fg.generateTasks('*'); - -[{ - base: '.', // Parent directory for all patterns inside this task - dynamic: true, // Dynamic or static patterns are in this task - patterns: ['*'], - positive: ['*'], - negative: [] -}] -``` - -##### patterns - -* Required: `true` -* Type: `string | string[]` - -Any correct pattern(s). - -##### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -#### `isDynamicPattern(pattern, [options])` - -Returns `true` if the passed pattern is a dynamic pattern. - -> :1234: [What is a static or dynamic pattern?](#what-is-a-static-or-dynamic-pattern) - -```js -fg.isDynamicPattern('*'); // true -fg.isDynamicPattern('abc'); // false -``` - -##### pattern - -* Required: `true` -* Type: `string` - -Any correct pattern. - -##### [options] - -* Required: `false` -* Type: [`Options`](#options-3) - -See [Options](#options-3) section. - -#### `escapePath(path)` - -Returns the path with escaped special characters depending on the platform. - -* Posix: - * `*?|(){}[]`; - * `!` at the beginning of line; - * `@+!` before the opening parenthesis; - * `\\` before non-special characters; -* Windows: - * `(){}[]` - * `!` at the beginning of line; - * `@+!` before the opening parenthesis; - * Characters like `*?|` cannot be used in the path ([windows_naming_conventions][windows_naming_conventions]), so they will not be escaped; - -```js -fg.escapePath('!abc'); -// \\!abc -fg.escapePath('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac' -// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac - -fg.posix.escapePath('C:\\Program Files (x86)\\**\\*'); -// C:\\\\Program Files \\(x86\\)\\*\\*\\* -fg.win32.escapePath('C:\\Program Files (x86)\\**\\*'); -// Windows: C:\\Program Files \\(x86\\)\\**\\* -``` - -#### `convertPathToPattern(path)` - -Converts a path to a pattern depending on the platform, including special character escaping. - -* Posix. Works similarly to the `fg.posix.escapePath` method. -* Windows. Works similarly to the `fg.win32.escapePath` method, additionally converting backslashes to forward slashes in cases where they are not escape characters (`!()+@{}[]`). 
- -```js -fg.convertPathToPattern('[OpenSource] mrmlnc – fast-glob (Deluxe Edition) 2014') + '/*.flac'; -// \\[OpenSource\\] mrmlnc – fast-glob \\(Deluxe Edition\\) 2014/*.flac - -fg.convertPathToPattern('C:/Program Files (x86)/**/*'); -// Posix: C:/Program Files \\(x86\\)/\\*\\*/\\* -// Windows: C:/Program Files \\(x86\\)/**/* - -fg.convertPathToPattern('C:\\Program Files (x86)\\**\\*'); -// Posix: C:\\\\Program Files \\(x86\\)\\*\\*\\* -// Windows: C:/Program Files \\(x86\\)/**/* - -fg.posix.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; -// Posix: \\\\\\?\\\\c:\\\\Program Files \\(x86\\)/**/* (broken pattern) -fg.win32.convertPathToPattern('\\\\?\\c:\\Program Files (x86)') + '/**/*'; -// Windows: //?/c:/Program Files \\(x86\\)/**/* -``` - -## Options - -### Common options - -#### concurrency - -* Type: `number` -* Default: `os.cpus().length` - -Specifies the maximum number of concurrent requests from a reader to read directories. - -> :book: The higher the number, the higher the performance and load on the file system. If you want to read in quiet mode, set the value to a comfortable number or `1`. - -
-
-More details
-
-In Node, there are [two types of threads][nodejs_thread_pool]: the Event Loop (code) and a Thread Pool (fs, dns, …). The thread pool size is controlled by the `UV_THREADPOOL_SIZE` environment variable. Its default size is 4 ([documentation][libuv_thread_pool]). There is a single pool for all tasks within a single Node.js process.
-
-Any code can make at most 4 truly concurrent accesses to the file system; the remaining FS requests wait in the queue.
-
-> :book: Each new instance of FG in the same Node.js process will use the same thread pool.
-
-But this package also has the `concurrency` option, which allows you to control the number of concurrent accesses to the FS at the package level. By default, it is equal to the number of cores available to the current Node.js process. This allows you to set a value smaller than the pool size (`concurrency: 1`) or, conversely, to prepare tasks for the pool queue more quickly (`concurrency: Number.POSITIVE_INFINITY`).
-
-So, in fact, this package can **only make 4 concurrent requests to the FS**. You can raise that limit with the `UV_THREADPOOL_SIZE` environment variable, but in practice this does not give a proportional speed-up.
-
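-Below is a minimal, hypothetical sketch (not part of the upstream documentation) of how the `concurrency` option described above might be tuned; the `'**/*.js'` pattern and the variable names are placeholders.
-
-```js
-const fg = require('fast-glob');
-
-(async () => {
-    // Throttle fast-glob to a single directory read at a time ("quiet" reading).
-    // The pattern here is only an example; any valid pattern works.
-    const calm = await fg('**/*.js', { concurrency: 1 });
-
-    // Queue tasks as fast as possible. Real FS parallelism is still capped by
-    // the libuv thread pool (UV_THREADPOOL_SIZE, 4 by default), which must be
-    // set in the environment before the Node.js process starts.
-    const eager = await fg('**/*.js', { concurrency: Number.POSITIVE_INFINITY });
-
-    console.log(calm.length, eager.length);
-})();
-```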
- -#### cwd - -* Type: `string` -* Default: `process.cwd()` - -The current working directory in which to search. - -#### deep - -* Type: `number` -* Default: `Infinity` - -Specifies the maximum depth of a read directory relative to the start directory. - -For example, you have the following tree: - -```js -dir/ -└── one/ // 1 - └── two/ // 2 - └── file.js // 3 -``` - -```js -// With base directory -fg.sync('dir/**', { onlyFiles: false, deep: 1 }); // ['dir/one'] -fg.sync('dir/**', { onlyFiles: false, deep: 2 }); // ['dir/one', 'dir/one/two'] - -// With cwd option -fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 1 }); // ['one'] -fg.sync('**', { onlyFiles: false, cwd: 'dir', deep: 2 }); // ['one', 'one/two'] -``` - -> :book: If you specify a pattern with some base directory, this directory will not participate in the calculation of the depth of the found directories. Think of it as a [`cwd`](#cwd) option. - -#### followSymbolicLinks - -* Type: `boolean` -* Default: `true` - -Indicates whether to traverse descendants of symbolic link directories when expanding `**` patterns. - -> :book: Note that this option does not affect the base directory of the pattern. For example, if `./a` is a symlink to directory `./b` and you specified `['./a**', './b/**']` patterns, then directory `./a` will still be read. - -> :book: If the [`stats`](#stats) option is specified, the information about the symbolic link (`fs.lstat`) will be replaced with information about the entry (`fs.stat`) behind it. - -#### fs - -* Type: `FileSystemAdapter` -* Default: `fs.*` - -Custom implementation of methods for working with the file system. Supports objects with enumerable properties only. - -```ts -export interface FileSystemAdapter { - lstat?: typeof fs.lstat; - stat?: typeof fs.stat; - lstatSync?: typeof fs.lstatSync; - statSync?: typeof fs.statSync; - readdir?: typeof fs.readdir; - readdirSync?: typeof fs.readdirSync; -} -``` - -#### ignore - -* Type: `string[]` -* Default: `[]` - -An array of glob patterns to exclude matches. This is an alternative way to use negative patterns. - -```js -dir/ -├── package-lock.json -└── package.json -``` - -```js -fg.sync(['*.json', '!package-lock.json']); // ['package.json'] -fg.sync('*.json', { ignore: ['package-lock.json'] }); // ['package.json'] -``` - -#### suppressErrors - -* Type: `boolean` -* Default: `false` - -By default this package suppress only `ENOENT` errors. Set to `true` to suppress any error. - -> :book: Can be useful when the directory has entries with a special level of access. - -#### throwErrorOnBrokenSymbolicLink - -* Type: `boolean` -* Default: `false` - -Throw an error when symbolic link is broken if `true` or safely return `lstat` call if `false`. - -> :book: This option has no effect on errors when reading the symbolic link directory. - -### Output control - -#### absolute - -* Type: `boolean` -* Default: `false` - -Return the absolute path for entries. - -```js -fg.sync('*.js', { absolute: false }); // ['index.js'] -fg.sync('*.js', { absolute: true }); // ['/home/user/index.js'] -``` - -> :book: This option is required if you want to use negative patterns with absolute path, for example, `!${__dirname}/*.js`. - -#### markDirectories - -* Type: `boolean` -* Default: `false` - -Mark the directory path with the final slash. 
- -```js -fg.sync('*', { onlyFiles: false, markDirectories: false }); // ['index.js', 'controllers'] -fg.sync('*', { onlyFiles: false, markDirectories: true }); // ['index.js', 'controllers/'] -``` - -#### objectMode - -* Type: `boolean` -* Default: `false` - -Returns objects (instead of strings) describing entries. - -```js -fg.sync('*', { objectMode: false }); // ['src/index.js'] -fg.sync('*', { objectMode: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: }] -``` - -The object has the following fields: - -* name (`string`) — the last part of the path (basename) -* path (`string`) — full path relative to the pattern base directory -* dirent ([`fs.Dirent`][node_js_fs_class_fs_dirent]) — instance of `fs.Dirent` - -> :book: An object is an internal representation of entry, so getting it does not affect performance. - -#### onlyDirectories - -* Type: `boolean` -* Default: `false` - -Return only directories. - -```js -fg.sync('*', { onlyDirectories: false }); // ['index.js', 'src'] -fg.sync('*', { onlyDirectories: true }); // ['src'] -``` - -> :book: If `true`, the [`onlyFiles`](#onlyfiles) option is automatically `false`. - -#### onlyFiles - -* Type: `boolean` -* Default: `true` - -Return only files. - -```js -fg.sync('*', { onlyFiles: false }); // ['index.js', 'src'] -fg.sync('*', { onlyFiles: true }); // ['index.js'] -``` - -#### stats - -* Type: `boolean` -* Default: `false` - -Enables an [object mode](#objectmode) with an additional field: - -* stats ([`fs.Stats`][node_js_fs_class_fs_stats]) — instance of `fs.Stats` - -```js -fg.sync('*', { stats: false }); // ['src/index.js'] -fg.sync('*', { stats: true }); // [{ name: 'index.js', path: 'src/index.js', dirent: , stats: }] -``` - -> :book: Returns `fs.stat` instead of `fs.lstat` for symbolic links when the [`followSymbolicLinks`](#followsymboliclinks) option is specified. -> -> :warning: Unlike [object mode](#objectmode) this mode requires additional calls to the file system. On average, this mode is slower at least twice. See [old and modern mode](#old-and-modern-mode) for more details. - -#### unique - -* Type: `boolean` -* Default: `true` - -Ensures that the returned entries are unique. - -```js -fg.sync(['*.json', 'package.json'], { unique: false }); // ['package.json', 'package.json'] -fg.sync(['*.json', 'package.json'], { unique: true }); // ['package.json'] -``` - -If `true` and similar entries are found, the result is the first found. - -### Matching control - -#### braceExpansion - -* Type: `boolean` -* Default: `true` - -Enables Bash-like brace expansion. - -> :1234: [Syntax description][bash_hackers_syntax_expansion_brace] or more [detailed description][micromatch_braces]. - -```js -dir/ -├── abd -├── acd -└── a{b,c}d -``` - -```js -fg.sync('a{b,c}d', { braceExpansion: false }); // ['a{b,c}d'] -fg.sync('a{b,c}d', { braceExpansion: true }); // ['abd', 'acd'] -``` - -#### caseSensitiveMatch - -* Type: `boolean` -* Default: `true` - -Enables a [case-sensitive][wikipedia_case_sensitivity] mode for matching files. - -```js -dir/ -├── file.txt -└── File.txt -``` - -```js -fg.sync('file.txt', { caseSensitiveMatch: false }); // ['file.txt', 'File.txt'] -fg.sync('file.txt', { caseSensitiveMatch: true }); // ['file.txt'] -``` - -#### dot - -* Type: `boolean` -* Default: `false` - -Allow patterns to match entries that begin with a period (`.`). - -> :book: Note that an explicit dot in a portion of the pattern will always match dot files. 
-
-```js
-dir/
-├── .editorconfig
-└── package.json
-```
-
-```js
-fg.sync('*', { dot: false }); // ['package.json']
-fg.sync('*', { dot: true }); // ['.editorconfig', 'package.json']
-```
-
-#### extglob
-
-* Type: `boolean`
-* Default: `true`
-
-Enables Bash-like `extglob` functionality.
-
-> :1234: [Syntax description][micromatch_extglobs].
-
-```js
-dir/
-├── README.md
-└── package.json
-```
-
-```js
-fg.sync('*.+(json|md)', { extglob: false }); // []
-fg.sync('*.+(json|md)', { extglob: true }); // ['README.md', 'package.json']
-```
-
-#### globstar
-
-* Type: `boolean`
-* Default: `true`
-
-Enables recursive matching for patterns that contain `**`. If `false`, `**` behaves exactly like `*`.
-
-```js
-dir/
-└── a
-    └── b
-```
-
-```js
-fg.sync('**', { onlyFiles: false, globstar: false }); // ['a']
-fg.sync('**', { onlyFiles: false, globstar: true }); // ['a', 'a/b']
-```
-
-#### baseNameMatch
-
-* Type: `boolean`
-* Default: `false`
-
-If set to `true`, then patterns without slashes will be matched against the basename of the path if it contains slashes.
-
-```js
-dir/
-└── one/
-    └── file.md
-```
-
-```js
-fg.sync('*.md', { baseNameMatch: false }); // []
-fg.sync('*.md', { baseNameMatch: true }); // ['one/file.md']
-```
-
-## FAQ
-
-## What is a static or dynamic pattern?
-
-All patterns can be divided into two types:
-
-* **static**. A pattern is considered static if it can be used to get an entry on the file system without using matching mechanisms. For example, the `file.js` pattern is a static pattern because we can just verify that it exists on the file system.
-* **dynamic**. A pattern is considered dynamic if it cannot be used directly to find occurrences without using a matching mechanism. For example, the `*` pattern is a dynamic pattern because we cannot use this pattern directly.
-
-A pattern is considered dynamic if it contains the following characters (`…` — any characters or their absence) or options:
-
-* The [`caseSensitiveMatch`](#casesensitivematch) option is disabled
-* `\\` (the escape character)
-* `*`, `?`, `!` (at the beginning of line)
-* `[…]`
-* `(…|…)`
-* `@(…)`, `!(…)`, `*(…)`, `?(…)`, `+(…)` (respects the [`extglob`](#extglob) option)
-* `{…,…}`, `{…..…}` (respects the [`braceExpansion`](#braceexpansion) option)
-
-## How to write patterns on Windows?
-
-Always use forward-slashes in glob expressions (patterns and [`ignore`](#ignore) option). Use backslashes for escaping characters. With the [`cwd`](#cwd) option use a convenient format.
-
-**Bad**
-
-```ts
-[
-    'directory\\*',
-    path.join(process.cwd(), '**')
-]
-```
-
-**Good**
-
-```ts
-[
-    'directory/*',
-    fg.convertPathToPattern(process.cwd()) + '/**'
-]
-```
-
-> :book: Use the [`.convertPathToPattern`](#convertpathtopatternpath) method to convert a Windows-style path to a Unix-style pattern.
-
-Read more about [matching with backslashes][micromatch_backslashes].
-
-## Why are parentheses match wrong?
-
-```js
-dir/
-└── (special-*file).txt
-```
-
-```js
-fg.sync(['(special-*file).txt']) // []
-```
-
-This behavior comes from Bash. You need to escape special characters:
-
-```js
-fg.sync(['\\(special-*file\\).txt']) // ['(special-*file).txt']
-```
-
-Read more about [matching special characters as literals][picomatch_matching_special_characters_as_literals]. Or use the [`.escapePath`](#escapepathpath) method.
-
-## How to exclude directory from reading?
-
-You can use a negative pattern like this: `!**/node_modules` or `!**/node_modules/**`. You can also use the [`ignore`](#ignore) option. Just look at the example below.
- -```js -first/ -├── file.md -└── second/ - └── file.txt -``` - -If you don't want to read the `second` directory, you must write the following pattern: `!**/second` or `!**/second/**`. - -```js -fg.sync(['**/*.md', '!**/second']); // ['first/file.md'] -fg.sync(['**/*.md'], { ignore: ['**/second/**'] }); // ['first/file.md'] -``` - -> :warning: When you write `!**/second/**/*` it means that the directory will be **read**, but all the entries will not be included in the results. - -You have to understand that if you write the pattern to exclude directories, then the directory will not be read under any circumstances. - -## How to use UNC path? - -You cannot use [Uniform Naming Convention (UNC)][unc_path] paths as patterns (due to syntax) directly, but you can use them as [`cwd`](#cwd) directory or use the `fg.convertPathToPattern` method. - -```ts -// cwd -fg.sync('*', { cwd: '\\\\?\\C:\\Python27' /* or //?/C:/Python27 */ }); -fg.sync('Python27/*', { cwd: '\\\\?\\C:\\' /* or //?/C:/ */ }); - -// .convertPathToPattern -fg.sync(fg.convertPathToPattern('\\\\?\\c:\\Python27') + '/*'); -``` - -## Compatible with `node-glob`? - -| node-glob | fast-glob | -| :----------: | :-------: | -| `cwd` | [`cwd`](#cwd) | -| `root` | – | -| `dot` | [`dot`](#dot) | -| `nomount` | – | -| `mark` | [`markDirectories`](#markdirectories) | -| `nosort` | – | -| `nounique` | [`unique`](#unique) | -| `nobrace` | [`braceExpansion`](#braceexpansion) | -| `noglobstar` | [`globstar`](#globstar) | -| `noext` | [`extglob`](#extglob) | -| `nocase` | [`caseSensitiveMatch`](#casesensitivematch) | -| `matchBase` | [`baseNameMatch`](#basenamematch) | -| `nodir` | [`onlyFiles`](#onlyfiles) | -| `ignore` | [`ignore`](#ignore) | -| `follow` | [`followSymbolicLinks`](#followsymboliclinks) | -| `realpath` | – | -| `absolute` | [`absolute`](#absolute) | - -## Benchmarks - -You can see results [here](https://github.com/mrmlnc/fast-glob/actions/workflows/benchmark.yml?query=branch%3Amaster) for every commit into the `main` branch. - -* **Product benchmark** – comparison with the main competitors. -* **Regress benchmark** – regression between the current version and the version from the npm registry. - -## Changelog - -See the [Releases section of our GitHub project][github_releases] for changelog for each release version. - -## License - -This software is released under the terms of the MIT license. 
- -[bash_hackers_syntax_expansion_brace]: https://wiki.bash-hackers.org/syntax/expansion/brace -[github_releases]: https://github.com/mrmlnc/fast-glob/releases -[glob_definition]: https://en.wikipedia.org/wiki/Glob_(programming) -[glob_linux_man]: http://man7.org/linux/man-pages/man3/glob.3.html -[micromatch_backslashes]: https://github.com/micromatch/micromatch#backslashes -[micromatch_braces]: https://github.com/micromatch/braces -[micromatch_extended_globbing]: https://github.com/micromatch/micromatch#extended-globbing -[micromatch_extglobs]: https://github.com/micromatch/micromatch#extglobs -[micromatch_regex_character_classes]: https://github.com/micromatch/micromatch#regex-character-classes -[micromatch]: https://github.com/micromatch/micromatch -[node_js_fs_class_fs_dirent]: https://nodejs.org/api/fs.html#fs_class_fs_dirent -[node_js_fs_class_fs_stats]: https://nodejs.org/api/fs.html#fs_class_fs_stats -[node_js_stream_readable_streams]: https://nodejs.org/api/stream.html#stream_readable_streams -[node_js]: https://nodejs.org/en -[nodelib_fs_scandir_old_and_modern_modern]: https://github.com/nodelib/nodelib/blob/master/packages/fs/fs.scandir/README.md#old-and-modern-mode -[npm_normalize_path]: https://www.npmjs.com/package/normalize-path -[npm_unixify]: https://www.npmjs.com/package/unixify -[picomatch_matching_behavior]: https://github.com/micromatch/picomatch#matching-behavior-vs-bash -[picomatch_matching_special_characters_as_literals]: https://github.com/micromatch/picomatch#matching-special-characters-as-literals -[picomatch_posix_brackets]: https://github.com/micromatch/picomatch#posix-brackets -[regular_expressions_brackets]: https://www.regular-expressions.info/brackets.html -[unc_path]: https://learn.microsoft.com/openspecs/windows_protocols/ms-dtyp/62e862f4-2a51-452e-8eeb-dc4ff5ee33cc -[wikipedia_case_sensitivity]: https://en.wikipedia.org/wiki/Case_sensitivity -[nodejs_thread_pool]: https://nodejs.org/en/docs/guides/dont-block-the-event-loop -[libuv_thread_pool]: http://docs.libuv.org/en/v1.x/threadpool.html -[windows_naming_conventions]: https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts deleted file mode 100644 index 46823bb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/// -import * as taskManager from './managers/tasks'; -import { Options as OptionsInternal } from './settings'; -import { Entry as EntryInternal, FileSystemAdapter as FileSystemAdapterInternal, Pattern as PatternInternal } from './types'; -type EntryObjectModePredicate = { - [TKey in keyof Pick]-?: true; -}; -type EntryStatsPredicate = { - [TKey in keyof Pick]-?: true; -}; -type EntryObjectPredicate = EntryObjectModePredicate | EntryStatsPredicate; -declare function FastGlob(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): Promise; -declare function FastGlob(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Promise; -declare namespace FastGlob { - type Options = OptionsInternal; - type Entry = EntryInternal; - type Task = taskManager.Task; - type Pattern = PatternInternal; - type FileSystemAdapter = FileSystemAdapterInternal; - const glob: typeof FastGlob; - const globSync: typeof sync; - const globStream: typeof stream; - const async: typeof 
FastGlob; - function sync(source: PatternInternal | PatternInternal[], options: OptionsInternal & EntryObjectPredicate): EntryInternal[]; - function sync(source: PatternInternal | PatternInternal[], options?: OptionsInternal): string[]; - function stream(source: PatternInternal | PatternInternal[], options?: OptionsInternal): NodeJS.ReadableStream; - function generateTasks(source: PatternInternal | PatternInternal[], options?: OptionsInternal): Task[]; - function isDynamicPattern(source: PatternInternal, options?: OptionsInternal): boolean; - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - namespace posix { - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - } - namespace win32 { - function escapePath(source: string): PatternInternal; - function convertPathToPattern(source: string): PatternInternal; - } -} -export = FastGlob; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js deleted file mode 100644 index 90365d4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/index.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; -const taskManager = require("./managers/tasks"); -const async_1 = require("./providers/async"); -const stream_1 = require("./providers/stream"); -const sync_1 = require("./providers/sync"); -const settings_1 = require("./settings"); -const utils = require("./utils"); -async function FastGlob(source, options) { - assertPatternsInput(source); - const works = getWorks(source, async_1.default, options); - const result = await Promise.all(works); - return utils.array.flatten(result); -} -// https://github.com/typescript-eslint/typescript-eslint/issues/60 -// eslint-disable-next-line no-redeclare -(function (FastGlob) { - FastGlob.glob = FastGlob; - FastGlob.globSync = sync; - FastGlob.globStream = stream; - FastGlob.async = FastGlob; - function sync(source, options) { - assertPatternsInput(source); - const works = getWorks(source, sync_1.default, options); - return utils.array.flatten(works); - } - FastGlob.sync = sync; - function stream(source, options) { - assertPatternsInput(source); - const works = getWorks(source, stream_1.default, options); - /** - * The stream returned by the provider cannot work with an asynchronous iterator. - * To support asynchronous iterators, regardless of the number of tasks, we always multiplex streams. - * This affects performance (+25%). I don't see best solution right now. 
- */ - return utils.stream.merge(works); - } - FastGlob.stream = stream; - function generateTasks(source, options) { - assertPatternsInput(source); - const patterns = [].concat(source); - const settings = new settings_1.default(options); - return taskManager.generate(patterns, settings); - } - FastGlob.generateTasks = generateTasks; - function isDynamicPattern(source, options) { - assertPatternsInput(source); - const settings = new settings_1.default(options); - return utils.pattern.isDynamicPattern(source, settings); - } - FastGlob.isDynamicPattern = isDynamicPattern; - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escape(source); - } - FastGlob.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertPathToPattern(source); - } - FastGlob.convertPathToPattern = convertPathToPattern; - let posix; - (function (posix) { - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escapePosixPath(source); - } - posix.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertPosixPathToPattern(source); - } - posix.convertPathToPattern = convertPathToPattern; - })(posix = FastGlob.posix || (FastGlob.posix = {})); - let win32; - (function (win32) { - function escapePath(source) { - assertPatternsInput(source); - return utils.path.escapeWindowsPath(source); - } - win32.escapePath = escapePath; - function convertPathToPattern(source) { - assertPatternsInput(source); - return utils.path.convertWindowsPathToPattern(source); - } - win32.convertPathToPattern = convertPathToPattern; - })(win32 = FastGlob.win32 || (FastGlob.win32 = {})); -})(FastGlob || (FastGlob = {})); -function getWorks(source, _Provider, options) { - const patterns = [].concat(source); - const settings = new settings_1.default(options); - const tasks = taskManager.generate(patterns, settings); - const provider = new _Provider(settings); - return tasks.map(provider.read, provider); -} -function assertPatternsInput(input) { - const source = [].concat(input); - const isValidSource = source.every((item) => utils.string.isString(item) && !utils.string.isEmpty(item)); - if (!isValidSource) { - throw new TypeError('Patterns must be a string (non empty) or an array of strings'); - } -} -module.exports = FastGlob; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts deleted file mode 100644 index 59d2c42..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import Settings from '../settings'; -import { Pattern, PatternsGroup } from '../types'; -export type Task = { - base: string; - dynamic: boolean; - patterns: Pattern[]; - positive: Pattern[]; - negative: Pattern[]; -}; -export declare function generate(input: Pattern[], settings: Settings): Task[]; -/** - * Returns tasks grouped by basic pattern directories. - * - * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. - * This is necessary because directory traversal starts at the base directory and goes deeper. 
- */ -export declare function convertPatternsToTasks(positive: Pattern[], negative: Pattern[], dynamic: boolean): Task[]; -export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; -export declare function getNegativePatternsAsPositive(patterns: Pattern[], ignore: Pattern[]): Pattern[]; -export declare function groupPatternsByBaseDirectory(patterns: Pattern[]): PatternsGroup; -export declare function convertPatternGroupsToTasks(positive: PatternsGroup, negative: Pattern[], dynamic: boolean): Task[]; -export declare function convertPatternGroupToTask(base: string, positive: Pattern[], negative: Pattern[], dynamic: boolean): Task; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js deleted file mode 100644 index 335a765..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/managers/tasks.js +++ /dev/null @@ -1,110 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.convertPatternGroupToTask = exports.convertPatternGroupsToTasks = exports.groupPatternsByBaseDirectory = exports.getNegativePatternsAsPositive = exports.getPositivePatterns = exports.convertPatternsToTasks = exports.generate = void 0; -const utils = require("../utils"); -function generate(input, settings) { - const patterns = processPatterns(input, settings); - const ignore = processPatterns(settings.ignore, settings); - const positivePatterns = getPositivePatterns(patterns); - const negativePatterns = getNegativePatternsAsPositive(patterns, ignore); - const staticPatterns = positivePatterns.filter((pattern) => utils.pattern.isStaticPattern(pattern, settings)); - const dynamicPatterns = positivePatterns.filter((pattern) => utils.pattern.isDynamicPattern(pattern, settings)); - const staticTasks = convertPatternsToTasks(staticPatterns, negativePatterns, /* dynamic */ false); - const dynamicTasks = convertPatternsToTasks(dynamicPatterns, negativePatterns, /* dynamic */ true); - return staticTasks.concat(dynamicTasks); -} -exports.generate = generate; -function processPatterns(input, settings) { - let patterns = input; - /** - * The original pattern like `{,*,**,a/*}` can lead to problems checking the depth when matching entry - * and some problems with the micromatch package (see fast-glob issues: #365, #394). - * - * To solve this problem, we expand all patterns containing brace expansion. This can lead to a slight slowdown - * in matching in the case of a large set of patterns after expansion. - */ - if (settings.braceExpansion) { - patterns = utils.pattern.expandPatternsWithBraceExpansion(patterns); - } - /** - * If the `baseNameMatch` option is enabled, we must add globstar to patterns, so that they can be used - * at any nesting level. - * - * We do this here, because otherwise we have to complicate the filtering logic. For example, we need to change - * the pattern in the filter before creating a regular expression. There is no need to change the patterns - * in the application. Only on the input. - */ - if (settings.baseNameMatch) { - patterns = patterns.map((pattern) => pattern.includes('/') ? pattern : `**/${pattern}`); - } - /** - * This method also removes duplicate slashes that may have been in the pattern or formed as a result of expansion. - */ - return patterns.map((pattern) => utils.pattern.removeDuplicateSlashes(pattern)); -} -/** - * Returns tasks grouped by basic pattern directories. 
- * - * Patterns that can be found inside (`./`) and outside (`../`) the current directory are handled separately. - * This is necessary because directory traversal starts at the base directory and goes deeper. - */ -function convertPatternsToTasks(positive, negative, dynamic) { - const tasks = []; - const patternsOutsideCurrentDirectory = utils.pattern.getPatternsOutsideCurrentDirectory(positive); - const patternsInsideCurrentDirectory = utils.pattern.getPatternsInsideCurrentDirectory(positive); - const outsideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsOutsideCurrentDirectory); - const insideCurrentDirectoryGroup = groupPatternsByBaseDirectory(patternsInsideCurrentDirectory); - tasks.push(...convertPatternGroupsToTasks(outsideCurrentDirectoryGroup, negative, dynamic)); - /* - * For the sake of reducing future accesses to the file system, we merge all tasks within the current directory - * into a global task, if at least one pattern refers to the root (`.`). In this case, the global task covers the rest. - */ - if ('.' in insideCurrentDirectoryGroup) { - tasks.push(convertPatternGroupToTask('.', patternsInsideCurrentDirectory, negative, dynamic)); - } - else { - tasks.push(...convertPatternGroupsToTasks(insideCurrentDirectoryGroup, negative, dynamic)); - } - return tasks; -} -exports.convertPatternsToTasks = convertPatternsToTasks; -function getPositivePatterns(patterns) { - return utils.pattern.getPositivePatterns(patterns); -} -exports.getPositivePatterns = getPositivePatterns; -function getNegativePatternsAsPositive(patterns, ignore) { - const negative = utils.pattern.getNegativePatterns(patterns).concat(ignore); - const positive = negative.map(utils.pattern.convertToPositivePattern); - return positive; -} -exports.getNegativePatternsAsPositive = getNegativePatternsAsPositive; -function groupPatternsByBaseDirectory(patterns) { - const group = {}; - return patterns.reduce((collection, pattern) => { - const base = utils.pattern.getBaseDirectory(pattern); - if (base in collection) { - collection[base].push(pattern); - } - else { - collection[base] = [pattern]; - } - return collection; - }, group); -} -exports.groupPatternsByBaseDirectory = groupPatternsByBaseDirectory; -function convertPatternGroupsToTasks(positive, negative, dynamic) { - return Object.keys(positive).map((base) => { - return convertPatternGroupToTask(base, positive[base], negative, dynamic); - }); -} -exports.convertPatternGroupsToTasks = convertPatternGroupsToTasks; -function convertPatternGroupToTask(base, positive, negative, dynamic) { - return { - dynamic, - positive, - negative, - base, - patterns: [].concat(positive, negative.map(utils.pattern.convertToNegativePattern)) - }; -} -exports.convertPatternGroupToTask = convertPatternGroupToTask; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts deleted file mode 100644 index 2742616..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Task } from '../managers/tasks'; -import { Entry, EntryItem, ReaderOptions } from '../types'; -import ReaderAsync from '../readers/async'; -import Provider from './provider'; -export default class ProviderAsync extends Provider> { - protected _reader: ReaderAsync; - read(task: Task): Promise; - api(root: string, task: Task, options: ReaderOptions): Promise; -} diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js deleted file mode 100644 index 0c5286e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/async.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const async_1 = require("../readers/async"); -const provider_1 = require("./provider"); -class ProviderAsync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new async_1.default(this._settings); - } - async read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = await this.api(root, task, options); - return entries.map((entry) => options.transform(entry)); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderAsync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts deleted file mode 100644 index 377fab8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { MicromatchOptions, EntryFilterFunction, Pattern } from '../../types'; -import Settings from '../../settings'; -export default class DeepFilter { - private readonly _settings; - private readonly _micromatchOptions; - constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); - getFilter(basePath: string, positive: Pattern[], negative: Pattern[]): EntryFilterFunction; - private _getMatcher; - private _getNegativePatternsRe; - private _filter; - private _isSkippedByDeep; - private _getEntryLevel; - private _isSkippedSymbolicLink; - private _isSkippedByPositivePatterns; - private _isSkippedByNegativePatterns; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js deleted file mode 100644 index 644bf41..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/deep.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -const partial_1 = require("../matchers/partial"); -class DeepFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - } - getFilter(basePath, positive, negative) { - const matcher = this._getMatcher(positive); - const negativeRe = this._getNegativePatternsRe(negative); - return (entry) => this._filter(basePath, entry, matcher, negativeRe); - } - _getMatcher(patterns) { - return new partial_1.default(patterns, this._settings, this._micromatchOptions); - } - _getNegativePatternsRe(patterns) { - const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern); - return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions); - } - _filter(basePath, entry, matcher, negativeRe) { - if (this._isSkippedByDeep(basePath, entry.path)) { - return false; - } - if 
(this._isSkippedSymbolicLink(entry)) { - return false; - } - const filepath = utils.path.removeLeadingDotSegment(entry.path); - if (this._isSkippedByPositivePatterns(filepath, matcher)) { - return false; - } - return this._isSkippedByNegativePatterns(filepath, negativeRe); - } - _isSkippedByDeep(basePath, entryPath) { - /** - * Avoid unnecessary depth calculations when it doesn't matter. - */ - if (this._settings.deep === Infinity) { - return false; - } - return this._getEntryLevel(basePath, entryPath) >= this._settings.deep; - } - _getEntryLevel(basePath, entryPath) { - const entryPathDepth = entryPath.split('/').length; - if (basePath === '') { - return entryPathDepth; - } - const basePathDepth = basePath.split('/').length; - return entryPathDepth - basePathDepth; - } - _isSkippedSymbolicLink(entry) { - return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink(); - } - _isSkippedByPositivePatterns(entryPath, matcher) { - return !this._settings.baseNameMatch && !matcher.match(entryPath); - } - _isSkippedByNegativePatterns(entryPath, patternsRe) { - return !utils.pattern.matchAny(entryPath, patternsRe); - } -} -exports.default = DeepFilter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts deleted file mode 100644 index 23db353..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import Settings from '../../settings'; -import { EntryFilterFunction, MicromatchOptions, Pattern } from '../../types'; -export default class EntryFilter { - private readonly _settings; - private readonly _micromatchOptions; - readonly index: Map; - constructor(_settings: Settings, _micromatchOptions: MicromatchOptions); - getFilter(positive: Pattern[], negative: Pattern[]): EntryFilterFunction; - private _filter; - private _isDuplicateEntry; - private _createIndexRecord; - private _onlyFileFilter; - private _onlyDirectoryFilter; - private _isMatchToPatternsSet; - private _isMatchToAbsoluteNegative; - private _isMatchToPatterns; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js deleted file mode 100644 index 0c9210c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/entry.js +++ /dev/null @@ -1,85 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class EntryFilter { - constructor(_settings, _micromatchOptions) { - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this.index = new Map(); - } - getFilter(positive, negative) { - const [absoluteNegative, relativeNegative] = utils.pattern.partitionAbsoluteAndRelative(negative); - const patterns = { - positive: { - all: utils.pattern.convertPatternsToRe(positive, this._micromatchOptions) - }, - negative: { - absolute: utils.pattern.convertPatternsToRe(absoluteNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })), - relative: utils.pattern.convertPatternsToRe(relativeNegative, Object.assign(Object.assign({}, this._micromatchOptions), { dot: true })) - } - }; - return (entry) => this._filter(entry, patterns); - } - _filter(entry, patterns) { - const filepath = 
utils.path.removeLeadingDotSegment(entry.path); - if (this._settings.unique && this._isDuplicateEntry(filepath)) { - return false; - } - if (this._onlyFileFilter(entry) || this._onlyDirectoryFilter(entry)) { - return false; - } - const isMatched = this._isMatchToPatternsSet(filepath, patterns, entry.dirent.isDirectory()); - if (this._settings.unique && isMatched) { - this._createIndexRecord(filepath); - } - return isMatched; - } - _isDuplicateEntry(filepath) { - return this.index.has(filepath); - } - _createIndexRecord(filepath) { - this.index.set(filepath, undefined); - } - _onlyFileFilter(entry) { - return this._settings.onlyFiles && !entry.dirent.isFile(); - } - _onlyDirectoryFilter(entry) { - return this._settings.onlyDirectories && !entry.dirent.isDirectory(); - } - _isMatchToPatternsSet(filepath, patterns, isDirectory) { - const isMatched = this._isMatchToPatterns(filepath, patterns.positive.all, isDirectory); - if (!isMatched) { - return false; - } - const isMatchedByRelativeNegative = this._isMatchToPatterns(filepath, patterns.negative.relative, isDirectory); - if (isMatchedByRelativeNegative) { - return false; - } - const isMatchedByAbsoluteNegative = this._isMatchToAbsoluteNegative(filepath, patterns.negative.absolute, isDirectory); - if (isMatchedByAbsoluteNegative) { - return false; - } - return true; - } - _isMatchToAbsoluteNegative(filepath, patternsRe, isDirectory) { - if (patternsRe.length === 0) { - return false; - } - const fullpath = utils.path.makeAbsolute(this._settings.cwd, filepath); - return this._isMatchToPatterns(fullpath, patternsRe, isDirectory); - } - _isMatchToPatterns(filepath, patternsRe, isDirectory) { - if (patternsRe.length === 0) { - return false; - } - // Trying to match files and directories by patterns. - const isMatched = utils.pattern.matchAny(filepath, patternsRe); - // A pattern with a trailling slash can be used for directory matching. - // To apply such pattern, we need to add a tralling slash to the path. 
- if (!isMatched && isDirectory) { - return utils.pattern.matchAny(filepath + '/', patternsRe); - } - return isMatched; - } -} -exports.default = EntryFilter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts deleted file mode 100644 index 170eb25..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../../settings'; -import { ErrorFilterFunction } from '../../types'; -export default class ErrorFilter { - private readonly _settings; - constructor(_settings: Settings); - getFilter(): ErrorFilterFunction; - private _isNonFatalError; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js deleted file mode 100644 index 1c6f241..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/filters/error.js +++ /dev/null @@ -1,15 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class ErrorFilter { - constructor(_settings) { - this._settings = _settings; - } - getFilter() { - return (error) => this._isNonFatalError(error); - } - _isNonFatalError(error) { - return utils.errno.isEnoentCodeError(error) || this._settings.suppressErrors; - } -} -exports.default = ErrorFilter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts deleted file mode 100644 index d04c232..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.d.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { Pattern, MicromatchOptions, PatternRe } from '../../types'; -import Settings from '../../settings'; -export type PatternSegment = StaticPatternSegment | DynamicPatternSegment; -type StaticPatternSegment = { - dynamic: false; - pattern: Pattern; -}; -type DynamicPatternSegment = { - dynamic: true; - pattern: Pattern; - patternRe: PatternRe; -}; -export type PatternSection = PatternSegment[]; -export type PatternInfo = { - /** - * Indicates that the pattern has a globstar (more than a single section). 
- */ - complete: boolean; - pattern: Pattern; - segments: PatternSegment[]; - sections: PatternSection[]; -}; -export default abstract class Matcher { - private readonly _patterns; - private readonly _settings; - private readonly _micromatchOptions; - protected readonly _storage: PatternInfo[]; - constructor(_patterns: Pattern[], _settings: Settings, _micromatchOptions: MicromatchOptions); - private _fillStorage; - private _getPatternSegments; - private _splitSegmentsIntoSections; -} -export {}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js deleted file mode 100644 index eae67c9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/matcher.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class Matcher { - constructor(_patterns, _settings, _micromatchOptions) { - this._patterns = _patterns; - this._settings = _settings; - this._micromatchOptions = _micromatchOptions; - this._storage = []; - this._fillStorage(); - } - _fillStorage() { - for (const pattern of this._patterns) { - const segments = this._getPatternSegments(pattern); - const sections = this._splitSegmentsIntoSections(segments); - this._storage.push({ - complete: sections.length <= 1, - pattern, - segments, - sections - }); - } - } - _getPatternSegments(pattern) { - const parts = utils.pattern.getPatternParts(pattern, this._micromatchOptions); - return parts.map((part) => { - const dynamic = utils.pattern.isDynamicPattern(part, this._settings); - if (!dynamic) { - return { - dynamic: false, - pattern: part - }; - } - return { - dynamic: true, - pattern: part, - patternRe: utils.pattern.makeRe(part, this._micromatchOptions) - }; - }); - } - _splitSegmentsIntoSections(segments) { - return utils.array.splitWhen(segments, (segment) => segment.dynamic && utils.pattern.hasGlobStar(segment.pattern)); - } -} -exports.default = Matcher; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts deleted file mode 100644 index 91520f6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import Matcher from './matcher'; -export default class PartialMatcher extends Matcher { - match(filepath: string): boolean; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js deleted file mode 100644 index 1dfffeb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/matchers/partial.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const matcher_1 = require("./matcher"); -class PartialMatcher extends matcher_1.default { - match(filepath) { - const parts = filepath.split('/'); - const levels = parts.length; - const patterns = this._storage.filter((info) => !info.complete || info.segments.length > levels); - for (const pattern of patterns) { - const section = pattern.sections[0]; - /** - * In this case, the pattern has a globstar and 
we must read all directories unconditionally, - * but only if the level has reached the end of the first group. - * - * fixtures/{a,b}/** - * ^ true/false ^ always true - */ - if (!pattern.complete && levels > section.length) { - return true; - } - const match = parts.every((part, index) => { - const segment = pattern.segments[index]; - if (segment.dynamic && segment.patternRe.test(part)) { - return true; - } - if (!segment.dynamic && segment.pattern === part) { - return true; - } - return false; - }); - if (match) { - return true; - } - } - return false; - } -} -exports.default = PartialMatcher; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts deleted file mode 100644 index 1053460..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { Task } from '../managers/tasks'; -import Settings from '../settings'; -import { MicromatchOptions, ReaderOptions } from '../types'; -import DeepFilter from './filters/deep'; -import EntryFilter from './filters/entry'; -import ErrorFilter from './filters/error'; -import EntryTransformer from './transformers/entry'; -export default abstract class Provider { - protected readonly _settings: Settings; - readonly errorFilter: ErrorFilter; - readonly entryFilter: EntryFilter; - readonly deepFilter: DeepFilter; - readonly entryTransformer: EntryTransformer; - constructor(_settings: Settings); - abstract read(_task: Task): T; - protected _getRootDirectory(task: Task): string; - protected _getReaderOptions(task: Task): ReaderOptions; - protected _getMicromatchOptions(): MicromatchOptions; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js deleted file mode 100644 index da88ee0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/provider.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const deep_1 = require("./filters/deep"); -const entry_1 = require("./filters/entry"); -const error_1 = require("./filters/error"); -const entry_2 = require("./transformers/entry"); -class Provider { - constructor(_settings) { - this._settings = _settings; - this.errorFilter = new error_1.default(this._settings); - this.entryFilter = new entry_1.default(this._settings, this._getMicromatchOptions()); - this.deepFilter = new deep_1.default(this._settings, this._getMicromatchOptions()); - this.entryTransformer = new entry_2.default(this._settings); - } - _getRootDirectory(task) { - return path.resolve(this._settings.cwd, task.base); - } - _getReaderOptions(task) { - const basePath = task.base === '.' ? 
'' : task.base; - return { - basePath, - pathSegmentSeparator: '/', - concurrency: this._settings.concurrency, - deepFilter: this.deepFilter.getFilter(basePath, task.positive, task.negative), - entryFilter: this.entryFilter.getFilter(task.positive, task.negative), - errorFilter: this.errorFilter.getFilter(), - followSymbolicLinks: this._settings.followSymbolicLinks, - fs: this._settings.fs, - stats: this._settings.stats, - throwErrorOnBrokenSymbolicLink: this._settings.throwErrorOnBrokenSymbolicLink, - transform: this.entryTransformer.getTransformer() - }; - } - _getMicromatchOptions() { - return { - dot: this._settings.dot, - matchBase: this._settings.baseNameMatch, - nobrace: !this._settings.braceExpansion, - nocase: !this._settings.caseSensitiveMatch, - noext: !this._settings.extglob, - noglobstar: !this._settings.globstar, - posix: true, - strictSlashes: false - }; - } -} -exports.default = Provider; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts deleted file mode 100644 index 3d02a1f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -/// -import { Readable } from 'stream'; -import { Task } from '../managers/tasks'; -import ReaderStream from '../readers/stream'; -import { ReaderOptions } from '../types'; -import Provider from './provider'; -export default class ProviderStream extends Provider { - protected _reader: ReaderStream; - read(task: Task): Readable; - api(root: string, task: Task, options: ReaderOptions): Readable; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js deleted file mode 100644 index 85da62e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/stream.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const stream_2 = require("../readers/stream"); -const provider_1 = require("./provider"); -class ProviderStream extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new stream_2.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const source = this.api(root, task, options); - const destination = new stream_1.Readable({ objectMode: true, read: () => { } }); - source - .once('error', (error) => destination.emit('error', error)) - .on('data', (entry) => destination.emit('data', options.transform(entry))) - .once('end', () => destination.emit('end')); - destination - .once('close', () => source.destroy()); - return destination; - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts deleted file mode 100644 index 9c0fe1e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Task } from '../managers/tasks'; -import 
ReaderSync from '../readers/sync'; -import { Entry, EntryItem, ReaderOptions } from '../types'; -import Provider from './provider'; -export default class ProviderSync extends Provider { - protected _reader: ReaderSync; - read(task: Task): EntryItem[]; - api(root: string, task: Task, options: ReaderOptions): Entry[]; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js deleted file mode 100644 index d70aa1b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/sync.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const sync_1 = require("../readers/sync"); -const provider_1 = require("./provider"); -class ProviderSync extends provider_1.default { - constructor() { - super(...arguments); - this._reader = new sync_1.default(this._settings); - } - read(task) { - const root = this._getRootDirectory(task); - const options = this._getReaderOptions(task); - const entries = this.api(root, task, options); - return entries.map(options.transform); - } - api(root, task, options) { - if (task.dynamic) { - return this._reader.dynamic(root, options); - } - return this._reader.static(task.patterns, options); - } -} -exports.default = ProviderSync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts deleted file mode 100644 index e9b85fa..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import Settings from '../../settings'; -import { EntryTransformerFunction } from '../../types'; -export default class EntryTransformer { - private readonly _settings; - constructor(_settings: Settings); - getTransformer(): EntryTransformerFunction; - private _transform; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js deleted file mode 100644 index d11903c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/providers/transformers/entry.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const utils = require("../../utils"); -class EntryTransformer { - constructor(_settings) { - this._settings = _settings; - } - getTransformer() { - return (entry) => this._transform(entry); - } - _transform(entry) { - let filepath = entry.path; - if (this._settings.absolute) { - filepath = utils.path.makeAbsolute(this._settings.cwd, filepath); - filepath = utils.path.unixify(filepath); - } - if (this._settings.markDirectories && entry.dirent.isDirectory()) { - filepath += '/'; - } - if (!this._settings.objectMode) { - return filepath; - } - return Object.assign(Object.assign({}, entry), { path: filepath }); - } -} -exports.default = EntryTransformer; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts deleted file mode 100644 index fbca428..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.d.ts +++ 
/dev/null @@ -1,10 +0,0 @@ -import * as fsWalk from '@nodelib/fs.walk'; -import { Entry, ReaderOptions, Pattern } from '../types'; -import Reader from './reader'; -import ReaderStream from './stream'; -export default class ReaderAsync extends Reader> { - protected _walkAsync: typeof fsWalk.walk; - protected _readerStream: ReaderStream; - dynamic(root: string, options: ReaderOptions): Promise; - static(patterns: Pattern[], options: ReaderOptions): Promise; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js deleted file mode 100644 index d024145..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/async.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -const stream_1 = require("./stream"); -class ReaderAsync extends reader_1.default { - constructor() { - super(...arguments); - this._walkAsync = fsWalk.walk; - this._readerStream = new stream_1.default(this._settings); - } - dynamic(root, options) { - return new Promise((resolve, reject) => { - this._walkAsync(root, options, (error, entries) => { - if (error === null) { - resolve(entries); - } - else { - reject(error); - } - }); - }); - } - async static(patterns, options) { - const entries = []; - const stream = this._readerStream.static(patterns, options); - // After #235, replace it with an asynchronous iterator. - return new Promise((resolve, reject) => { - stream.once('error', reject); - stream.on('data', (entry) => entries.push(entry)); - stream.once('end', () => resolve(entries)); - }); - } -} -exports.default = ReaderAsync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts deleted file mode 100644 index 2af16b6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -/// -import * as fs from 'fs'; -import * as fsStat from '@nodelib/fs.stat'; -import Settings from '../settings'; -import { Entry, ErrnoException, Pattern, ReaderOptions } from '../types'; -export default abstract class Reader { - protected readonly _settings: Settings; - protected readonly _fsStatSettings: fsStat.Settings; - constructor(_settings: Settings); - abstract dynamic(root: string, options: ReaderOptions): T; - abstract static(patterns: Pattern[], options: ReaderOptions): T; - protected _getFullEntryPath(filepath: string): string; - protected _makeEntry(stats: fs.Stats, pattern: Pattern): Entry; - protected _isFatalError(error: ErrnoException): boolean; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js deleted file mode 100644 index 7b40255..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/reader.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const fsStat = require("@nodelib/fs.stat"); -const utils = require("../utils"); -class Reader { - constructor(_settings) { - this._settings = _settings; - this._fsStatSettings = new fsStat.Settings({ - 
followSymbolicLink: this._settings.followSymbolicLinks, - fs: this._settings.fs, - throwErrorOnBrokenSymbolicLink: this._settings.followSymbolicLinks - }); - } - _getFullEntryPath(filepath) { - return path.resolve(this._settings.cwd, filepath); - } - _makeEntry(stats, pattern) { - const entry = { - name: pattern, - path: pattern, - dirent: utils.fs.createDirentFromStats(pattern, stats) - }; - if (this._settings.stats) { - entry.stats = stats; - } - return entry; - } - _isFatalError(error) { - return !utils.errno.isEnoentCodeError(error) && !this._settings.suppressErrors; - } -} -exports.default = Reader; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts deleted file mode 100644 index 1c74cac..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/// -import { Readable } from 'stream'; -import * as fsStat from '@nodelib/fs.stat'; -import * as fsWalk from '@nodelib/fs.walk'; -import { Pattern, ReaderOptions } from '../types'; -import Reader from './reader'; -export default class ReaderStream extends Reader { - protected _walkStream: typeof fsWalk.walkStream; - protected _stat: typeof fsStat.stat; - dynamic(root: string, options: ReaderOptions): Readable; - static(patterns: Pattern[], options: ReaderOptions): Readable; - private _getEntry; - private _getStat; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js deleted file mode 100644 index 317c6d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/stream.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const stream_1 = require("stream"); -const fsStat = require("@nodelib/fs.stat"); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -class ReaderStream extends reader_1.default { - constructor() { - super(...arguments); - this._walkStream = fsWalk.walkStream; - this._stat = fsStat.stat; - } - dynamic(root, options) { - return this._walkStream(root, options); - } - static(patterns, options) { - const filepaths = patterns.map(this._getFullEntryPath, this); - const stream = new stream_1.PassThrough({ objectMode: true }); - stream._write = (index, _enc, done) => { - return this._getEntry(filepaths[index], patterns[index], options) - .then((entry) => { - if (entry !== null && options.entryFilter(entry)) { - stream.push(entry); - } - if (index === filepaths.length - 1) { - stream.end(); - } - done(); - }) - .catch(done); - }; - for (let i = 0; i < filepaths.length; i++) { - stream.write(i); - } - return stream; - } - _getEntry(filepath, pattern, options) { - return this._getStat(filepath) - .then((stats) => this._makeEntry(stats, pattern)) - .catch((error) => { - if (options.errorFilter(error)) { - return null; - } - throw error; - }); - } - _getStat(filepath) { - return new Promise((resolve, reject) => { - this._stat(filepath, this._fsStatSettings, (error, stats) => { - return error === null ? 
resolve(stats) : reject(error); - }); - }); - } -} -exports.default = ReaderStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts deleted file mode 100644 index c96ffee..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import * as fsStat from '@nodelib/fs.stat'; -import * as fsWalk from '@nodelib/fs.walk'; -import { Entry, Pattern, ReaderOptions } from '../types'; -import Reader from './reader'; -export default class ReaderSync extends Reader { - protected _walkSync: typeof fsWalk.walkSync; - protected _statSync: typeof fsStat.statSync; - dynamic(root: string, options: ReaderOptions): Entry[]; - static(patterns: Pattern[], options: ReaderOptions): Entry[]; - private _getEntry; - private _getStat; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js deleted file mode 100644 index 4704d65..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/readers/sync.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const fsStat = require("@nodelib/fs.stat"); -const fsWalk = require("@nodelib/fs.walk"); -const reader_1 = require("./reader"); -class ReaderSync extends reader_1.default { - constructor() { - super(...arguments); - this._walkSync = fsWalk.walkSync; - this._statSync = fsStat.statSync; - } - dynamic(root, options) { - return this._walkSync(root, options); - } - static(patterns, options) { - const entries = []; - for (const pattern of patterns) { - const filepath = this._getFullEntryPath(pattern); - const entry = this._getEntry(filepath, pattern, options); - if (entry === null || !options.entryFilter(entry)) { - continue; - } - entries.push(entry); - } - return entries; - } - _getEntry(filepath, pattern, options) { - try { - const stats = this._getStat(filepath); - return this._makeEntry(stats, pattern); - } - catch (error) { - if (options.errorFilter(error)) { - return null; - } - throw error; - } - } - _getStat(filepath) { - return this._statSync(filepath, this._fsStatSettings); - } -} -exports.default = ReaderSync; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts deleted file mode 100644 index 76a74f8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.d.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { FileSystemAdapter, Pattern } from './types'; -export declare const DEFAULT_FILE_SYSTEM_ADAPTER: FileSystemAdapter; -export type Options = { - /** - * Return the absolute path for entries. - * - * @default false - */ - absolute?: boolean; - /** - * If set to `true`, then patterns without slashes will be matched against - * the basename of the path if it contains slashes. - * - * @default false - */ - baseNameMatch?: boolean; - /** - * Enables Bash-like brace expansion. - * - * @default true - */ - braceExpansion?: boolean; - /** - * Enables a case-sensitive mode for matching files. - * - * @default true - */ - caseSensitiveMatch?: boolean; - /** - * Specifies the maximum number of concurrent requests from a reader to read - * directories. 
- * - * @default os.cpus().length - */ - concurrency?: number; - /** - * The current working directory in which to search. - * - * @default process.cwd() - */ - cwd?: string; - /** - * Specifies the maximum depth of a read directory relative to the start - * directory. - * - * @default Infinity - */ - deep?: number; - /** - * Allow patterns to match entries that begin with a period (`.`). - * - * @default false - */ - dot?: boolean; - /** - * Enables Bash-like `extglob` functionality. - * - * @default true - */ - extglob?: boolean; - /** - * Indicates whether to traverse descendants of symbolic link directories. - * - * @default true - */ - followSymbolicLinks?: boolean; - /** - * Custom implementation of methods for working with the file system. - * - * @default fs.* - */ - fs?: Partial; - /** - * Enables recursively repeats a pattern containing `**`. - * If `false`, `**` behaves exactly like `*`. - * - * @default true - */ - globstar?: boolean; - /** - * An array of glob patterns to exclude matches. - * This is an alternative way to use negative patterns. - * - * @default [] - */ - ignore?: Pattern[]; - /** - * Mark the directory path with the final slash. - * - * @default false - */ - markDirectories?: boolean; - /** - * Returns objects (instead of strings) describing entries. - * - * @default false - */ - objectMode?: boolean; - /** - * Return only directories. - * - * @default false - */ - onlyDirectories?: boolean; - /** - * Return only files. - * - * @default true - */ - onlyFiles?: boolean; - /** - * Enables an object mode (`objectMode`) with an additional `stats` field. - * - * @default false - */ - stats?: boolean; - /** - * By default this package suppress only `ENOENT` errors. - * Set to `true` to suppress any error. - * - * @default false - */ - suppressErrors?: boolean; - /** - * Throw an error when symbolic link is broken if `true` or safely - * return `lstat` call if `false`. - * - * @default false - */ - throwErrorOnBrokenSymbolicLink?: boolean; - /** - * Ensures that the returned entries are unique. - * - * @default true - */ - unique?: boolean; -}; -export default class Settings { - private readonly _options; - readonly absolute: boolean; - readonly baseNameMatch: boolean; - readonly braceExpansion: boolean; - readonly caseSensitiveMatch: boolean; - readonly concurrency: number; - readonly cwd: string; - readonly deep: number; - readonly dot: boolean; - readonly extglob: boolean; - readonly followSymbolicLinks: boolean; - readonly fs: FileSystemAdapter; - readonly globstar: boolean; - readonly ignore: Pattern[]; - readonly markDirectories: boolean; - readonly objectMode: boolean; - readonly onlyDirectories: boolean; - readonly onlyFiles: boolean; - readonly stats: boolean; - readonly suppressErrors: boolean; - readonly throwErrorOnBrokenSymbolicLink: boolean; - readonly unique: boolean; - constructor(_options?: Options); - private _getValue; - private _getFileSystemMethods; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js deleted file mode 100644 index 23f916c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/settings.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; -const fs = require("fs"); -const os = require("os"); -/** - * The `os.cpus` method can return zero. 
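For context on the Options documented above, a minimal usage sketch (not an excerpt of the removed files; it assumes the fast-glob package itself is installed and importable as `fast-glob`):

```js
// Minimal fast-glob call exercising a few of the options documented above.
const fg = require('fast-glob');

fg(['src/**/*.js', '!src/**/*.spec.js'], {
  cwd: process.cwd(),        // directory to search from (default: process.cwd())
  dot: false,                // do not match entries starting with a dot
  onlyFiles: true,           // default: report files only, never directories
  absolute: true,            // return absolute paths instead of relative ones
  followSymbolicLinks: true  // traverse symlinked directories (default)
}).then((entries) => console.log(entries));
```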
We expect the number of cores to be greater than zero. - * https://github.com/nodejs/node/blob/7faeddf23a98c53896f8b574a6e66589e8fb1eb8/lib/os.js#L106-L107 - */ -const CPU_COUNT = Math.max(os.cpus().length, 1); -exports.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs.lstat, - lstatSync: fs.lstatSync, - stat: fs.stat, - statSync: fs.statSync, - readdir: fs.readdir, - readdirSync: fs.readdirSync -}; -class Settings { - constructor(_options = {}) { - this._options = _options; - this.absolute = this._getValue(this._options.absolute, false); - this.baseNameMatch = this._getValue(this._options.baseNameMatch, false); - this.braceExpansion = this._getValue(this._options.braceExpansion, true); - this.caseSensitiveMatch = this._getValue(this._options.caseSensitiveMatch, true); - this.concurrency = this._getValue(this._options.concurrency, CPU_COUNT); - this.cwd = this._getValue(this._options.cwd, process.cwd()); - this.deep = this._getValue(this._options.deep, Infinity); - this.dot = this._getValue(this._options.dot, false); - this.extglob = this._getValue(this._options.extglob, true); - this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, true); - this.fs = this._getFileSystemMethods(this._options.fs); - this.globstar = this._getValue(this._options.globstar, true); - this.ignore = this._getValue(this._options.ignore, []); - this.markDirectories = this._getValue(this._options.markDirectories, false); - this.objectMode = this._getValue(this._options.objectMode, false); - this.onlyDirectories = this._getValue(this._options.onlyDirectories, false); - this.onlyFiles = this._getValue(this._options.onlyFiles, true); - this.stats = this._getValue(this._options.stats, false); - this.suppressErrors = this._getValue(this._options.suppressErrors, false); - this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, false); - this.unique = this._getValue(this._options.unique, true); - if (this.onlyDirectories) { - this.onlyFiles = false; - } - if (this.stats) { - this.objectMode = true; - } - // Remove the cast to the array in the next major (#404). - this.ignore = [].concat(this.ignore); - } - _getValue(option, value) { - return option === undefined ? 
value : option; - } - _getFileSystemMethods(methods = {}) { - return Object.assign(Object.assign({}, exports.DEFAULT_FILE_SYSTEM_ADAPTER), methods); - } -} -exports.default = Settings; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts deleted file mode 100644 index 6506caf..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.d.ts +++ /dev/null @@ -1,31 +0,0 @@ -/// -import * as fsWalk from '@nodelib/fs.walk'; -export type ErrnoException = NodeJS.ErrnoException; -export type Entry = fsWalk.Entry; -export type EntryItem = string | Entry; -export type Pattern = string; -export type PatternRe = RegExp; -export type PatternsGroup = Record; -export type ReaderOptions = fsWalk.Options & { - transform(entry: Entry): EntryItem; - deepFilter: DeepFilterFunction; - entryFilter: EntryFilterFunction; - errorFilter: ErrorFilterFunction; - fs: FileSystemAdapter; - stats: boolean; -}; -export type ErrorFilterFunction = fsWalk.ErrorFilterFunction; -export type EntryFilterFunction = fsWalk.EntryFilterFunction; -export type DeepFilterFunction = fsWalk.DeepFilterFunction; -export type EntryTransformerFunction = (entry: Entry) => EntryItem; -export type MicromatchOptions = { - dot?: boolean; - matchBase?: boolean; - nobrace?: boolean; - nocase?: boolean; - noext?: boolean; - noglobstar?: boolean; - posix?: boolean; - strictSlashes?: boolean; -}; -export type FileSystemAdapter = fsWalk.FileSystemAdapter; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js deleted file mode 100644 index c8ad2e5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/types/index.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts deleted file mode 100644 index 98e7325..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function flatten(items: T[][]): T[]; -export declare function splitWhen(items: T[], predicate: (item: T) => boolean): T[][]; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js deleted file mode 100644 index 50c406e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/array.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.splitWhen = exports.flatten = void 0; -function flatten(items) { - return items.reduce((collection, item) => [].concat(collection, item), []); -} -exports.flatten = flatten; -function splitWhen(items, predicate) { - const result = [[]]; - let groupIndex = 0; - for (const item of items) { - if (predicate(item)) { - groupIndex++; - result[groupIndex] = []; - } - else { - result[groupIndex].push(item); - } - } - return result; -} -exports.splitWhen = splitWhen; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts deleted file mode 100644 index 1c08d3b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { ErrnoException } from '../types'; -export declare function isEnoentCodeError(error: ErrnoException): boolean; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js deleted file mode 100644 index f0bd801..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/errno.js +++ /dev/null @@ -1,7 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isEnoentCodeError = void 0; -function isEnoentCodeError(error) { - return error.code === 'ENOENT'; -} -exports.isEnoentCodeError = isEnoentCodeError; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts deleted file mode 100644 index 64c61ce..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -import * as fs from 'fs'; -import { Dirent } from '@nodelib/fs.walk'; -export declare function createDirentFromStats(name: string, stats: fs.Stats): Dirent; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js deleted file mode 100644 index ace7c74..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/fs.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createDirentFromStats = void 0; -class DirentFromStats { - constructor(name, stats) { - this.name = name; - this.isBlockDevice = stats.isBlockDevice.bind(stats); - this.isCharacterDevice = stats.isCharacterDevice.bind(stats); - this.isDirectory = stats.isDirectory.bind(stats); - this.isFIFO = stats.isFIFO.bind(stats); - this.isFile = stats.isFile.bind(stats); - this.isSocket = stats.isSocket.bind(stats); - this.isSymbolicLink = stats.isSymbolicLink.bind(stats); - } -} -function createDirentFromStats(name, stats) { - return new DirentFromStats(name, stats); -} -exports.createDirentFromStats = createDirentFromStats; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts deleted file mode 100644 index f634cad..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import * as array from './array'; -import * as errno from './errno'; -import * as fs from './fs'; -import * as path from './path'; -import * as pattern from './pattern'; -import * as stream from './stream'; -import * as string from './string'; -export { array, errno, fs, path, pattern, stream, string }; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js deleted file mode 100644 index 0f92c16..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/index.js +++ /dev/null @@ -1,17 +0,0 @@ -"use 
strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.string = exports.stream = exports.pattern = exports.path = exports.fs = exports.errno = exports.array = void 0; -const array = require("./array"); -exports.array = array; -const errno = require("./errno"); -exports.errno = errno; -const fs = require("./fs"); -exports.fs = fs; -const path = require("./path"); -exports.path = path; -const pattern = require("./pattern"); -exports.pattern = pattern; -const stream = require("./stream"); -exports.stream = stream; -const string = require("./string"); -exports.string = string; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts deleted file mode 100644 index 0b13f4b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Pattern } from '../types'; -/** - * Designed to work only with simple paths: `dir\\file`. - */ -export declare function unixify(filepath: string): string; -export declare function makeAbsolute(cwd: string, filepath: string): string; -export declare function removeLeadingDotSegment(entry: string): string; -export declare const escape: typeof escapeWindowsPath; -export declare function escapeWindowsPath(pattern: Pattern): Pattern; -export declare function escapePosixPath(pattern: Pattern): Pattern; -export declare const convertPathToPattern: typeof convertWindowsPathToPattern; -export declare function convertWindowsPathToPattern(filepath: string): Pattern; -export declare function convertPosixPathToPattern(filepath: string): Pattern; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js deleted file mode 100644 index 7b53b39..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/path.js +++ /dev/null @@ -1,68 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.convertPosixPathToPattern = exports.convertWindowsPathToPattern = exports.convertPathToPattern = exports.escapePosixPath = exports.escapeWindowsPath = exports.escape = exports.removeLeadingDotSegment = exports.makeAbsolute = exports.unixify = void 0; -const os = require("os"); -const path = require("path"); -const IS_WINDOWS_PLATFORM = os.platform() === 'win32'; -const LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; // ./ or .\\ -/** - * All non-escaped special characters. - * Posix: ()*?[]{|}, !+@ before (, ! at the beginning, \\ before non-special characters. - * Windows: (){}[], !+@ before (, ! at the beginning. - */ -const POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; -const WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()[\]{}]|^!|[!+@](?=\())/g; -/** - * The device path (\\.\ or \\?\). - * https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#dos-device-paths - */ -const DOS_DEVICE_PATH_RE = /^\\\\([.?])/; -/** - * All backslashes except those escaping special characters. - * Windows: !()+@{} - * https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions - */ -const WINDOWS_BACKSLASHES_RE = /\\(?![!()+@[\]{}])/g; -/** - * Designed to work only with simple paths: `dir\\file`. 
- */ -function unixify(filepath) { - return filepath.replace(/\\/g, '/'); -} -exports.unixify = unixify; -function makeAbsolute(cwd, filepath) { - return path.resolve(cwd, filepath); -} -exports.makeAbsolute = makeAbsolute; -function removeLeadingDotSegment(entry) { - // We do not use `startsWith` because this is 10x slower than current implementation for some cases. - // eslint-disable-next-line @typescript-eslint/prefer-string-starts-ends-with - if (entry.charAt(0) === '.') { - const secondCharactery = entry.charAt(1); - if (secondCharactery === '/' || secondCharactery === '\\') { - return entry.slice(LEADING_DOT_SEGMENT_CHARACTERS_COUNT); - } - } - return entry; -} -exports.removeLeadingDotSegment = removeLeadingDotSegment; -exports.escape = IS_WINDOWS_PLATFORM ? escapeWindowsPath : escapePosixPath; -function escapeWindowsPath(pattern) { - return pattern.replace(WINDOWS_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escapeWindowsPath = escapeWindowsPath; -function escapePosixPath(pattern) { - return pattern.replace(POSIX_UNESCAPED_GLOB_SYMBOLS_RE, '\\$2'); -} -exports.escapePosixPath = escapePosixPath; -exports.convertPathToPattern = IS_WINDOWS_PLATFORM ? convertWindowsPathToPattern : convertPosixPathToPattern; -function convertWindowsPathToPattern(filepath) { - return escapeWindowsPath(filepath) - .replace(DOS_DEVICE_PATH_RE, '//$1') - .replace(WINDOWS_BACKSLASHES_RE, '/'); -} -exports.convertWindowsPathToPattern = convertWindowsPathToPattern; -function convertPosixPathToPattern(filepath) { - return escapePosixPath(filepath); -} -exports.convertPosixPathToPattern = convertPosixPathToPattern; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts deleted file mode 100644 index e3598a9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { MicromatchOptions, Pattern, PatternRe } from '../types'; -type PatternTypeOptions = { - braceExpansion?: boolean; - caseSensitiveMatch?: boolean; - extglob?: boolean; -}; -export declare function isStaticPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; -export declare function isDynamicPattern(pattern: Pattern, options?: PatternTypeOptions): boolean; -export declare function convertToPositivePattern(pattern: Pattern): Pattern; -export declare function convertToNegativePattern(pattern: Pattern): Pattern; -export declare function isNegativePattern(pattern: Pattern): boolean; -export declare function isPositivePattern(pattern: Pattern): boolean; -export declare function getNegativePatterns(patterns: Pattern[]): Pattern[]; -export declare function getPositivePatterns(patterns: Pattern[]): Pattern[]; -/** - * Returns patterns that can be applied inside the current directory. - * - * @example - * // ['./*', '*', 'a/*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -export declare function getPatternsInsideCurrentDirectory(patterns: Pattern[]): Pattern[]; -/** - * Returns patterns to be expanded relative to (outside) the current directory. 
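The helpers above back the package's public path utilities; a short sketch of the intended usage, assuming a fast-glob 3.3.x install where they are exposed as `fg.escapePath()` and `fg.convertPathToPattern()`:

```js
const fg = require('fast-glob');

// Escape glob special characters in a literal path segment before building a pattern
// (assumed public name: fg.escapePath, which delegates to escapeWindowsPath/escapePosixPath).
const pattern = fg.escapePath('src/(components)') + '/**/*.ts';

// Convert a native path (backslashes, device prefixes on Windows) into a usable pattern.
const cwdPattern = fg.convertPathToPattern(process.cwd()) + '/*.json';

fg([pattern, cwdPattern]).then((files) => console.log(files));
```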
- * - * @example - * // ['../*', './../*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -export declare function getPatternsOutsideCurrentDirectory(patterns: Pattern[]): Pattern[]; -export declare function isPatternRelatedToParentDirectory(pattern: Pattern): boolean; -export declare function getBaseDirectory(pattern: Pattern): string; -export declare function hasGlobStar(pattern: Pattern): boolean; -export declare function endsWithSlashGlobStar(pattern: Pattern): boolean; -export declare function isAffectDepthOfReadingPattern(pattern: Pattern): boolean; -export declare function expandPatternsWithBraceExpansion(patterns: Pattern[]): Pattern[]; -export declare function expandBraceExpansion(pattern: Pattern): Pattern[]; -export declare function getPatternParts(pattern: Pattern, options: MicromatchOptions): Pattern[]; -export declare function makeRe(pattern: Pattern, options: MicromatchOptions): PatternRe; -export declare function convertPatternsToRe(patterns: Pattern[], options: MicromatchOptions): PatternRe[]; -export declare function matchAny(entry: string, patternsRe: PatternRe[]): boolean; -/** - * This package only works with forward slashes as a path separator. - * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. - */ -export declare function removeDuplicateSlashes(pattern: string): string; -export declare function partitionAbsoluteAndRelative(patterns: Pattern[]): Pattern[][]; -export declare function isAbsolute(pattern: string): boolean; -export {}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js deleted file mode 100644 index b2924e7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/pattern.js +++ /dev/null @@ -1,206 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isAbsolute = exports.partitionAbsoluteAndRelative = exports.removeDuplicateSlashes = exports.matchAny = exports.convertPatternsToRe = exports.makeRe = exports.getPatternParts = exports.expandBraceExpansion = exports.expandPatternsWithBraceExpansion = exports.isAffectDepthOfReadingPattern = exports.endsWithSlashGlobStar = exports.hasGlobStar = exports.getBaseDirectory = exports.isPatternRelatedToParentDirectory = exports.getPatternsOutsideCurrentDirectory = exports.getPatternsInsideCurrentDirectory = exports.getPositivePatterns = exports.getNegativePatterns = exports.isPositivePattern = exports.isNegativePattern = exports.convertToNegativePattern = exports.convertToPositivePattern = exports.isDynamicPattern = exports.isStaticPattern = void 0; -const path = require("path"); -const globParent = require("glob-parent"); -const micromatch = require("micromatch"); -const GLOBSTAR = '**'; -const ESCAPE_SYMBOL = '\\'; -const COMMON_GLOB_SYMBOLS_RE = /[*?]|^!/; -const REGEX_CHARACTER_CLASS_SYMBOLS_RE = /\[[^[]*]/; -const REGEX_GROUP_SYMBOLS_RE = /(?:^|[^!*+?@])\([^(]*\|[^|]*\)/; -const GLOB_EXTENSION_SYMBOLS_RE = /[!*+?@]\([^(]*\)/; -const BRACE_EXPANSION_SEPARATORS_RE = /,|\.\./; -/** - * Matches a sequence of two or more consecutive slashes, excluding the first two slashes at the beginning of the string. - * The latter is due to the presence of the device path at the beginning of the UNC path. 
- */ -const DOUBLE_SLASH_RE = /(?!^)\/{2,}/g; -function isStaticPattern(pattern, options = {}) { - return !isDynamicPattern(pattern, options); -} -exports.isStaticPattern = isStaticPattern; -function isDynamicPattern(pattern, options = {}) { - /** - * A special case with an empty string is necessary for matching patterns that start with a forward slash. - * An empty string cannot be a dynamic pattern. - * For example, the pattern `/lib/*` will be spread into parts: '', 'lib', '*'. - */ - if (pattern === '') { - return false; - } - /** - * When the `caseSensitiveMatch` option is disabled, all patterns must be marked as dynamic, because we cannot check - * filepath directly (without read directory). - */ - if (options.caseSensitiveMatch === false || pattern.includes(ESCAPE_SYMBOL)) { - return true; - } - if (COMMON_GLOB_SYMBOLS_RE.test(pattern) || REGEX_CHARACTER_CLASS_SYMBOLS_RE.test(pattern) || REGEX_GROUP_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.extglob !== false && GLOB_EXTENSION_SYMBOLS_RE.test(pattern)) { - return true; - } - if (options.braceExpansion !== false && hasBraceExpansion(pattern)) { - return true; - } - return false; -} -exports.isDynamicPattern = isDynamicPattern; -function hasBraceExpansion(pattern) { - const openingBraceIndex = pattern.indexOf('{'); - if (openingBraceIndex === -1) { - return false; - } - const closingBraceIndex = pattern.indexOf('}', openingBraceIndex + 1); - if (closingBraceIndex === -1) { - return false; - } - const braceContent = pattern.slice(openingBraceIndex, closingBraceIndex); - return BRACE_EXPANSION_SEPARATORS_RE.test(braceContent); -} -function convertToPositivePattern(pattern) { - return isNegativePattern(pattern) ? pattern.slice(1) : pattern; -} -exports.convertToPositivePattern = convertToPositivePattern; -function convertToNegativePattern(pattern) { - return '!' + pattern; -} -exports.convertToNegativePattern = convertToNegativePattern; -function isNegativePattern(pattern) { - return pattern.startsWith('!') && pattern[1] !== '('; -} -exports.isNegativePattern = isNegativePattern; -function isPositivePattern(pattern) { - return !isNegativePattern(pattern); -} -exports.isPositivePattern = isPositivePattern; -function getNegativePatterns(patterns) { - return patterns.filter(isNegativePattern); -} -exports.getNegativePatterns = getNegativePatterns; -function getPositivePatterns(patterns) { - return patterns.filter(isPositivePattern); -} -exports.getPositivePatterns = getPositivePatterns; -/** - * Returns patterns that can be applied inside the current directory. - * - * @example - * // ['./*', '*', 'a/*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -function getPatternsInsideCurrentDirectory(patterns) { - return patterns.filter((pattern) => !isPatternRelatedToParentDirectory(pattern)); -} -exports.getPatternsInsideCurrentDirectory = getPatternsInsideCurrentDirectory; -/** - * Returns patterns to be expanded relative to (outside) the current directory. 
- * - * @example - * // ['../*', './../*'] - * getPatternsInsideCurrentDirectory(['./*', '*', 'a/*', '../*', './../*']) - */ -function getPatternsOutsideCurrentDirectory(patterns) { - return patterns.filter(isPatternRelatedToParentDirectory); -} -exports.getPatternsOutsideCurrentDirectory = getPatternsOutsideCurrentDirectory; -function isPatternRelatedToParentDirectory(pattern) { - return pattern.startsWith('..') || pattern.startsWith('./..'); -} -exports.isPatternRelatedToParentDirectory = isPatternRelatedToParentDirectory; -function getBaseDirectory(pattern) { - return globParent(pattern, { flipBackslashes: false }); -} -exports.getBaseDirectory = getBaseDirectory; -function hasGlobStar(pattern) { - return pattern.includes(GLOBSTAR); -} -exports.hasGlobStar = hasGlobStar; -function endsWithSlashGlobStar(pattern) { - return pattern.endsWith('/' + GLOBSTAR); -} -exports.endsWithSlashGlobStar = endsWithSlashGlobStar; -function isAffectDepthOfReadingPattern(pattern) { - const basename = path.basename(pattern); - return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); -} -exports.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; -function expandPatternsWithBraceExpansion(patterns) { - return patterns.reduce((collection, pattern) => { - return collection.concat(expandBraceExpansion(pattern)); - }, []); -} -exports.expandPatternsWithBraceExpansion = expandPatternsWithBraceExpansion; -function expandBraceExpansion(pattern) { - const patterns = micromatch.braces(pattern, { expand: true, nodupes: true, keepEscaping: true }); - /** - * Sort the patterns by length so that the same depth patterns are processed side by side. - * `a/{b,}/{c,}/*` – `['a///*', 'a/b//*', 'a//c/*', 'a/b/c/*']` - */ - patterns.sort((a, b) => a.length - b.length); - /** - * Micromatch can return an empty string in the case of patterns like `{a,}`. - */ - return patterns.filter((pattern) => pattern !== ''); -} -exports.expandBraceExpansion = expandBraceExpansion; -function getPatternParts(pattern, options) { - let { parts } = micromatch.scan(pattern, Object.assign(Object.assign({}, options), { parts: true })); - /** - * The scan method returns an empty array in some cases. - * See micromatch/picomatch#58 for more details. - */ - if (parts.length === 0) { - parts = [pattern]; - } - /** - * The scan method does not return an empty part for the pattern with a forward slash. - * This is another part of micromatch/picomatch#58. - */ - if (parts[0].startsWith('/')) { - parts[0] = parts[0].slice(1); - parts.unshift(''); - } - return parts; -} -exports.getPatternParts = getPatternParts; -function makeRe(pattern, options) { - return micromatch.makeRe(pattern, options); -} -exports.makeRe = makeRe; -function convertPatternsToRe(patterns, options) { - return patterns.map((pattern) => makeRe(pattern, options)); -} -exports.convertPatternsToRe = convertPatternsToRe; -function matchAny(entry, patternsRe) { - return patternsRe.some((patternRe) => patternRe.test(entry)); -} -exports.matchAny = matchAny; -/** - * This package only works with forward slashes as a path separator. - * Because of this, we cannot use the standard `path.normalize` method, because on Windows platform it will use of backslashes. 
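`isDynamicPattern` is also re-exported on the package's public surface as `fg.isDynamicPattern()`; the expected results below follow from the checks in the implementation above (sketch only):

```js
const fg = require('fast-glob');

console.log(fg.isDynamicPattern('src/index.js'));   // false – no glob magic, treated as static
console.log(fg.isDynamicPattern('src/**/*.js'));    // true  – matches COMMON_GLOB_SYMBOLS_RE
console.log(fg.isDynamicPattern('{a,b}/file.txt')); // true  – brace expansion with a separator
console.log(fg.isDynamicPattern('file.js', { caseSensitiveMatch: false })); // true – always dynamic
```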
- */ -function removeDuplicateSlashes(pattern) { - return pattern.replace(DOUBLE_SLASH_RE, '/'); -} -exports.removeDuplicateSlashes = removeDuplicateSlashes; -function partitionAbsoluteAndRelative(patterns) { - const absolute = []; - const relative = []; - for (const pattern of patterns) { - if (isAbsolute(pattern)) { - absolute.push(pattern); - } - else { - relative.push(pattern); - } - } - return [absolute, relative]; -} -exports.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; -function isAbsolute(pattern) { - return path.isAbsolute(pattern); -} -exports.isAbsolute = isAbsolute; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts deleted file mode 100644 index 4daf913..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/// -/// -import { Readable } from 'stream'; -export declare function merge(streams: Readable[]): NodeJS.ReadableStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js deleted file mode 100644 index b32028c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/stream.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.merge = void 0; -const merge2 = require("merge2"); -function merge(streams) { - const mergedStream = merge2(streams); - streams.forEach((stream) => { - stream.once('error', (error) => mergedStream.emit('error', error)); - }); - mergedStream.once('close', () => propagateCloseEventToSources(streams)); - mergedStream.once('end', () => propagateCloseEventToSources(streams)); - return mergedStream; -} -exports.merge = merge; -function propagateCloseEventToSources(streams) { - streams.forEach((stream) => stream.emit('close')); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts deleted file mode 100644 index c884735..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function isString(input: unknown): input is string; -export declare function isEmpty(input: string): boolean; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js deleted file mode 100644 index 76e7ea5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/out/utils/string.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isEmpty = exports.isString = void 0; -function isString(input) { - return typeof input === 'string'; -} -exports.isString = isString; -function isEmpty(input) { - return input === ''; -} -exports.isEmpty = isEmpty; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json deleted file mode 100644 index e910de9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fast-glob/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": 
"fast-glob", - "version": "3.3.3", - "description": "It's a very fast and efficient glob library for Node.js", - "license": "MIT", - "repository": "mrmlnc/fast-glob", - "author": { - "name": "Denis Malinochkin", - "url": "https://mrmlnc.com" - }, - "engines": { - "node": ">=8.6.0" - }, - "main": "out/index.js", - "typings": "out/index.d.ts", - "files": [ - "out", - "!out/{benchmark,tests}", - "!out/**/*.map", - "!out/**/*.spec.*" - ], - "keywords": [ - "glob", - "patterns", - "fast", - "implementation" - ], - "devDependencies": { - "@nodelib/fs.macchiato": "^1.0.1", - "@types/glob-parent": "^5.1.0", - "@types/merge2": "^1.1.4", - "@types/micromatch": "^4.0.0", - "@types/mocha": "^5.2.7", - "@types/node": "^14.18.53", - "@types/picomatch": "^2.3.0", - "@types/sinon": "^7.5.0", - "bencho": "^0.1.1", - "eslint": "^6.5.1", - "eslint-config-mrmlnc": "^1.1.0", - "execa": "^7.1.1", - "fast-glob": "^3.0.4", - "fdir": "6.0.1", - "glob": "^10.0.0", - "hereby": "^1.8.1", - "mocha": "^6.2.1", - "rimraf": "^5.0.0", - "sinon": "^7.5.0", - "snap-shot-it": "^7.9.10", - "typescript": "^4.9.5" - }, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "scripts": { - "clean": "rimraf out", - "lint": "eslint \"src/**/*.ts\" --cache", - "compile": "tsc", - "test": "mocha \"out/**/*.spec.js\" -s 0", - "test:e2e": "mocha \"out/**/*.e2e.js\" -s 0", - "test:e2e:sync": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(sync\\)\"", - "test:e2e:async": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(async\\)\"", - "test:e2e:stream": "mocha \"out/**/*.e2e.js\" -s 0 --grep \"\\(stream\\)\"", - "build": "npm run clean && npm run compile && npm run lint && npm test", - "watch": "npm run clean && npm run compile -- -- --sourceMap --watch", - "bench:async": "npm run bench:product:async && npm run bench:regression:async", - "bench:stream": "npm run bench:product:stream && npm run bench:regression:stream", - "bench:sync": "npm run bench:product:sync && npm run bench:regression:sync", - "bench:product": "npm run bench:product:async && npm run bench:product:sync && npm run bench:product:stream", - "bench:product:async": "hereby bench:product:async", - "bench:product:sync": "hereby bench:product:sync", - "bench:product:stream": "hereby bench:product:stream", - "bench:regression": "npm run bench:regression:async && npm run bench:regression:sync && npm run bench:regression:stream", - "bench:regression:async": "hereby bench:regression:async", - "bench:regression:sync": "hereby bench:regression:sync", - "bench:regression:stream": "hereby bench:regression:stream" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml deleted file mode 100644 index 7e7cbe1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/dependabot.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: 2 -updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - open-pull-requests-limit: 10 - ignore: - - dependency-name: standard - versions: - - 16.0.3 diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml deleted file mode 100644 index 09dc7a3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/.github/workflows/ci.yml +++ /dev/null 
@@ -1,75 +0,0 @@ -name: ci - -on: [push, pull_request] - -jobs: - legacy: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install --production && npm install tape - - - name: Run tests - run: | - npm run legacy - - test: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: [18.x, 20.x, 22.x] - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install - - - name: Run tests - run: | - npm run test - - types: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v3 - with: - node-version: 16 - - - name: Install - run: | - npm install - - - name: Run types tests - run: | - npm run typescript diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE deleted file mode 100644 index 27c7bb4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2015-2020, Matteo Collina - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md deleted file mode 100644 index 1644111..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/README.md +++ /dev/null @@ -1,312 +0,0 @@ -# fastq - -![ci][ci-url] -[![npm version][npm-badge]][npm-url] - -Fast, in memory work queue. - -Benchmarks (1 million tasks): - -* setImmediate: 812ms -* fastq: 854ms -* async.queue: 1298ms -* neoAsync.queue: 1249ms - -Obtained on node 12.16.1, on a dedicated server. - -If you need zero-overhead series function call, check out -[fastseries](http://npm.im/fastseries). For zero-overhead parallel -function call, check out [fastparallel](http://npm.im/fastparallel). 
- -[![js-standard-style](https://raw.githubusercontent.com/feross/standard/master/badge.png)](https://github.com/feross/standard) - - * Installation - * Usage - * API - * Licence & copyright - -## Install - -`npm i fastq --save` - -## Usage (callback API) - -```js -'use strict' - -const queue = require('fastq')(worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log('the result is', result) -}) - -function worker (arg, cb) { - cb(null, arg * 2) -} -``` - -## Usage (promise API) - -```js -const queue = require('fastq').promise(worker, 1) - -async function worker (arg) { - return arg * 2 -} - -async function run () { - const result = await queue.push(42) - console.log('the result is', result) -} - -run() -``` - -### Setting "this" - -```js -'use strict' - -const that = { hello: 'world' } -const queue = require('fastq')(that, worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log(this) - console.log('the result is', result) -}) - -function worker (arg, cb) { - console.log(this) - cb(null, arg * 2) -} -``` - -### Using with TypeScript (callback API) - -```ts -'use strict' - -import * as fastq from "fastq"; -import type { queue, done } from "fastq"; - -type Task = { - id: number -} - -const q: queue = fastq(worker, 1) - -q.push({ id: 42}) - -function worker (arg: Task, cb: done) { - console.log(arg.id) - cb(null) -} -``` - -### Using with TypeScript (promise API) - -```ts -'use strict' - -import * as fastq from "fastq"; -import type { queueAsPromised } from "fastq"; - -type Task = { - id: number -} - -const q: queueAsPromised = fastq.promise(asyncWorker, 1) - -q.push({ id: 42}).catch((err) => console.error(err)) - -async function asyncWorker (arg: Task): Promise { - // No need for a try-catch block, fastq handles errors automatically - console.log(arg.id) -} -``` - -## API - -* fastqueue() -* queue#push() -* queue#unshift() -* queue#pause() -* queue#resume() -* queue#idle() -* queue#length() -* queue#getQueue() -* queue#kill() -* queue#killAndDrain() -* queue#error() -* queue#concurrency -* queue#drain -* queue#empty -* queue#saturated -* fastqueue.promise() - -------------------------------------------------------- - -### fastqueue([that], worker, concurrency) - -Creates a new queue. - -Arguments: - -* `that`, optional context of the `worker` function. -* `worker`, worker function, it would be called with `that` as `this`, - if that is specified. -* `concurrency`, number of concurrent tasks that could be executed in - parallel. - -------------------------------------------------------- - -### queue.push(task, done) - -Add a task at the end of the queue. `done(err, result)` will be called -when the task was processed. - -------------------------------------------------------- - -### queue.unshift(task, done) - -Add a task at the beginning of the queue. `done(err, result)` will be called -when the task was processed. - -------------------------------------------------------- - -### queue.pause() - -Pause the processing of tasks. Currently worked tasks are not -stopped. - -------------------------------------------------------- - -### queue.resume() - -Resume the processing of tasks. - -------------------------------------------------------- - -### queue.idle() - -Returns `false` if there are tasks being processed or waiting to be processed. -`true` otherwise. - -------------------------------------------------------- - -### queue.length() - -Returns the number of tasks waiting to be processed (in the queue). 
- -------------------------------------------------------- - -### queue.getQueue() - -Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks - -------------------------------------------------------- - -### queue.kill() - -Removes all tasks waiting to be processed, and reset `drain` to an empty -function. - -------------------------------------------------------- - -### queue.killAndDrain() - -Same than `kill` but the `drain` function will be called before reset to empty. - -------------------------------------------------------- - -### queue.error(handler) - -Set a global error handler. `handler(err, task)` will be called -each time a task is completed, `err` will be not null if the task has thrown an error. - -------------------------------------------------------- - -### queue.concurrency - -Property that returns the number of concurrent tasks that could be executed in -parallel. It can be altered at runtime. - -------------------------------------------------------- - -### queue.paused - -Property (Read-Only) that returns `true` when the queue is in a paused state. - -------------------------------------------------------- - -### queue.drain - -Function that will be called when the last -item from the queue has been processed by a worker. -It can be altered at runtime. - -------------------------------------------------------- - -### queue.empty - -Function that will be called when the last -item from the queue has been assigned to a worker. -It can be altered at runtime. - -------------------------------------------------------- - -### queue.saturated - -Function that will be called when the queue hits the concurrency -limit. -It can be altered at runtime. - -------------------------------------------------------- - -### fastqueue.promise([that], worker(arg), concurrency) - -Creates a new queue with `Promise` apis. It also offers all the methods -and properties of the object returned by [`fastqueue`](#fastqueue) with the modified -[`push`](#pushPromise) and [`unshift`](#unshiftPromise) methods. - -Node v10+ is required to use the promisified version. - -Arguments: -* `that`, optional context of the `worker` function. -* `worker`, worker function, it would be called with `that` as `this`, - if that is specified. It MUST return a `Promise`. -* `concurrency`, number of concurrent tasks that could be executed in - parallel. - - -#### queue.push(task) => Promise - -Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) -when the task is completed successfully (unsuccessfully). - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. - - -#### queue.unshift(task) => Promise - -Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) -when the task is completed successfully (unsuccessfully). - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. - - -#### queue.drained() => Promise - -Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. - -This promise could be ignored as it will not lead to a `'unhandledRejection'`. 
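The `error()`, `pause()`/`resume()` and `drained()` APIs documented above are not exercised by the bundled examples; a small illustrative sketch combining them, assuming the fastq package as published:

```js
'use strict'

const fastq = require('fastq')

// Promise-based queue with two concurrent workers.
const q = fastq.promise(async (task) => task.id * 2, 2)

// Global completion hook: err is null when the task succeeded.
q.error((err, task) => {
  if (err) console.error('task failed', task, err)
})

async function run () {
  q.pause()                           // queue tasks without starting them
  for (let id = 0; id < 10; id++) {
    q.push({ id }).catch(() => {})    // per-task rejection already handled by error()
  }
  q.resume()
  await q.drained()                   // resolves once every queued task has finished
  console.log('idle:', q.idle())      // true – nothing running or waiting
}

run()
```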
- -## License - -ISC - -[ci-url]: https://github.com/mcollina/fastq/workflows/ci/badge.svg -[npm-badge]: https://badge.fury.io/js/fastq.svg -[npm-url]: https://badge.fury.io/js/fastq diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md deleted file mode 100644 index dd9f1d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/SECURITY.md +++ /dev/null @@ -1,15 +0,0 @@ -# Security Policy - -## Supported Versions - -Use this section to tell people about which versions of your project are -currently being supported with security updates. - -| Version | Supported | -| ------- | ------------------ | -| 1.x | :white_check_mark: | -| < 1.0 | :x: | - -## Reporting a Vulnerability - -Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js deleted file mode 100644 index 4eaa829..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/bench.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -const max = 1000000 -const fastqueue = require('./')(worker, 1) -const { promisify } = require('util') -const immediate = promisify(setImmediate) -const qPromise = require('./').promise(immediate, 1) -const async = require('async') -const neo = require('neo-async') -const asyncqueue = async.queue(worker, 1) -const neoqueue = neo.queue(worker, 1) - -function bench (func, done) { - const key = max + '*' + func.name - let count = -1 - - console.time(key) - end() - - function end () { - if (++count < max) { - func(end) - } else { - console.timeEnd(key) - if (done) { - done() - } - } - } -} - -function benchFastQ (done) { - fastqueue.push(42, done) -} - -function benchAsyncQueue (done) { - asyncqueue.push(42, done) -} - -function benchNeoQueue (done) { - neoqueue.push(42, done) -} - -function worker (arg, cb) { - setImmediate(cb) -} - -function benchSetImmediate (cb) { - worker(42, cb) -} - -function benchFastQPromise (done) { - qPromise.push(42).then(function () { done() }, done) -} - -function runBench (done) { - async.eachSeries([ - benchSetImmediate, - benchFastQ, - benchNeoQueue, - benchAsyncQueue, - benchFastQPromise - ], bench, done) -} - -runBench(runBench) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js deleted file mode 100644 index 665fdc8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var queue = require('./')(worker, 1) - -queue.push(42, function (err, result) { - if (err) { throw err } - console.log('the result is', result) -}) - -function worker (arg, cb) { - cb(null, 42 * 2) -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs deleted file mode 100644 index 81be789..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/example.mjs +++ /dev/null @@ -1,11 +0,0 @@ -import { promise as queueAsPromised } from './queue.js' - -/* eslint-disable */ - -const queue = queueAsPromised(worker, 1) - -console.log('the result is', await queue.push(42)) - -async function worker (arg) { - return 
42 * 2 -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts deleted file mode 100644 index 817cdb5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/index.d.ts +++ /dev/null @@ -1,57 +0,0 @@ -declare function fastq(context: C, worker: fastq.worker, concurrency: number): fastq.queue -declare function fastq(worker: fastq.worker, concurrency: number): fastq.queue - -declare namespace fastq { - type worker = (this: C, task: T, cb: fastq.done) => void - type asyncWorker = (this: C, task: T) => Promise - type done = (err: Error | null, result?: R) => void - type errorHandler = (err: Error, task: T) => void - - interface queue { - /** Add a task at the end of the queue. `done(err, result)` will be called when the task was processed. */ - push(task: T, done?: done): void - /** Add a task at the beginning of the queue. `done(err, result)` will be called when the task was processed. */ - unshift(task: T, done?: done): void - /** Pause the processing of tasks. Currently worked tasks are not stopped. */ - pause(): any - /** Resume the processing of tasks. */ - resume(): any - running(): number - /** Returns `false` if there are tasks being processed or waiting to be processed. `true` otherwise. */ - idle(): boolean - /** Returns the number of tasks waiting to be processed (in the queue). */ - length(): number - /** Returns all the tasks be processed (in the queue). Returns empty array when there are no tasks */ - getQueue(): T[] - /** Removes all tasks waiting to be processed, and reset `drain` to an empty function. */ - kill(): any - /** Same than `kill` but the `drain` function will be called before reset to empty. */ - killAndDrain(): any - /** Set a global error handler. `handler(err, task)` will be called each time a task is completed, `err` will be not null if the task has thrown an error. */ - error(handler: errorHandler): void - /** Property that returns the number of concurrent tasks that could be executed in parallel. It can be altered at runtime. */ - concurrency: number - /** Property (Read-Only) that returns `true` when the queue is in a paused state. */ - readonly paused: boolean - /** Function that will be called when the last item from the queue has been processed by a worker. It can be altered at runtime. */ - drain(): any - /** Function that will be called when the last item from the queue has been assigned to a worker. It can be altered at runtime. */ - empty: () => void - /** Function that will be called when the queue hits the concurrency limit. It can be altered at runtime. */ - saturated: () => void - } - - interface queueAsPromised extends queue { - /** Add a task at the end of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ - push(task: T): Promise - /** Add a task at the beginning of the queue. The returned `Promise` will be fulfilled (rejected) when the task is completed successfully (unsuccessfully). */ - unshift(task: T): Promise - /** Wait for the queue to be drained. The returned `Promise` will be resolved when all tasks in the queue have been processed by a worker. 
*/ - drained(): Promise - } - - function promise(context: C, worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised - function promise(worker: fastq.asyncWorker, concurrency: number): fastq.queueAsPromised -} - -export = fastq diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json deleted file mode 100644 index 989151f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/package.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "name": "fastq", - "version": "1.19.1", - "description": "Fast, in memory work queue", - "main": "queue.js", - "scripts": { - "lint": "standard --verbose | snazzy", - "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test/test.js test/promise.js", - "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js test/promise.js", - "test:report": "npm run lint && npm run unit:report", - "test": "npm run lint && npm run unit", - "typescript": "tsc --project ./test/tsconfig.json", - "legacy": "tape test/test.js" - }, - "pre-commit": [ - "test", - "typescript" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/fastq.git" - }, - "keywords": [ - "fast", - "queue", - "async", - "worker" - ], - "author": "Matteo Collina ", - "license": "ISC", - "bugs": { - "url": "https://github.com/mcollina/fastq/issues" - }, - "homepage": "https://github.com/mcollina/fastq#readme", - "devDependencies": { - "async": "^3.1.0", - "neo-async": "^2.6.1", - "nyc": "^17.0.0", - "pre-commit": "^1.2.2", - "snazzy": "^9.0.0", - "standard": "^16.0.0", - "tape": "^5.0.0", - "typescript": "^5.0.4" - }, - "dependencies": { - "reusify": "^1.0.4" - }, - "standard": { - "ignore": [ - "example.mjs" - ] - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js deleted file mode 100644 index 7ea8a31..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/queue.js +++ /dev/null @@ -1,311 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var reusify = require('reusify') - -function fastqueue (context, worker, _concurrency) { - if (typeof context === 'function') { - _concurrency = worker - worker = context - context = null - } - - if (!(_concurrency >= 1)) { - throw new Error('fastqueue concurrency must be equal to or greater than 1') - } - - var cache = reusify(Task) - var queueHead = null - var queueTail = null - var _running = 0 - var errorHandler = null - - var self = { - push: push, - drain: noop, - saturated: noop, - pause: pause, - paused: false, - - get concurrency () { - return _concurrency - }, - set concurrency (value) { - if (!(value >= 1)) { - throw new Error('fastqueue concurrency must be equal to or greater than 1') - } - _concurrency = value - - if (self.paused) return - for (; queueHead && _running < _concurrency;) { - _running++ - release() - } - }, - - running: running, - resume: resume, - idle: idle, - length: length, - getQueue: getQueue, - unshift: unshift, - empty: noop, - kill: kill, - killAndDrain: killAndDrain, - error: error - } - - return self - - function running () { - return _running - } - - function pause () { - self.paused = true - } - - function length () { - var current = queueHead - var counter = 0 - - while (current) { - current = current.next - counter++ - } - - return counter - } - - function getQueue () { - var 
current = queueHead - var tasks = [] - - while (current) { - tasks.push(current.value) - current = current.next - } - - return tasks - } - - function resume () { - if (!self.paused) return - self.paused = false - if (queueHead === null) { - _running++ - release() - return - } - for (; queueHead && _running < _concurrency;) { - _running++ - release() - } - } - - function idle () { - return _running === 0 && self.length() === 0 - } - - function push (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running >= _concurrency || self.paused) { - if (queueTail) { - queueTail.next = current - queueTail = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function unshift (value, done) { - var current = cache.get() - - current.context = context - current.release = release - current.value = value - current.callback = done || noop - current.errorHandler = errorHandler - - if (_running >= _concurrency || self.paused) { - if (queueHead) { - current.next = queueHead - queueHead = current - } else { - queueHead = current - queueTail = current - self.saturated() - } - } else { - _running++ - worker.call(context, current.value, current.worked) - } - } - - function release (holder) { - if (holder) { - cache.release(holder) - } - var next = queueHead - if (next && _running <= _concurrency) { - if (!self.paused) { - if (queueTail === queueHead) { - queueTail = null - } - queueHead = next.next - next.next = null - worker.call(context, next.value, next.worked) - if (queueTail === null) { - self.empty() - } - } else { - _running-- - } - } else if (--_running === 0) { - self.drain() - } - } - - function kill () { - queueHead = null - queueTail = null - self.drain = noop - } - - function killAndDrain () { - queueHead = null - queueTail = null - self.drain() - self.drain = noop - } - - function error (handler) { - errorHandler = handler - } -} - -function noop () {} - -function Task () { - this.value = null - this.callback = noop - this.next = null - this.release = noop - this.context = null - this.errorHandler = null - - var self = this - - this.worked = function worked (err, result) { - var callback = self.callback - var errorHandler = self.errorHandler - var val = self.value - self.value = null - self.callback = noop - if (self.errorHandler) { - errorHandler(err, val) - } - callback.call(self.context, err, result) - self.release(self) - } -} - -function queueAsPromised (context, worker, _concurrency) { - if (typeof context === 'function') { - _concurrency = worker - worker = context - context = null - } - - function asyncWrapper (arg, cb) { - worker.call(this, arg) - .then(function (res) { - cb(null, res) - }, cb) - } - - var queue = fastqueue(context, asyncWrapper, _concurrency) - - var pushCb = queue.push - var unshiftCb = queue.unshift - - queue.push = push - queue.unshift = unshift - queue.drained = drained - - return queue - - function push (value) { - var p = new Promise(function (resolve, reject) { - pushCb(value, function (err, result) { - if (err) { - reject(err) - return - } - resolve(result) - }) - }) - - // Let's fork the promise chain to - // make the error bubble up to the user but - // not lead to a unhandledRejection - p.catch(noop) - - return p - } - - function unshift (value) { - var p = new Promise(function (resolve, 
reject) { - unshiftCb(value, function (err, result) { - if (err) { - reject(err) - return - } - resolve(result) - }) - }) - - // Let's fork the promise chain to - // make the error bubble up to the user but - // not lead to a unhandledRejection - p.catch(noop) - - return p - } - - function drained () { - var p = new Promise(function (resolve) { - process.nextTick(function () { - if (queue.idle()) { - resolve() - } else { - var previousDrain = queue.drain - queue.drain = function () { - if (typeof previousDrain === 'function') previousDrain() - resolve() - queue.drain = previousDrain - } - } - }) - }) - - return p - } -} - -module.exports = fastqueue -module.exports.promise = queueAsPromised diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts deleted file mode 100644 index a47d441..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/example.ts +++ /dev/null @@ -1,83 +0,0 @@ -import * as fastq from '../' -import { promise as queueAsPromised } from '../' - -// Basic example - -const queue = fastq(worker, 1) - -queue.push('world', (err, result) => { - if (err) throw err - console.log('the result is', result) -}) - -queue.push('push without cb') - -queue.concurrency - -queue.drain() - -queue.empty = () => undefined - -console.log('the queue tasks are', queue.getQueue()) - -queue.idle() - -queue.kill() - -queue.killAndDrain() - -queue.length - -queue.pause() - -queue.resume() - -queue.running() - -queue.saturated = () => undefined - -queue.unshift('world', (err, result) => { - if (err) throw err - console.log('the result is', result) -}) - -queue.unshift('unshift without cb') - -function worker(task: any, cb: fastq.done) { - cb(null, 'hello ' + task) -} - -// Generics example - -interface GenericsContext { - base: number; -} - -const genericsQueue = fastq({ base: 6 }, genericsWorker, 1) - -genericsQueue.push(7, (err, done) => { - if (err) throw err - console.log('the result is', done) -}) - -genericsQueue.unshift(7, (err, done) => { - if (err) throw err - console.log('the result is', done) -}) - -function genericsWorker(this: GenericsContext, task: number, cb: fastq.done) { - cb(null, 'the meaning of life is ' + (this.base * task)) -} - -const queue2 = queueAsPromised(asyncWorker, 1) - -async function asyncWorker(task: any) { - return 'hello ' + task -} - -async function run () { - await queue.push(42) - await queue.unshift(42) -} - -run() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js deleted file mode 100644 index 45349a4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/promise.js +++ /dev/null @@ -1,291 +0,0 @@ -'use strict' - -const test = require('tape') -const buildQueue = require('../').promise -const { promisify } = require('util') -const sleep = promisify(setTimeout) -const immediate = promisify(setImmediate) - -test('concurrency', function (t) { - t.plan(2) - t.throws(buildQueue.bind(null, worker, 0)) - t.doesNotThrow(buildQueue.bind(null, worker, 1)) - - async function worker (arg) { - return true - } -}) - -test('worker execution', async function (t) { - const queue = buildQueue(worker, 1) - - const result = await queue.push(42) - - t.equal(result, true, 'result matches') - - async function worker (arg) { - t.equal(arg, 42) - return true - } -}) - -test('limit', async function (t) { - const 
queue = buildQueue(worker, 1) - - const [res1, res2] = await Promise.all([queue.push(10), queue.push(0)]) - t.equal(res1, 10, 'the result matches') - t.equal(res2, 0, 'the result matches') - - async function worker (arg) { - await sleep(arg) - return arg - } -}) - -test('multiple executions', async function (t) { - const queue = buildQueue(worker, 1) - const toExec = [1, 2, 3, 4, 5] - const expected = ['a', 'b', 'c', 'd', 'e'] - let count = 0 - - await Promise.all(toExec.map(async function (task, i) { - const result = await queue.push(task) - t.equal(result, expected[i], 'the result matches') - })) - - async function worker (arg) { - t.equal(arg, toExec[count], 'arg matches') - return expected[count++] - } -}) - -test('drained', async function (t) { - const queue = buildQueue(worker, 2) - - const toExec = new Array(10).fill(10) - let count = 0 - - async function worker (arg) { - await sleep(arg) - count++ - } - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() - - t.equal(count, toExec.length) - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() - - t.equal(count, toExec.length * 2) -}) - -test('drained with exception should not throw', async function (t) { - const queue = buildQueue(worker, 2) - - const toExec = new Array(10).fill(10) - - async function worker () { - throw new Error('foo') - } - - toExec.forEach(function (i) { - queue.push(i) - }) - - await queue.drained() -}) - -test('drained with drain function', async function (t) { - let drainCalled = false - const queue = buildQueue(worker, 2) - - queue.drain = function () { - drainCalled = true - } - - const toExec = new Array(10).fill(10) - let count = 0 - - async function worker (arg) { - await sleep(arg) - count++ - } - - toExec.forEach(function () { - queue.push() - }) - - await queue.drained() - - t.equal(count, toExec.length) - t.equal(drainCalled, true) -}) - -test('drained while idle should resolve', async function (t) { - const queue = buildQueue(worker, 2) - - async function worker (arg) { - await sleep(arg) - } - - await queue.drained() -}) - -test('drained while idle should not call the drain function', async function (t) { - let drainCalled = false - const queue = buildQueue(worker, 2) - - queue.drain = function () { - drainCalled = true - } - - async function worker (arg) { - await sleep(arg) - } - - await queue.drained() - - t.equal(drainCalled, false) -}) - -test('set this', async function (t) { - t.plan(1) - const that = {} - const queue = buildQueue(that, worker, 1) - - await queue.push(42) - - async function worker (arg) { - t.equal(this, that, 'this matches') - } -}) - -test('unshift', async function (t) { - const queue = buildQueue(worker, 1) - const expected = [1, 2, 3, 4] - - await Promise.all([ - queue.push(1), - queue.push(4), - queue.unshift(3), - queue.unshift(2) - ]) - - t.is(expected.length, 0) - - async function worker (arg) { - t.equal(expected.shift(), arg, 'tasks come in order') - } -}) - -test('push with worker throwing error', async function (t) { - t.plan(5) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - try { - await q.push(42) - } catch (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 
'error message should be "test error"') - } -}) - -test('unshift with worker throwing error', async function (t) { - t.plan(2) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - try { - await q.unshift(42) - } catch (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - } -}) - -test('no unhandledRejection (push)', async function (t) { - function handleRejection () { - t.fail('unhandledRejection') - } - process.once('unhandledRejection', handleRejection) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - - q.push(42) - - await immediate() - process.removeListener('unhandledRejection', handleRejection) -}) - -test('no unhandledRejection (unshift)', async function (t) { - function handleRejection () { - t.fail('unhandledRejection') - } - process.once('unhandledRejection', handleRejection) - const q = buildQueue(async function (task, cb) { - throw new Error('test error') - }, 1) - - q.unshift(42) - - await immediate() - process.removeListener('unhandledRejection', handleRejection) -}) - -test('drained should resolve after async tasks complete', async function (t) { - const logs = [] - - async function processTask () { - await new Promise(resolve => setTimeout(resolve, 0)) - logs.push('processed') - } - - const queue = buildQueue(processTask, 1) - queue.drain = () => logs.push('called drain') - - queue.drained().then(() => logs.push('drained promise resolved')) - - await Promise.all([ - queue.push(), - queue.push(), - queue.push() - ]) - - t.deepEqual(logs, [ - 'processed', - 'processed', - 'processed', - 'called drain', - 'drained promise resolved' - ], 'events happened in correct order') -}) - -test('drained should handle undefined drain function', async function (t) { - const queue = buildQueue(worker, 1) - - async function worker (arg) { - await sleep(10) - return arg - } - - queue.drain = undefined - queue.push(1) - await queue.drained() - - t.pass('drained resolved successfully with undefined drain') -}) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js deleted file mode 100644 index 79f0f6c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/test.js +++ /dev/null @@ -1,653 +0,0 @@ -'use strict' - -/* eslint-disable no-var */ - -var test = require('tape') -var buildQueue = require('../') - -test('concurrency', function (t) { - t.plan(6) - t.throws(buildQueue.bind(null, worker, 0)) - t.throws(buildQueue.bind(null, worker, NaN)) - t.doesNotThrow(buildQueue.bind(null, worker, 1)) - - var queue = buildQueue(worker, 1) - t.throws(function () { - queue.concurrency = 0 - }) - t.throws(function () { - queue.concurrency = NaN - }) - t.doesNotThrow(function () { - queue.concurrency = 2 - }) - - function worker (arg, cb) { - cb(null, true) - } -}) - -test('worker execution', function (t) { - t.plan(3) - - var queue = buildQueue(worker, 1) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - function worker (arg, cb) { - t.equal(arg, 42) - cb(null, true) - } -}) - -test('limit', function (t) { - t.plan(4) - - var expected = [10, 0] - var queue = buildQueue(worker, 1) - - queue.push(10, result) - queue.push(0, result) - - function result (err, arg) { - t.error(err, 'no error') - t.equal(arg, expected.shift(), 
'the result matches') - } - - function worker (arg, cb) { - setTimeout(cb, arg, null, arg) - } -}) - -test('multiple executions', function (t) { - t.plan(15) - - var queue = buildQueue(worker, 1) - var toExec = [1, 2, 3, 4, 5] - var count = 0 - - toExec.forEach(function (task) { - queue.push(task, done) - }) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, toExec[count - 1], 'the result matches') - } - - function worker (arg, cb) { - t.equal(arg, toExec[count], 'arg matches') - count++ - setImmediate(cb, null, arg) - } -}) - -test('multiple executions, one after another', function (t) { - t.plan(15) - - var queue = buildQueue(worker, 1) - var toExec = [1, 2, 3, 4, 5] - var count = 0 - - queue.push(toExec[0], done) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, toExec[count - 1], 'the result matches') - if (count < toExec.length) { - queue.push(toExec[count], done) - } - } - - function worker (arg, cb) { - t.equal(arg, toExec[count], 'arg matches') - count++ - setImmediate(cb, null, arg) - } -}) - -test('set this', function (t) { - t.plan(3) - - var that = {} - var queue = buildQueue(that, worker, 1) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(this, that, 'this matches') - }) - - function worker (arg, cb) { - t.equal(this, that, 'this matches') - cb(null, true) - } -}) - -test('drain', function (t) { - t.plan(4) - - var queue = buildQueue(worker, 1) - var worked = false - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - queue.drain = function () { - t.equal(true, worked, 'drained') - } - - function worker (arg, cb) { - t.equal(arg, 42) - worked = true - setImmediate(cb, null, true) - } -}) - -test('pause && resume', function (t) { - t.plan(13) - - var queue = buildQueue(worker, 1) - var worked = false - var expected = [42, 24] - - t.notOk(queue.paused, 'it should not be paused') - - queue.pause() - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - queue.push(24, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - t.notOk(worked, 'it should be paused') - t.ok(queue.paused, 'it should be paused') - - queue.resume() - queue.pause() - queue.resume() - queue.resume() // second resume is a no-op - - function worker (arg, cb) { - t.notOk(queue.paused, 'it should not be paused') - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - t.equal(arg, expected.shift()) - worked = true - process.nextTick(function () { cb(null, true) }) - } -}) - -test('pause in flight && resume', function (t) { - t.plan(16) - - var queue = buildQueue(worker, 1) - var expected = [42, 24, 12] - - t.notOk(queue.paused, 'it should not be paused') - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.ok(queue.paused, 'it should be paused') - process.nextTick(function () { - queue.resume() - queue.pause() - queue.resume() - }) - }) - - queue.push(24, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.paused, 'it should not be paused') - }) - - queue.push(12, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.paused, 'it should not be paused') - }) - - queue.pause() - - function worker (arg, cb) { - t.ok(queue.running() <= 
queue.concurrency, 'should respect the concurrency') - t.equal(arg, expected.shift()) - process.nextTick(function () { cb(null, true) }) - } -}) - -test('altering concurrency', function (t) { - t.plan(24) - - var queue = buildQueue(worker, 1) - - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - - queue.pause() - - queue.concurrency = 3 // concurrency changes are ignored while paused - queue.concurrency = 2 - - queue.resume() - - t.equal(queue.running(), 2, '2 jobs running') - - queue.concurrency = 3 - - t.equal(queue.running(), 3, '3 jobs running') - - queue.concurrency = 1 - - t.equal(queue.running(), 3, '3 jobs running') // running jobs can't be killed - - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - queue.push(24, workDone) - - function workDone (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - } - - function worker (arg, cb) { - t.ok(queue.running() <= queue.concurrency, 'should respect the concurrency') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('idle()', function (t) { - t.plan(12) - - var queue = buildQueue(worker, 1) - - t.ok(queue.idle(), 'queue is idle') - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - t.notOk(queue.idle(), 'queue is not idle') - }) - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - // it will go idle after executing this function - setImmediate(function () { - t.ok(queue.idle(), 'queue is now idle') - }) - }) - - t.notOk(queue.idle(), 'queue is not idle') - - function worker (arg, cb) { - t.notOk(queue.idle(), 'queue is not idle') - t.equal(arg, 42) - setImmediate(cb, null, true) - } -}) - -test('saturated', function (t) { - t.plan(9) - - var queue = buildQueue(worker, 1) - var preworked = 0 - var worked = 0 - - queue.saturated = function () { - t.pass('saturated') - t.equal(preworked, 1, 'started 1 task') - t.equal(worked, 0, 'worked zero task') - } - - queue.push(42, done) - queue.push(42, done) - - function done (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - } - - function worker (arg, cb) { - t.equal(arg, 42) - preworked++ - setImmediate(function () { - worked++ - cb(null, true) - }) - } -}) - -test('length', function (t) { - t.plan(7) - - var queue = buildQueue(worker, 1) - - t.equal(queue.length(), 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.length(), 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.length(), 1, 'one task waiting') - queue.push(42, done) - t.equal(queue.length(), 2, 'two tasks waiting') - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('getQueue', function (t) { - t.plan(10) - - var queue = buildQueue(worker, 1) - - t.equal(queue.getQueue().length, 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.getQueue().length, 0, 'nothing waiting') - queue.push(42, done) - t.equal(queue.getQueue().length, 1, 'one task waiting') - t.equal(queue.getQueue()[0], 42, 'should be equal') - queue.push(43, done) - t.equal(queue.getQueue().length, 2, 'two tasks waiting') - t.equal(queue.getQueue()[0], 42, 'should be equal') - t.equal(queue.getQueue()[1], 43, 'should be equal') - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, 
true) - }) - } -}) - -test('unshift', function (t) { - t.plan(8) - - var queue = buildQueue(worker, 1) - var expected = [1, 2, 3, 4] - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - - function done (err, result) { - t.error(err, 'no error') - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('unshift && empty', function (t) { - t.plan(2) - - var queue = buildQueue(worker, 1) - var completed = false - - queue.pause() - - queue.empty = function () { - t.notOk(completed, 'the task has not completed yet') - } - - queue.unshift(1, done) - - queue.resume() - - function done (err, result) { - completed = true - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('push && empty', function (t) { - t.plan(2) - - var queue = buildQueue(worker, 1) - var completed = false - - queue.pause() - - queue.empty = function () { - t.notOk(completed, 'the task has not completed yet') - } - - queue.push(1, done) - - queue.resume() - - function done (err, result) { - completed = true - t.error(err, 'no error') - } - - function worker (arg, cb) { - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('kill', function (t) { - t.plan(5) - - var queue = buildQueue(worker, 1) - var expected = [1] - - var predrain = queue.drain - - queue.drain = function drain () { - t.fail('drain should never be called') - } - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - queue.kill() - - function done (err, result) { - t.error(err, 'no error') - setImmediate(function () { - t.equal(queue.length(), 0, 'no queued tasks') - t.equal(queue.running(), 0, 'no running tasks') - t.equal(queue.drain, predrain, 'drain is back to default') - }) - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('killAndDrain', function (t) { - t.plan(6) - - var queue = buildQueue(worker, 1) - var expected = [1] - - var predrain = queue.drain - - queue.drain = function drain () { - t.pass('drain has been called') - } - - queue.push(1, done) - queue.push(4, done) - queue.unshift(3, done) - queue.unshift(2, done) - queue.killAndDrain() - - function done (err, result) { - t.error(err, 'no error') - setImmediate(function () { - t.equal(queue.length(), 0, 'no queued tasks') - t.equal(queue.running(), 0, 'no running tasks') - t.equal(queue.drain, predrain, 'drain is back to default') - }) - } - - function worker (arg, cb) { - t.equal(expected.shift(), arg, 'tasks come in order') - setImmediate(function () { - cb(null, true) - }) - } -}) - -test('pause && idle', function (t) { - t.plan(11) - - var queue = buildQueue(worker, 1) - var worked = false - - t.notOk(queue.paused, 'it should not be paused') - t.ok(queue.idle(), 'should be idle') - - queue.pause() - - queue.push(42, function (err, result) { - t.error(err, 'no error') - t.equal(result, true, 'result matches') - }) - - t.notOk(worked, 'it should be paused') - t.ok(queue.paused, 'it should be paused') - t.notOk(queue.idle(), 'should not be idle') - - queue.resume() - - t.notOk(queue.paused, 'it should not be paused') - t.notOk(queue.idle(), 'it should not be idle') - - function worker (arg, cb) { - t.equal(arg, 42) - worked = true - process.nextTick(cb.bind(null, null, true)) - process.nextTick(function () { - 
t.ok(queue.idle(), 'is should be idle') - }) - } -}) - -test('push without cb', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - - queue.push(42) - - function worker (arg, cb) { - t.equal(arg, 42) - cb() - } -}) - -test('unshift without cb', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - - queue.unshift(42) - - function worker (arg, cb) { - t.equal(arg, 42) - cb() - } -}) - -test('push with worker throwing error', function (t) { - t.plan(5) - var q = buildQueue(function (task, cb) { - cb(new Error('test error'), null) - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - q.push(42, function (err) { - t.ok(err instanceof Error, 'push callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - }) -}) - -test('unshift with worker throwing error', function (t) { - t.plan(5) - var q = buildQueue(function (task, cb) { - cb(new Error('test error'), null) - }, 1) - q.error(function (err, task) { - t.ok(err instanceof Error, 'global error handler should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - t.equal(task, 42, 'The task executed should be passed') - }) - q.unshift(42, function (err) { - t.ok(err instanceof Error, 'unshift callback should catch the error') - t.match(err.message, /test error/, 'error message should be "test error"') - }) -}) - -test('pause/resume should trigger drain event', function (t) { - t.plan(1) - - var queue = buildQueue(worker, 1) - queue.pause() - queue.drain = function () { - t.pass('drain should be called') - } - - function worker (arg, cb) { - cb(null, true) - } - - queue.resume() -}) - -test('paused flag', function (t) { - t.plan(2) - - var queue = buildQueue(function (arg, cb) { - cb(null) - }, 1) - t.equal(queue.paused, false) - queue.pause() - t.equal(queue.paused, true) -}) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json deleted file mode 100644 index 66e16e9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fastq/test/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "noEmit": true, - "strict": true - }, - "files": [ - "./example.ts" - ] -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md deleted file mode 100644 index 8d756fe..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/README.md +++ /dev/null @@ -1,237 +0,0 @@ -# fill-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/fill-range.svg?style=flat)](https://www.npmjs.com/package/fill-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![NPM total downloads](https://img.shields.io/npm/dt/fill-range.svg?style=flat)](https://npmjs.org/package/fill-range) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/fill-range.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/fill-range) - -> Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex` - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save fill-range -``` - -## Usage - -Expands numbers and letters, optionally using a `step` as the last argument. _(Numbers may be defined as JavaScript numbers or strings)_. - -```js -const fill = require('fill-range'); -// fill(from, to[, step, options]); - -console.log(fill('1', '10')); //=> ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'] -console.log(fill('1', '10', { toRegex: true })); //=> [1-9]|10 -``` - -**Params** - -* `from`: **{String|Number}** the number or letter to start with -* `to`: **{String|Number}** the number or letter to end with -* `step`: **{String|Number|Object|Function}** Optionally pass a [step](#optionsstep) to use. -* `options`: **{Object|Function}**: See all available [options](#options) - -## Examples - -By default, an array of values is returned. - -**Alphabetical ranges** - -```js -console.log(fill('a', 'e')); //=> ['a', 'b', 'c', 'd', 'e'] -console.log(fill('A', 'E')); //=> [ 'A', 'B', 'C', 'D', 'E' ] -``` - -**Numerical ranges** - -Numbers can be defined as actual numbers or strings. - -```js -console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] -console.log(fill('1', '5')); //=> [ 1, 2, 3, 4, 5 ] -``` - -**Negative ranges** - -Numbers can be defined as actual numbers or strings. 
- -```js -console.log(fill('-5', '-1')); //=> [ '-5', '-4', '-3', '-2', '-1' ] -console.log(fill('-5', '5')); //=> [ '-5', '-4', '-3', '-2', '-1', '0', '1', '2', '3', '4', '5' ] -``` - -**Steps (increments)** - -```js -// numerical ranges with increments -console.log(fill('0', '25', 4)); //=> [ '0', '4', '8', '12', '16', '20', '24' ] -console.log(fill('0', '25', 5)); //=> [ '0', '5', '10', '15', '20', '25' ] -console.log(fill('0', '25', 6)); //=> [ '0', '6', '12', '18', '24' ] - -// alphabetical ranges with increments -console.log(fill('a', 'z', 4)); //=> [ 'a', 'e', 'i', 'm', 'q', 'u', 'y' ] -console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] -console.log(fill('a', 'z', 6)); //=> [ 'a', 'g', 'm', 's', 'y' ] -``` - -## Options - -### options.step - -**Type**: `number` (formatted as a string or number) - -**Default**: `undefined` - -**Description**: The increment to use for the range. Can be used with letters or numbers. - -**Example(s)** - -```js -// numbers -console.log(fill('1', '10', 2)); //=> [ '1', '3', '5', '7', '9' ] -console.log(fill('1', '10', 3)); //=> [ '1', '4', '7', '10' ] -console.log(fill('1', '10', 4)); //=> [ '1', '5', '9' ] - -// letters -console.log(fill('a', 'z', 5)); //=> [ 'a', 'f', 'k', 'p', 'u', 'z' ] -console.log(fill('a', 'z', 7)); //=> [ 'a', 'h', 'o', 'v' ] -console.log(fill('a', 'z', 9)); //=> [ 'a', 'j', 's' ] -``` - -### options.strictRanges - -**Type**: `boolean` - -**Default**: `false` - -**Description**: By default, `null` is returned when an invalid range is passed. Enable this option to throw a `RangeError` on invalid ranges. - -**Example(s)** - -The following are all invalid: - -```js -fill('1.1', '2'); // decimals not supported in ranges -fill('a', '2'); // incompatible range values -fill(1, 10, 'foo'); // invalid "step" argument -``` - -### options.stringify - -**Type**: `boolean` - -**Default**: `undefined` - -**Description**: Cast all returned values to strings. By default, integers are returned as numbers. - -**Example(s)** - -```js -console.log(fill(1, 5)); //=> [ 1, 2, 3, 4, 5 ] -console.log(fill(1, 5, { stringify: true })); //=> [ '1', '2', '3', '4', '5' ] -``` - -### options.toRegex - -**Type**: `boolean` - -**Default**: `undefined` - -**Description**: Create a regex-compatible source string, instead of expanding values to an array. - -**Example(s)** - -```js -// alphabetical range -console.log(fill('a', 'e', { toRegex: true })); //=> '[a-e]' -// alphabetical with step -console.log(fill('a', 'z', 3, { toRegex: true })); //=> 'a|d|g|j|m|p|s|v|y' -// numerical range -console.log(fill('1', '100', { toRegex: true })); //=> '[1-9]|[1-9][0-9]|100' -// numerical range with zero padding -console.log(fill('000001', '100000', { toRegex: true })); -//=> '0{5}[1-9]|0{4}[1-9][0-9]|0{3}[1-9][0-9]{2}|0{2}[1-9][0-9]{3}|0[1-9][0-9]{4}|100000' -``` - -### options.transform - -**Type**: `function` - -**Default**: `undefined` - -**Description**: Customize each value in the returned array (or [string](#optionstoRegex)). _(you can also pass this function as the last argument to `fill()`)_. - -**Example(s)** - -```js -// add zero padding -console.log(fill(1, 5, value => String(value).padStart(4, '0'))); -//=> ['0001', '0002', '0003', '0004', '0005'] -``` - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 116 | [jonschlinkert](https://github.com/jonschlinkert) | -| 4 | [paulmillr](https://github.com/paulmillr) | -| 2 | [realityking](https://github.com/realityking) | -| 2 | [bluelovers](https://github.com/bluelovers) | -| 1 | [edorivai](https://github.com/edorivai) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! - - - - - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 08, 2019._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js deleted file mode 100644 index ddb212e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/index.js +++ /dev/null @@ -1,248 +0,0 @@ -/*! - * fill-range - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Licensed under the MIT License. - */ - -'use strict'; - -const util = require('util'); -const toRegexRange = require('to-regex-range'); - -const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); - -const transform = toNumber => { - return value => toNumber === true ? Number(value) : String(value); -}; - -const isValidValue = value => { - return typeof value === 'number' || (typeof value === 'string' && value !== ''); -}; - -const isNumber = num => Number.isInteger(+num); - -const zeros = input => { - let value = `${input}`; - let index = -1; - if (value[0] === '-') value = value.slice(1); - if (value === '0') return false; - while (value[++index] === '0'); - return index > 0; -}; - -const stringify = (start, end, options) => { - if (typeof start === 'string' || typeof end === 'string') { - return true; - } - return options.stringify === true; -}; - -const pad = (input, maxLength, toNumber) => { - if (maxLength > 0) { - let dash = input[0] === '-' ? '-' : ''; - if (dash) input = input.slice(1); - input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0')); - } - if (toNumber === false) { - return String(input); - } - return input; -}; - -const toMaxLen = (input, maxLength) => { - let negative = input[0] === '-' ? '-' : ''; - if (negative) { - input = input.slice(1); - maxLength--; - } - while (input.length < maxLength) input = '0' + input; - return negative ? ('-' + input) : input; -}; - -const toSequence = (parts, options, maxLen) => { - parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0); - - let prefix = options.capture ? 
'' : '?:'; - let positives = ''; - let negatives = ''; - let result; - - if (parts.positives.length) { - positives = parts.positives.map(v => toMaxLen(String(v), maxLen)).join('|'); - } - - if (parts.negatives.length) { - negatives = `-(${prefix}${parts.negatives.map(v => toMaxLen(String(v), maxLen)).join('|')})`; - } - - if (positives && negatives) { - result = `${positives}|${negatives}`; - } else { - result = positives || negatives; - } - - if (options.wrap) { - return `(${prefix}${result})`; - } - - return result; -}; - -const toRange = (a, b, isNumbers, options) => { - if (isNumbers) { - return toRegexRange(a, b, { wrap: false, ...options }); - } - - let start = String.fromCharCode(a); - if (a === b) return start; - - let stop = String.fromCharCode(b); - return `[${start}-${stop}]`; -}; - -const toRegex = (start, end, options) => { - if (Array.isArray(start)) { - let wrap = options.wrap === true; - let prefix = options.capture ? '' : '?:'; - return wrap ? `(${prefix}${start.join('|')})` : start.join('|'); - } - return toRegexRange(start, end, options); -}; - -const rangeError = (...args) => { - return new RangeError('Invalid range arguments: ' + util.inspect(...args)); -}; - -const invalidRange = (start, end, options) => { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; -}; - -const invalidStep = (step, options) => { - if (options.strictRanges === true) { - throw new TypeError(`Expected step "${step}" to be a number`); - } - return []; -}; - -const fillNumbers = (start, end, step = 1, options = {}) => { - let a = Number(start); - let b = Number(end); - - if (!Number.isInteger(a) || !Number.isInteger(b)) { - if (options.strictRanges === true) throw rangeError([start, end]); - return []; - } - - // fix negative zero - if (a === 0) a = 0; - if (b === 0) b = 0; - - let descending = a > b; - let startString = String(start); - let endString = String(end); - let stepString = String(step); - step = Math.max(Math.abs(step), 1); - - let padded = zeros(startString) || zeros(endString) || zeros(stepString); - let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0; - let toNumber = padded === false && stringify(start, end, options) === false; - let format = options.transform || transform(toNumber); - - if (options.toRegex && step === 1) { - return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options); - } - - let parts = { negatives: [], positives: [] }; - let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num)); - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - if (options.toRegex === true && step > 1) { - push(a); - } else { - range.push(pad(format(a, index), maxLen, toNumber)); - } - a = descending ? a - step : a + step; - index++; - } - - if (options.toRegex === true) { - return step > 1 - ? 
toSequence(parts, options, maxLen) - : toRegex(range, null, { wrap: false, ...options }); - } - - return range; -}; - -const fillLetters = (start, end, step = 1, options = {}) => { - if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) { - return invalidRange(start, end, options); - } - - let format = options.transform || (val => String.fromCharCode(val)); - let a = `${start}`.charCodeAt(0); - let b = `${end}`.charCodeAt(0); - - let descending = a > b; - let min = Math.min(a, b); - let max = Math.max(a, b); - - if (options.toRegex && step === 1) { - return toRange(min, max, false, options); - } - - let range = []; - let index = 0; - - while (descending ? a >= b : a <= b) { - range.push(format(a, index)); - a = descending ? a - step : a + step; - index++; - } - - if (options.toRegex === true) { - return toRegex(range, null, { wrap: false, options }); - } - - return range; -}; - -const fill = (start, end, step, options = {}) => { - if (end == null && isValidValue(start)) { - return [start]; - } - - if (!isValidValue(start) || !isValidValue(end)) { - return invalidRange(start, end, options); - } - - if (typeof step === 'function') { - return fill(start, end, 1, { transform: step }); - } - - if (isObject(step)) { - return fill(start, end, 0, step); - } - - let opts = { ...options }; - if (opts.capture === true) opts.wrap = true; - step = step || opts.step || 1; - - if (!isNumber(step)) { - if (step != null && !isObject(step)) return invalidStep(step, opts); - return fill(start, end, 1, step); - } - - if (isNumber(start) && isNumber(end)) { - return fillNumbers(start, end, step, opts); - } - - return fillLetters(start, end, Math.max(Math.abs(step), 1), opts); -}; - -module.exports = fill; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json deleted file mode 100644 index 582357f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/fill-range/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "fill-range", - "description": "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`", - "version": "7.1.1", - "homepage": "https://github.com/jonschlinkert/fill-range", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Edo Rivai (edo.rivai.nl)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Paul Miller (paulmillr.com)", - "Rouven Weßling (www.rouvenwessling.de)", - "(https://github.com/wtgtybhertgeghgtwtg)" - ], - "repository": "jonschlinkert/fill-range", - "bugs": { - "url": "https://github.com/jonschlinkert/fill-range/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8" - }, - "scripts": { - "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", - "mocha": "mocha --reporter dot", - "test": "npm run lint && npm run mocha", - "test:ci": "npm run test:cover", - "test:cover": "nyc npm run mocha" - }, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "devDependencies": { - "gulp-format-md": "^2.0.0", - "mocha": "^6.1.1", - "nyc": "^15.1.0" - }, - "keywords": [ - "alpha", - "alphabetical", - "array", - "bash", - "brace", - "expand", - "expansion", - "fill", - "glob", - "match", - "matches", - "matching", - "number", - "numerical", - "range", - 
"ranges", - "regex", - "sh" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js deleted file mode 100644 index 2dd7574..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/buffer-stream.js +++ /dev/null @@ -1,52 +0,0 @@ -'use strict'; -const {PassThrough: PassThroughStream} = require('stream'); - -module.exports = options => { - options = {...options}; - - const {array} = options; - let {encoding} = options; - const isBuffer = encoding === 'buffer'; - let objectMode = false; - - if (array) { - objectMode = !(encoding || isBuffer); - } else { - encoding = encoding || 'utf8'; - } - - if (isBuffer) { - encoding = null; - } - - const stream = new PassThroughStream({objectMode}); - - if (encoding) { - stream.setEncoding(encoding); - } - - let length = 0; - const chunks = []; - - stream.on('data', chunk => { - chunks.push(chunk); - - if (objectMode) { - length = chunks.length; - } else { - length += chunk.length; - } - }); - - stream.getBufferedValue = () => { - if (array) { - return chunks; - } - - return isBuffer ? Buffer.concat(chunks, length) : chunks.join(''); - }; - - stream.getBufferedLength = () => length; - - return stream; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts deleted file mode 100644 index 9485b2b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.d.ts +++ /dev/null @@ -1,105 +0,0 @@ -/// -import {Stream} from 'stream'; - -declare class MaxBufferErrorClass extends Error { - readonly name: 'MaxBufferError'; - constructor(); -} - -declare namespace getStream { - interface Options { - /** - Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `MaxBufferError` error. - - @default Infinity - */ - readonly maxBuffer?: number; - } - - interface OptionsWithEncoding extends Options { - /** - [Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. - - @default 'utf8' - */ - readonly encoding?: EncodingType; - } - - type MaxBufferError = MaxBufferErrorClass; -} - -declare const getStream: { - /** - Get the `stream` as a string. - - @returns A promise that resolves when the end event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. - - @example - ``` - import * as fs from 'fs'; - import getStream = require('get-stream'); - - (async () => { - const stream = fs.createReadStream('unicorn.txt'); - - console.log(await getStream(stream)); - // ,,))))))));, - // __)))))))))))))), - // \|/ -\(((((''''((((((((. - // -*-==//////(('' . `)))))), - // /|\ ))| o ;-. '((((( ,(, - // ( `| / ) ;))))' ,_))^;(~ - // | | | ,))((((_ _____------~~~-. 
%,;(;(>';'~ - // o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ - // ; ''''```` `: `:::|\,__,%% );`'; ~ - // | _ ) / `:|`----' `-' - // ______/\/~ | / / - // /~;;.____/;;' / ___--,-( `;;;/ - // / // _;______;'------~~~~~ /;;/\ / - // // | | / ; \;;,\ - // (<_ | ; /',/-----' _> - // \_| ||_ //~;~~~~~~~~~ - // `\_| (,~~ - // \~\ - // ~~ - })(); - ``` - */ - (stream: Stream, options?: getStream.OptionsWithEncoding): Promise; - - /** - Get the `stream` as a buffer. - - It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. - */ - buffer( - stream: Stream, - options?: getStream.Options - ): Promise; - - /** - Get the `stream` as an array of values. - - It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: - - - When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). - - When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. - - When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. - */ - array( - stream: Stream, - options?: getStream.Options - ): Promise; - array( - stream: Stream, - options: getStream.OptionsWithEncoding<'buffer'> - ): Promise; - array( - stream: Stream, - options: getStream.OptionsWithEncoding - ): Promise; - - MaxBufferError: typeof MaxBufferErrorClass; -}; - -export = getStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js deleted file mode 100644 index 1c5d028..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/index.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; -const {constants: BufferConstants} = require('buffer'); -const stream = require('stream'); -const {promisify} = require('util'); -const bufferStream = require('./buffer-stream'); - -const streamPipelinePromisified = promisify(stream.pipeline); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -async function getStream(inputStream, options) { - if (!inputStream) { - throw new Error('Expected a stream'); - } - - options = { - maxBuffer: Infinity, - ...options - }; - - const {maxBuffer} = options; - const stream = bufferStream(options); - - await new Promise((resolve, reject) => { - const rejectPromise = error => { - // Don't retrieve an oversized buffer. 
- if (error && stream.getBufferedLength() <= BufferConstants.MAX_LENGTH) { - error.bufferedData = stream.getBufferedValue(); - } - - reject(error); - }; - - (async () => { - try { - await streamPipelinePromisified(inputStream, stream); - resolve(); - } catch (error) { - rejectPromise(error); - } - })(); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }); - - return stream.getBufferedValue(); -} - -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, {...options, encoding: 'buffer'}); -module.exports.array = (stream, options) => getStream(stream, {...options, array: true}); -module.exports.MaxBufferError = MaxBufferError; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
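The deleted `buffer-stream.js` and `index.js` above implement a small pattern: pipe the input into a `PassThrough` that accumulates chunks, and reject as soon as the buffered length crosses `maxBuffer`. A minimal standalone sketch of that idea, using only Node's built-in `stream.pipeline` and a hypothetical `collect` helper name (not part of the deleted module), could look like this:

```js
'use strict';
const {PassThrough, pipeline} = require('stream');

// Sketch of the buffering pattern used by the deleted module: accumulate
// chunks on a PassThrough and fail fast once `maxBuffer` is exceeded.
function collect(inputStream, {maxBuffer = Infinity} = {}) {
	return new Promise((resolve, reject) => {
		const sink = new PassThrough();
		const chunks = [];
		let length = 0;

		sink.on('data', chunk => {
			chunks.push(chunk);
			length += chunk.length;

			if (length > maxBuffer) {
				const error = new Error('maxBuffer exceeded');
				// Keep whatever was buffered before the limit was hit.
				error.bufferedData = Buffer.concat(chunks, length);
				reject(error);
			}
		});

		pipeline(inputStream, sink, error => {
			if (error) {
				reject(error);
			} else {
				resolve(Buffer.concat(chunks, length));
			}
		});
	});
}

// Example: collect(fs.createReadStream('some-file'), {maxBuffer: 1024 * 1024})
```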
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json deleted file mode 100644 index bd47a75..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "get-stream", - "version": "6.0.1", - "description": "Get a stream as a string, buffer, or array", - "license": "MIT", - "repository": "sindresorhus/get-stream", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=10" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts", - "buffer-stream.js" - ], - "keywords": [ - "get", - "stream", - "promise", - "concat", - "string", - "text", - "buffer", - "read", - "data", - "consume", - "readable", - "readablestream", - "array", - "object" - ], - "devDependencies": { - "@types/node": "^14.0.27", - "ava": "^2.4.0", - "into-stream": "^5.0.0", - "tsd": "^0.13.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md deleted file mode 100644 index 70b01fd..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/get-stream/readme.md +++ /dev/null @@ -1,124 +0,0 @@ -# get-stream - -> Get a stream as a string, buffer, or array - -## Install - -``` -$ npm install get-stream -``` - -## Usage - -```js -const fs = require('fs'); -const getStream = require('get-stream'); - -(async () => { - const stream = fs.createReadStream('unicorn.txt'); - - console.log(await getStream(stream)); - /* - ,,))))))));, - __)))))))))))))), - \|/ -\(((((''''((((((((. - -*-==//////(('' . `)))))), - /|\ ))| o ;-. '((((( ,(, - ( `| / ) ;))))' ,_))^;(~ - | | | ,))((((_ _____------~~~-. %,;(;(>';'~ - o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ - ; ''''```` `: `:::|\,__,%% );`'; ~ - | _ ) / `:|`----' `-' - ______/\/~ | / / - /~;;.____/;;' / ___--,-( `;;;/ - / // _;______;'------~~~~~ /;;/\ / - // | | / ; \;;,\ - (<_ | ; /',/-----' _> - \_| ||_ //~;~~~~~~~~~ - `\_| (,~~ - \~\ - ~~ - */ -})(); -``` - -## API - -The methods returns a promise that resolves when the `end` event fires on the stream, indicating that there is no more data to be read. The stream is switched to flowing mode. - -### getStream(stream, options?) - -Get the `stream` as a string. - -#### options - -Type: `object` - -##### encoding - -Type: `string`\ -Default: `'utf8'` - -[Encoding](https://nodejs.org/api/buffer.html#buffer_buffer) of the incoming stream. - -##### maxBuffer - -Type: `number`\ -Default: `Infinity` - -Maximum length of the returned string. If it exceeds this value before the stream ends, the promise will be rejected with a `getStream.MaxBufferError` error. - -### getStream.buffer(stream, options?) - -Get the `stream` as a buffer. - -It honors the `maxBuffer` option as above, but it refers to byte length rather than string length. - -### getStream.array(stream, options?) - -Get the `stream` as an array of values. - -It honors both the `maxBuffer` and `encoding` options. The behavior changes slightly based on the encoding chosen: - -- When `encoding` is unset, it assumes an [object mode stream](https://nodesource.com/blog/understanding-object-streams/) and collects values emitted from `stream` unmodified. 
In this case `maxBuffer` refers to the number of items in the array (not the sum of their sizes). - -- When `encoding` is set to `buffer`, it collects an array of buffers. `maxBuffer` refers to the summed byte lengths of every buffer in the array. - -- When `encoding` is set to anything else, it collects an array of strings. `maxBuffer` refers to the summed character lengths of every string in the array. - -## Errors - -If the input stream emits an `error` event, the promise will be rejected with the error. The buffered data will be attached to the `bufferedData` property of the error. - -```js -(async () => { - try { - await getStream(streamThatErrorsAtTheEnd('unicorn')); - } catch (error) { - console.log(error.bufferedData); - //=> 'unicorn' - } -})() -``` - -## FAQ - -### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? - -This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, buffer, or array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. - -## Related - -- [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer - ---- - -
-Get professional support for this package with a Tidelift subscription
-Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
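Taken together with the implementation earlier in this diff, a short usage sketch of the API documented in the readme above (the `unicorn.txt` path is the readme's own placeholder, and the 1 MB limit is arbitrary) might look like:

```js
'use strict';
const fs = require('fs');
const getStream = require('get-stream');

(async () => {
	const stream = fs.createReadStream('unicorn.txt');

	try {
		// Rejects with getStream.MaxBufferError once more than 1 MB is buffered.
		const contents = await getStream(stream, {maxBuffer: 1024 * 1024});
		console.log(contents.length);
	} catch (error) {
		if (error instanceof getStream.MaxBufferError) {
			// Whatever was read before the limit was hit is still attached.
			console.log('Truncated after', error.bufferedData.length, 'characters');
		} else {
			throw error;
		}
	}
})();
```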
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md deleted file mode 100644 index fb9de96..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/CHANGELOG.md +++ /dev/null @@ -1,110 +0,0 @@ -### [5.1.2](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2) (2021-03-06) - - -### Bug Fixes - -* eliminate ReDoS ([#36](https://github.com/gulpjs/glob-parent/issues/36)) ([f923116](https://github.com/gulpjs/glob-parent/commit/f9231168b0041fea3f8f954b3cceb56269fc6366)) - -### [5.1.1](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.1) (2021-01-27) - - -### Bug Fixes - -* unescape exclamation mark ([#26](https://github.com/gulpjs/glob-parent/issues/26)) ([a98874f](https://github.com/gulpjs/glob-parent/commit/a98874f1a59e407f4fb1beb0db4efa8392da60bb)) - -## [5.1.0](https://github.com/gulpjs/glob-parent/compare/v5.0.0...v5.1.0) (2021-01-27) - - -### Features - -* add `flipBackslashes` option to disable auto conversion of slashes (closes [#24](https://github.com/gulpjs/glob-parent/issues/24)) ([#25](https://github.com/gulpjs/glob-parent/issues/25)) ([eecf91d](https://github.com/gulpjs/glob-parent/commit/eecf91d5e3834ed78aee39c4eaaae654d76b87b3)) - -## [5.0.0](https://github.com/gulpjs/glob-parent/compare/v4.0.0...v5.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* Drop support for node <6 & bump dependencies - -### Miscellaneous Chores - -* Drop support for node <6 & bump dependencies ([896c0c0](https://github.com/gulpjs/glob-parent/commit/896c0c00b4e7362f60b96e7fc295ae929245255a)) - -## [4.0.0](https://github.com/gulpjs/glob-parent/compare/v3.1.0...v4.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* question marks are valid path characters on Windows so avoid flagging as a glob when alone -* Update is-glob dependency - -### Features - -* hoist regexps and strings for performance gains ([4a80667](https://github.com/gulpjs/glob-parent/commit/4a80667c69355c76a572a5892b0f133c8e1f457e)) -* question marks are valid path characters on Windows so avoid flagging as a glob when alone ([2a551dd](https://github.com/gulpjs/glob-parent/commit/2a551dd0dc3235e78bf3c94843d4107072d17841)) -* Update is-glob dependency ([e41fcd8](https://github.com/gulpjs/glob-parent/commit/e41fcd895d1f7bc617dba45c9d935a7949b9c281)) - -## [3.1.0](https://github.com/gulpjs/glob-parent/compare/v3.0.1...v3.1.0) (2021-01-27) - - -### Features - -* allow basic win32 backslash use ([272afa5](https://github.com/gulpjs/glob-parent/commit/272afa5fd070fc0f796386a5993d4ee4a846988b)) -* handle extglobs (parentheses) containing separators ([7db1bdb](https://github.com/gulpjs/glob-parent/commit/7db1bdb0756e55fd14619e8ce31aa31b17b117fd)) -* new approach to braces/brackets handling ([8269bd8](https://github.com/gulpjs/glob-parent/commit/8269bd89290d99fac9395a354fb56fdcdb80f0be)) -* pre-process braces/brackets sections ([9ef8a87](https://github.com/gulpjs/glob-parent/commit/9ef8a87f66b1a43d0591e7a8e4fc5a18415ee388)) -* preserve escaped brace/bracket at end of string ([8cfb0ba](https://github.com/gulpjs/glob-parent/commit/8cfb0ba84202d51571340dcbaf61b79d16a26c76)) - - -### Bug Fixes - -* trailing escaped square brackets ([99ec9fe](https://github.com/gulpjs/glob-parent/commit/99ec9fecc60ee488ded20a94dd4f18b4f55c4ccf)) - -### [3.0.1](https://github.com/gulpjs/glob-parent/compare/v3.0.0...v3.0.1) (2021-01-27) - - -### Features - -* use path-dirname ponyfill 
([cdbea5f](https://github.com/gulpjs/glob-parent/commit/cdbea5f32a58a54e001a75ddd7c0fccd4776aacc)) - - -### Bug Fixes - -* unescape glob-escaped dirnames on output ([598c533](https://github.com/gulpjs/glob-parent/commit/598c533bdf49c1428bc063aa9b8db40c5a86b030)) - -## [3.0.0](https://github.com/gulpjs/glob-parent/compare/v2.0.0...v3.0.0) (2021-01-27) - - -### ⚠ BREAKING CHANGES - -* update is-glob dependency - -### Features - -* update is-glob dependency ([5c5f8ef](https://github.com/gulpjs/glob-parent/commit/5c5f8efcee362a8e7638cf8220666acd8784f6bd)) - -## [2.0.0](https://github.com/gulpjs/glob-parent/compare/v1.3.0...v2.0.0) (2021-01-27) - - -### Features - -* move up to dirname regardless of glob characters ([f97fb83](https://github.com/gulpjs/glob-parent/commit/f97fb83be2e0a9fc8d3b760e789d2ecadd6aa0c2)) - -## [1.3.0](https://github.com/gulpjs/glob-parent/compare/v1.2.0...v1.3.0) (2021-01-27) - -## [1.2.0](https://github.com/gulpjs/glob-parent/compare/v1.1.0...v1.2.0) (2021-01-27) - - -### Reverts - -* feat: make regex test strings smaller ([dc80fa9](https://github.com/gulpjs/glob-parent/commit/dc80fa9658dca20549cfeba44bbd37d5246fcce0)) - -## [1.1.0](https://github.com/gulpjs/glob-parent/compare/v1.0.0...v1.1.0) (2021-01-27) - - -### Features - -* make regex test strings smaller ([cd83220](https://github.com/gulpjs/glob-parent/commit/cd832208638f45169f986d80fcf66e401f35d233)) - -## 1.0.0 (2021-01-27) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE deleted file mode 100644 index 63222d7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2015, 2019 Elan Shanker - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md deleted file mode 100644 index 36a2793..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/README.md +++ /dev/null @@ -1,137 +0,0 @@ -


- -# glob-parent - -[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Azure Pipelines Build Status][azure-pipelines-image]][azure-pipelines-url] [![Travis Build Status][travis-image]][travis-url] [![AppVeyor Build Status][appveyor-image]][appveyor-url] [![Coveralls Status][coveralls-image]][coveralls-url] [![Gitter chat][gitter-image]][gitter-url] - -Extract the non-magic parent path from a glob string. - -## Usage - -```js -var globParent = require('glob-parent'); - -globParent('path/to/*.js'); // 'path/to' -globParent('/root/path/to/*.js'); // '/root/path/to' -globParent('/*.js'); // '/' -globParent('*.js'); // '.' -globParent('**/*.js'); // '.' -globParent('path/{to,from}'); // 'path' -globParent('path/!(to|from)'); // 'path' -globParent('path/?(to|from)'); // 'path' -globParent('path/+(to|from)'); // 'path' -globParent('path/*(to|from)'); // 'path' -globParent('path/@(to|from)'); // 'path' -globParent('path/**/*'); // 'path' - -// if provided a non-glob path, returns the nearest dir -globParent('path/foo/bar.js'); // 'path/foo' -globParent('path/foo/'); // 'path/foo' -globParent('path/foo'); // 'path' (see issue #3 for details) -``` - -## API - -### `globParent(maybeGlobString, [options])` - -Takes a string and returns the part of the path before the glob begins. Be aware of Escaping rules and Limitations below. - -#### options - -```js -{ - // Disables the automatic conversion of slashes for Windows - flipBackslashes: true -} -``` - -## Escaping - -The following characters have special significance in glob patterns and must be escaped if you want them to be treated as regular path characters: - -- `?` (question mark) unless used as a path segment alone -- `*` (asterisk) -- `|` (pipe) -- `(` (opening parenthesis) -- `)` (closing parenthesis) -- `{` (opening curly brace) -- `}` (closing curly brace) -- `[` (opening bracket) -- `]` (closing bracket) - -**Example** - -```js -globParent('foo/[bar]/') // 'foo' -globParent('foo/\\[bar]/') // 'foo/[bar]' -``` - -## Limitations - -### Braces & Brackets -This library attempts a quick and imperfect method of determining which path -parts have glob magic without fully parsing/lexing the pattern. There are some -advanced use cases that can trip it up, such as nested braces where the outer -pair is escaped and the inner one contains a path separator. If you find -yourself in the unlikely circumstance of being affected by this or need to -ensure higher-fidelity glob handling in your library, it is recommended that you -pre-process your input with [expand-braces] and/or [expand-brackets]. - -### Windows -Backslashes are not valid path separators for globs. If a path with backslashes -is provided anyway, for simple cases, glob-parent will replace the path -separator for you and return the non-glob parent path (now with -forward-slashes, which are still valid as Windows path separators). - -This cannot be used in conjunction with escape characters. - -```js -// BAD -globParent('C:\\Program Files \\(x86\\)\\*.ext') // 'C:/Program Files /(x86/)' - -// GOOD -globParent('C:/Program Files\\(x86\\)/*.ext') // 'C:/Program Files (x86)' -``` - -If you are using escape characters for a pattern without path parts (i.e. -relative to `cwd`), prefix with `./` to avoid confusing glob-parent. - -```js -// BAD -globParent('foo \\[bar]') // 'foo ' -globParent('foo \\[bar]*') // 'foo ' - -// GOOD -globParent('./foo \\[bar]') // 'foo [bar]' -globParent('./foo \\[bar]*') // '.' 
-``` - -## License - -ISC - -[expand-braces]: https://github.com/jonschlinkert/expand-braces -[expand-brackets]: https://github.com/jonschlinkert/expand-brackets - -[downloads-image]: https://img.shields.io/npm/dm/glob-parent.svg -[npm-url]: https://www.npmjs.com/package/glob-parent -[npm-image]: https://img.shields.io/npm/v/glob-parent.svg - -[azure-pipelines-url]: https://dev.azure.com/gulpjs/gulp/_build/latest?definitionId=2&branchName=master -[azure-pipelines-image]: https://dev.azure.com/gulpjs/gulp/_apis/build/status/glob-parent?branchName=master - -[travis-url]: https://travis-ci.org/gulpjs/glob-parent -[travis-image]: https://img.shields.io/travis/gulpjs/glob-parent.svg?label=travis-ci - -[appveyor-url]: https://ci.appveyor.com/project/gulpjs/glob-parent -[appveyor-image]: https://img.shields.io/appveyor/ci/gulpjs/glob-parent.svg?label=appveyor - -[coveralls-url]: https://coveralls.io/r/gulpjs/glob-parent -[coveralls-image]: https://img.shields.io/coveralls/gulpjs/glob-parent/master.svg - -[gitter-url]: https://gitter.im/gulpjs/gulp -[gitter-image]: https://badges.gitter.im/gulpjs/gulp.svg diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js deleted file mode 100644 index 09e257e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -var isGlob = require('is-glob'); -var pathPosixDirname = require('path').posix.dirname; -var isWin32 = require('os').platform() === 'win32'; - -var slash = '/'; -var backslash = /\\/g; -var enclosure = /[\{\[].*[\}\]]$/; -var globby = /(^|[^\\])([\{\[]|\([^\)]+$)/; -var escaped = /\\([\!\*\?\|\[\]\(\)\{\}])/g; - -/** - * @param {string} str - * @param {Object} opts - * @param {boolean} [opts.flipBackslashes=true] - * @returns {string} - */ -module.exports = function globParent(str, opts) { - var options = Object.assign({ flipBackslashes: true }, opts); - - // flip windows path separators - if (options.flipBackslashes && isWin32 && str.indexOf(slash) < 0) { - str = str.replace(backslash, slash); - } - - // special case for strings ending in enclosure containing path separator - if (enclosure.test(str)) { - str += slash; - } - - // preserves full path in case of trailing path separator - str += 'a'; - - // remove path parts that are globby - do { - str = pathPosixDirname(str); - } while (isGlob(str) || globby.test(str)); - - // remove escape chars and return result - return str.replace(escaped, '$1'); -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json deleted file mode 100644 index 125c971..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/glob-parent/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "glob-parent", - "version": "5.1.2", - "description": "Extract the non-magic parent path from a glob string.", - "author": "Gulp Team (https://gulpjs.com/)", - "contributors": [ - "Elan Shanker (https://github.com/es128)", - "Blaine Bublitz " - ], - "repository": "gulpjs/glob-parent", - "license": "ISC", - "engines": { - "node": ">= 6" - }, - "main": "index.js", - "files": [ - "LICENSE", - "index.js" - ], - "scripts": { - "lint": "eslint .", - "pretest": "npm run lint", - "test": "nyc mocha --async-only", - "azure-pipelines": "nyc mocha --async-only --reporter xunit -O output=test.xunit", - "coveralls": "nyc 
report --reporter=text-lcov | coveralls" - }, - "dependencies": { - "is-glob": "^4.0.1" - }, - "devDependencies": { - "coveralls": "^3.0.11", - "eslint": "^2.13.1", - "eslint-config-gulp": "^3.0.1", - "expect": "^1.20.2", - "mocha": "^6.0.2", - "nyc": "^13.3.0" - }, - "keywords": [ - "glob", - "parent", - "strip", - "path", - "dirname", - "directory", - "base", - "wildcard" - ] -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md deleted file mode 100644 index 70d0392..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/CHANGELOG.md +++ /dev/null @@ -1,11 +0,0 @@ -# 2.1.0 - -## TypeScript types - -- Add [TypeScript definitions](src/main.d.ts) - -# 2.0.0 - -## Breaking changes - -- Minimal supported Node.js version is now `10.17.0` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE deleted file mode 100644 index 9af9492..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2019 ehmicky - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md deleted file mode 100644 index 2af37c3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/README.md +++ /dev/null @@ -1,165 +0,0 @@ -[![Codecov](https://img.shields.io/codecov/c/github/ehmicky/human-signals.svg?label=tested&logo=codecov)](https://codecov.io/gh/ehmicky/human-signals) -[![Travis](https://img.shields.io/badge/cross-platform-4cc61e.svg?logo=travis)](https://travis-ci.org/ehmicky/human-signals) -[![Node](https://img.shields.io/node/v/human-signals.svg?logo=node.js)](https://www.npmjs.com/package/human-signals) -[![Gitter](https://img.shields.io/gitter/room/ehmicky/human-signals.svg?logo=gitter)](https://gitter.im/ehmicky/human-signals) -[![Twitter](https://img.shields.io/badge/%E2%80%8B-twitter-4cc61e.svg?logo=twitter)](https://twitter.com/intent/follow?screen_name=ehmicky) -[![Medium](https://img.shields.io/badge/%E2%80%8B-medium-4cc61e.svg?logo=medium)](https://medium.com/@ehmicky) - -Human-friendly process signals. - -This is a map of known process signals with some information about each signal. - -Unlike -[`os.constants.signals`](https://nodejs.org/api/os.html#os_signal_constants) -this includes: - -- human-friendly [descriptions](#description) -- [default actions](#action), including whether they [can be prevented](#forced) -- whether the signal is [supported](#supported) by the current OS - -# Example - -```js -const { signalsByName, signalsByNumber } = require('human-signals') - -console.log(signalsByName.SIGINT) -// { -// name: 'SIGINT', -// number: 2, -// description: 'User interruption with CTRL-C', -// supported: true, -// action: 'terminate', -// forced: false, -// standard: 'ansi' -// } - -console.log(signalsByNumber[8]) -// { -// name: 'SIGFPE', -// number: 8, -// description: 'Floating point arithmetic error', -// supported: true, -// action: 'core', -// forced: false, -// standard: 'ansi' -// } -``` - -# Install - -```bash -npm install human-signals -``` - -# Usage - -## signalsByName - -_Type_: `object` - -Object whose keys are signal [names](#name) and values are -[signal objects](#signal). - -## signalsByNumber - -_Type_: `object` - -Object whose keys are signal [numbers](#number) and values are -[signal objects](#signal). - -## signal - -_Type_: `object` - -Signal object with the following properties. - -### name - -_Type_: `string` - -Standard name of the signal, for example `'SIGINT'`. - -### number - -_Type_: `number` - -Code number of the signal, for example `2`. While most `number` are -cross-platform, some are different between different OS. - -### description - -_Type_: `string` - -Human-friendly description for the signal, for example -`'User interruption with CTRL-C'`. - -### supported - -_Type_: `boolean` - -Whether the current OS can handle this signal in Node.js using -[`process.on(name, handler)`](https://nodejs.org/api/process.html#process_signal_events). - -The list of supported signals -[is OS-specific](https://github.com/ehmicky/cross-platform-node-guide/blob/master/docs/6_networking_ipc/signals.md#cross-platform-signals). - -### action - -_Type_: `string`\ -_Enum_: `'terminate'`, `'core'`, `'ignore'`, `'pause'`, `'unpause'` - -What is the default action for this signal when it is not handled. - -### forced - -_Type_: `boolean` - -Whether the signal's default action cannot be prevented. 
This is `true` for -`SIGTERM`, `SIGKILL` and `SIGSTOP`. - -### standard - -_Type_: `string`\ -_Enum_: `'ansi'`, `'posix'`, `'bsd'`, `'systemv'`, `'other'` - -Which standard defined that signal. - -# Support - -If you found a bug or would like a new feature, _don't hesitate_ to -[submit an issue on GitHub](../../issues). - -For other questions, feel free to -[chat with us on Gitter](https://gitter.im/ehmicky/human-signals). - -Everyone is welcome regardless of personal background. We enforce a -[Code of conduct](CODE_OF_CONDUCT.md) in order to promote a positive and -inclusive environment. - -# Contributing - -This project was made with ❤️. The simplest way to give back is by starring and -sharing it online. - -If the documentation is unclear or has a typo, please click on the page's `Edit` -button (pencil icon) and suggest a correction. - -If you would like to help us fix a bug or add a new feature, please check our -[guidelines](CONTRIBUTING.md). Pull requests are welcome! - -Thanks go to our wonderful contributors: - - - - - - - - - -

-Contributors: ehmicky (💻 🎨 🤔 📖), electrovir (💻)
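As a brief illustration of the `signalsByName` / `signalsByNumber` maps documented in the readme above (the `explainSignal` helper and the `'SIGSEGV'` input are illustrative, not part of the package):

```js
'use strict';
const {signalsByName, signalsByNumber} = require('human-signals');

// Turn a signal name (e.g. the `signal` field reported for a killed child
// process) into a human-readable explanation.
function explainSignal(signalName) {
	const signal = signalsByName[signalName];

	if (signal === undefined) {
		return `${signalName} (unknown signal)`;
	}

	return `${signal.name} (${signal.number}): ${signal.description}`;
}

console.log(explainSignal('SIGSEGV'));
// e.g. 'SIGSEGV (11): Segmentation fault'

// Lookup by number works the other way around.
console.log(signalsByNumber[15].description);
// e.g. 'Termination'
```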
- - - - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js deleted file mode 100644 index 98e8fce..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js +++ /dev/null @@ -1,273 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGNALS=void 0; - -const SIGNALS=[ -{ -name:"SIGHUP", -number:1, -action:"terminate", -description:"Terminal closed", -standard:"posix"}, - -{ -name:"SIGINT", -number:2, -action:"terminate", -description:"User interruption with CTRL-C", -standard:"ansi"}, - -{ -name:"SIGQUIT", -number:3, -action:"core", -description:"User interruption with CTRL-\\", -standard:"posix"}, - -{ -name:"SIGILL", -number:4, -action:"core", -description:"Invalid machine instruction", -standard:"ansi"}, - -{ -name:"SIGTRAP", -number:5, -action:"core", -description:"Debugger breakpoint", -standard:"posix"}, - -{ -name:"SIGABRT", -number:6, -action:"core", -description:"Aborted", -standard:"ansi"}, - -{ -name:"SIGIOT", -number:6, -action:"core", -description:"Aborted", -standard:"bsd"}, - -{ -name:"SIGBUS", -number:7, -action:"core", -description: -"Bus error due to misaligned, non-existing address or paging error", -standard:"bsd"}, - -{ -name:"SIGEMT", -number:7, -action:"terminate", -description:"Command should be emulated but is not implemented", -standard:"other"}, - -{ -name:"SIGFPE", -number:8, -action:"core", -description:"Floating point arithmetic error", -standard:"ansi"}, - -{ -name:"SIGKILL", -number:9, -action:"terminate", -description:"Forced termination", -standard:"posix", -forced:true}, - -{ -name:"SIGUSR1", -number:10, -action:"terminate", -description:"Application-specific signal", -standard:"posix"}, - -{ -name:"SIGSEGV", -number:11, -action:"core", -description:"Segmentation fault", -standard:"ansi"}, - -{ -name:"SIGUSR2", -number:12, -action:"terminate", -description:"Application-specific signal", -standard:"posix"}, - -{ -name:"SIGPIPE", -number:13, -action:"terminate", -description:"Broken pipe or socket", -standard:"posix"}, - -{ -name:"SIGALRM", -number:14, -action:"terminate", -description:"Timeout or timer", -standard:"posix"}, - -{ -name:"SIGTERM", -number:15, -action:"terminate", -description:"Termination", -standard:"ansi"}, - -{ -name:"SIGSTKFLT", -number:16, -action:"terminate", -description:"Stack is empty or overflowed", -standard:"other"}, - -{ -name:"SIGCHLD", -number:17, -action:"ignore", -description:"Child process terminated, paused or unpaused", -standard:"posix"}, - -{ -name:"SIGCLD", -number:17, -action:"ignore", -description:"Child process terminated, paused or unpaused", -standard:"other"}, - -{ -name:"SIGCONT", -number:18, -action:"unpause", -description:"Unpaused", -standard:"posix", -forced:true}, - -{ -name:"SIGSTOP", -number:19, -action:"pause", -description:"Paused", -standard:"posix", -forced:true}, - -{ -name:"SIGTSTP", -number:20, -action:"pause", -description:"Paused using CTRL-Z or \"suspend\"", -standard:"posix"}, - -{ -name:"SIGTTIN", -number:21, -action:"pause", -description:"Background process cannot read terminal input", -standard:"posix"}, - -{ -name:"SIGBREAK", -number:21, -action:"terminate", -description:"User interruption with CTRL-BREAK", -standard:"other"}, - -{ -name:"SIGTTOU", -number:22, -action:"pause", -description:"Background process cannot write to terminal output", -standard:"posix"}, - -{ 
-name:"SIGURG", -number:23, -action:"ignore", -description:"Socket received out-of-band data", -standard:"bsd"}, - -{ -name:"SIGXCPU", -number:24, -action:"core", -description:"Process timed out", -standard:"bsd"}, - -{ -name:"SIGXFSZ", -number:25, -action:"core", -description:"File too big", -standard:"bsd"}, - -{ -name:"SIGVTALRM", -number:26, -action:"terminate", -description:"Timeout or timer", -standard:"bsd"}, - -{ -name:"SIGPROF", -number:27, -action:"terminate", -description:"Timeout or timer", -standard:"bsd"}, - -{ -name:"SIGWINCH", -number:28, -action:"ignore", -description:"Terminal window size changed", -standard:"bsd"}, - -{ -name:"SIGIO", -number:29, -action:"terminate", -description:"I/O is available", -standard:"other"}, - -{ -name:"SIGPOLL", -number:29, -action:"terminate", -description:"Watched event", -standard:"other"}, - -{ -name:"SIGINFO", -number:29, -action:"ignore", -description:"Request for process information", -standard:"other"}, - -{ -name:"SIGPWR", -number:30, -action:"terminate", -description:"Device running out of power", -standard:"systemv"}, - -{ -name:"SIGSYS", -number:31, -action:"core", -description:"Invalid system call", -standard:"other"}, - -{ -name:"SIGUNUSED", -number:31, -action:"terminate", -description:"Invalid system call", -standard:"other"}];exports.SIGNALS=SIGNALS; -//# sourceMappingURL=core.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map deleted file mode 100644 index cbfce26..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/core.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../src/core.js"],"names":["SIGNALS","name","number","action","description","standard","forced"],"mappings":";;AAEO,KAAMA,CAAAA,OAAO,CAAG;AACrB;AACEC,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,iBAJf;AAKEC,QAAQ,CAAE,OALZ,CADqB;;AAQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,+BAJf;AAKEC,QAAQ,CAAE,MALZ,CARqB;;AAerB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,gCAJf;AAKEC,QAAQ,CAAE,OALZ,CAfqB;;AAsBrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,MALZ,CAtBqB;;AA6BrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CA7BqB;;AAoCrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,MALZ,CApCqB;;AA2CrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,SAJf;AAKEC,QAAQ,CAAE,KALZ,CA3CqB;;AAkDrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW;AACT,mEALJ;AAMEC,QAAQ,CAAE,KANZ,CAlDqB;;AA0DrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mDAJf;AAKEC,QAAQ,CAAE,OALZ,CA1DqB;;AAiErB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,MALZ,CAjEqB;;AAwErB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,CAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAxEqB;;AAgFrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CAhFqB;;AAuFrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,oBAJf;AAKEC,QAAQ,CAAE,MALZ,CAvFqB;;AA8FrB;AACEJ,IAAI,CAAE,SADR;AAEEC,M
AAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,OALZ,CA9FqB;;AAqGrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,uBAJf;AAKEC,QAAQ,CAAE,OALZ,CArGqB;;AA4GrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CA5GqB;;AAmHrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,aAJf;AAKEC,QAAQ,CAAE,MALZ,CAnHqB;;AA0HrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,OALZ,CA1HqB;;AAiIrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAjIqB;;AAwIrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8CAJf;AAKEC,QAAQ,CAAE,OALZ,CAxIqB;;AA+IrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,SAHV;AAIEC,WAAW,CAAE,UAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CA/IqB;;AAuJrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,QAJf;AAKEC,QAAQ,CAAE,OALZ;AAMEC,MAAM,CAAE,IANV,CAvJqB;;AA+JrB;AACEL,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oCAJf;AAKEC,QAAQ,CAAE,OALZ,CA/JqB;;AAsKrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,+CAJf;AAKEC,QAAQ,CAAE,OALZ,CAtKqB;;AA6KrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,mCAJf;AAKEC,QAAQ,CAAE,OALZ,CA7KqB;;AAoLrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,OAHV;AAIEC,WAAW,CAAE,oDAJf;AAKEC,QAAQ,CAAE,OALZ,CApLqB;;AA2LrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,kCAJf;AAKEC,QAAQ,CAAE,KALZ,CA3LqB;;AAkMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,mBAJf;AAKEC,QAAQ,CAAE,KALZ,CAlMqB;;AAyMrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,cAJf;AAKEC,QAAQ,CAAE,KALZ,CAzMqB;;AAgNrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAhNqB;;AAuNrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,KALZ,CAvNqB;;AA8NrB;AACEJ,IAAI,CAAE,UADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,8BAJf;AAKEC,QAAQ,CAAE,KALZ,CA9NqB;;AAqOrB;AACEJ,IAAI,CAAE,OADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,kBAJf;AAKEC,QAAQ,CAAE,OALZ,CArOqB;;AA4OrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,eAJf;AAKEC,QAAQ,CAAE,OALZ,CA5OqB;;AAmPrB;AACEJ,IAAI,CAAE,SADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,QAHV;AAIEC,WAAW,CAAE,iCAJf;AAKEC,QAAQ,CAAE,OALZ,CAnPqB;;AA0PrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,6BAJf;AAKEC,QAAQ,CAAE,SALZ,CA1PqB;;AAiQrB;AACEJ,IAAI,CAAE,QADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,MAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAjQqB;;AAwQrB;AACEJ,IAAI,CAAE,WADR;AAEEC,MAAM,CAAE,EAFV;AAGEC,MAAM,CAAE,WAHV;AAIEC,WAAW,CAAE,qBAJf;AAKEC,QAAQ,CAAE,OALZ,CAxQqB,CAAhB,C","sourcesContent":["/* eslint-disable max-lines */\n// List of known process signals with information about them\nexport const SIGNALS = [\n {\n name: 'SIGHUP',\n number: 1,\n action: 'terminate',\n description: 'Terminal closed',\n standard: 'posix',\n },\n {\n name: 'SIGINT',\n number: 2,\n action: 'terminate',\n description: 'User interruption with CTRL-C',\n standard: 'ansi',\n },\n {\n name: 'SIGQUIT',\n number: 3,\n action: 'core',\n description: 'User interruption with CTRL-\\\\',\n standard: 'posix',\n },\n 
{\n name: 'SIGILL',\n number: 4,\n action: 'core',\n description: 'Invalid machine instruction',\n standard: 'ansi',\n },\n {\n name: 'SIGTRAP',\n number: 5,\n action: 'core',\n description: 'Debugger breakpoint',\n standard: 'posix',\n },\n {\n name: 'SIGABRT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'ansi',\n },\n {\n name: 'SIGIOT',\n number: 6,\n action: 'core',\n description: 'Aborted',\n standard: 'bsd',\n },\n {\n name: 'SIGBUS',\n number: 7,\n action: 'core',\n description:\n 'Bus error due to misaligned, non-existing address or paging error',\n standard: 'bsd',\n },\n {\n name: 'SIGEMT',\n number: 7,\n action: 'terminate',\n description: 'Command should be emulated but is not implemented',\n standard: 'other',\n },\n {\n name: 'SIGFPE',\n number: 8,\n action: 'core',\n description: 'Floating point arithmetic error',\n standard: 'ansi',\n },\n {\n name: 'SIGKILL',\n number: 9,\n action: 'terminate',\n description: 'Forced termination',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGUSR1',\n number: 10,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGSEGV',\n number: 11,\n action: 'core',\n description: 'Segmentation fault',\n standard: 'ansi',\n },\n {\n name: 'SIGUSR2',\n number: 12,\n action: 'terminate',\n description: 'Application-specific signal',\n standard: 'posix',\n },\n {\n name: 'SIGPIPE',\n number: 13,\n action: 'terminate',\n description: 'Broken pipe or socket',\n standard: 'posix',\n },\n {\n name: 'SIGALRM',\n number: 14,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'posix',\n },\n {\n name: 'SIGTERM',\n number: 15,\n action: 'terminate',\n description: 'Termination',\n standard: 'ansi',\n },\n {\n name: 'SIGSTKFLT',\n number: 16,\n action: 'terminate',\n description: 'Stack is empty or overflowed',\n standard: 'other',\n },\n {\n name: 'SIGCHLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'posix',\n },\n {\n name: 'SIGCLD',\n number: 17,\n action: 'ignore',\n description: 'Child process terminated, paused or unpaused',\n standard: 'other',\n },\n {\n name: 'SIGCONT',\n number: 18,\n action: 'unpause',\n description: 'Unpaused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGSTOP',\n number: 19,\n action: 'pause',\n description: 'Paused',\n standard: 'posix',\n forced: true,\n },\n {\n name: 'SIGTSTP',\n number: 20,\n action: 'pause',\n description: 'Paused using CTRL-Z or \"suspend\"',\n standard: 'posix',\n },\n {\n name: 'SIGTTIN',\n number: 21,\n action: 'pause',\n description: 'Background process cannot read terminal input',\n standard: 'posix',\n },\n {\n name: 'SIGBREAK',\n number: 21,\n action: 'terminate',\n description: 'User interruption with CTRL-BREAK',\n standard: 'other',\n },\n {\n name: 'SIGTTOU',\n number: 22,\n action: 'pause',\n description: 'Background process cannot write to terminal output',\n standard: 'posix',\n },\n {\n name: 'SIGURG',\n number: 23,\n action: 'ignore',\n description: 'Socket received out-of-band data',\n standard: 'bsd',\n },\n {\n name: 'SIGXCPU',\n number: 24,\n action: 'core',\n description: 'Process timed out',\n standard: 'bsd',\n },\n {\n name: 'SIGXFSZ',\n number: 25,\n action: 'core',\n description: 'File too big',\n standard: 'bsd',\n },\n {\n name: 'SIGVTALRM',\n number: 26,\n action: 'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGPROF',\n number: 27,\n action: 
'terminate',\n description: 'Timeout or timer',\n standard: 'bsd',\n },\n {\n name: 'SIGWINCH',\n number: 28,\n action: 'ignore',\n description: 'Terminal window size changed',\n standard: 'bsd',\n },\n {\n name: 'SIGIO',\n number: 29,\n action: 'terminate',\n description: 'I/O is available',\n standard: 'other',\n },\n {\n name: 'SIGPOLL',\n number: 29,\n action: 'terminate',\n description: 'Watched event',\n standard: 'other',\n },\n {\n name: 'SIGINFO',\n number: 29,\n action: 'ignore',\n description: 'Request for process information',\n standard: 'other',\n },\n {\n name: 'SIGPWR',\n number: 30,\n action: 'terminate',\n description: 'Device running out of power',\n standard: 'systemv',\n },\n {\n name: 'SIGSYS',\n number: 31,\n action: 'core',\n description: 'Invalid system call',\n standard: 'other',\n },\n {\n name: 'SIGUNUSED',\n number: 31,\n action: 'terminate',\n description: 'Invalid system call',\n standard: 'other',\n },\n]\n/* eslint-enable max-lines */\n"],"file":"src/core.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts deleted file mode 100644 index 2dc5ea7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.d.ts +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Object whose keys are signal names and values are signal objects. - */ -export declare const signalsByName: { [signalName: string]: Signal } -/** - * Object whose keys are signal numbers and values are signal objects. - */ -export declare const signalsByNumber: { [signalNumber: string]: Signal } - -export declare type SignalAction = - | 'terminate' - | 'core' - | 'ignore' - | 'pause' - | 'unpause' -export declare type SignalStandard = - | 'ansi' - | 'posix' - | 'bsd' - | 'systemv' - | 'other' - -export declare type Signal = { - /** - * Standard name of the signal, for example 'SIGINT'. - */ - name: string - /** - * Code number of the signal, for example 2. While most number are cross-platform, some are different between different OS. - */ - number: number - /** - * Human-friendly description for the signal, for example 'User interruption with CTRL-C'. - */ - description: string - /** - * Whether the current OS can handle this signal in Node.js using process.on(name, handler). The list of supported signals is OS-specific. - */ - supported: boolean - /** - * What is the default action for this signal when it is not handled. - */ - action: SignalAction - /** - * Whether the signal's default action cannot be prevented. This is true for SIGTERM, SIGKILL and SIGSTOP. - */ - forced: boolean - /** - * Which standard defined that signal. 
- */ - standard: SignalStandard -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js deleted file mode 100644 index 88f5fd2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.signalsByNumber=exports.signalsByName=void 0;var _os=require("os"); - -var _signals=require("./signals.js"); -var _realtime=require("./realtime.js"); - - - -const getSignalsByName=function(){ -const signals=(0,_signals.getSignals)(); -return signals.reduce(getSignalByName,{}); -}; - -const getSignalByName=function( -signalByNameMemo, -{name,number,description,supported,action,forced,standard}) -{ -return{ -...signalByNameMemo, -[name]:{name,number,description,supported,action,forced,standard}}; - -}; - -const signalsByName=getSignalsByName();exports.signalsByName=signalsByName; - - - - -const getSignalsByNumber=function(){ -const signals=(0,_signals.getSignals)(); -const length=_realtime.SIGRTMAX+1; -const signalsA=Array.from({length},(value,number)=> -getSignalByNumber(number,signals)); - -return Object.assign({},...signalsA); -}; - -const getSignalByNumber=function(number,signals){ -const signal=findSignalByNumber(number,signals); - -if(signal===undefined){ -return{}; -} - -const{name,description,supported,action,forced,standard}=signal; -return{ -[number]:{ -name, -number, -description, -supported, -action, -forced, -standard}}; - - -}; - - - -const findSignalByNumber=function(number,signals){ -const signal=signals.find(({name})=>_os.constants.signals[name]===number); - -if(signal!==undefined){ -return signal; -} - -return signals.find(signalA=>signalA.number===number); -}; - -const signalsByNumber=getSignalsByNumber();exports.signalsByNumber=signalsByNumber; -//# sourceMappingURL=main.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map deleted file mode 100644 index 3fdcede..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/main.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"sources":["../../src/main.js"],"names":["getSignalsByName","signals","reduce","getSignalByName","signalByNameMemo","name","number","description","supported","action","forced","standard","signalsByName","getSignalsByNumber","length","SIGRTMAX","signalsA","Array","from","value","getSignalByNumber","Object","assign","signal","findSignalByNumber","undefined","find","constants","signalA","signalsByNumber"],"mappings":"2HAAA;;AAEA;AACA;;;;AAIA,KAAMA,CAAAA,gBAAgB,CAAG,UAAW;AAClC,KAAMC,CAAAA,OAAO,CAAG,yBAAhB;AACA,MAAOA,CAAAA,OAAO,CAACC,MAAR,CAAeC,eAAf,CAAgC,EAAhC,CAAP;AACD,CAHD;;AAKA,KAAMA,CAAAA,eAAe,CAAG;AACtBC,gBADsB;AAEtB,CAAEC,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFsB;AAGtB;AACA,MAAO;AACL,GAAGP,gBADE;AAEL,CAACC,IAAD,EAAQ,CAAEA,IAAF,CAAQC,MAAR,CAAgBC,WAAhB,CAA6BC,SAA7B,CAAwCC,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAFH,CAAP;;AAID,CARD;;AAUO,KAAMC,CAAAA,aAAa,CAAGZ,gBAAgB,EAAtC,C;;;;;AAKP,KAAMa,CAAAA,kBAAkB,CAAG,UAAW;AACpC,KAAMZ,CAAAA,OAAO,CAAG,yBAAhB;AACA,KAAMa,CAAAA,MAAM,CAAGC,mBAAW,CAA1B;AACA,KAAMC,CAAAA,QAAQ,CAAGC,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuB,CAACK,KAAD,CAAQb,MAAR;AACtCc,iBAAiB,CAACd,MAAD,CAASL,OAAT,CADF,CAAjB;;AAGA,MAAOoB,CAAAA,MAAM,CAACC,MAAP,CAAc,EAAd,CAAkB,GAAGN,QAArB,CAAP;AACD,CAPD;;AASA,KAAMI,CAAAA,iBAAiB,CAAG,SAASd,MAAT,CAAiBL,OAAjB,CAA0B;AAClD,KAAMsB,CAAAA,MAAM,CAAGC,kBAAkB,CAAClB,MAAD,CAASL,OAAT,CAAjC;;AAEA,GAAIsB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAO,EAAP;AACD;;AAED,KAAM,CAAEpB,IAAF,CAAQE,WAAR,CAAqBC,SAArB,CAAgCC,MAAhC,CAAwCC,MAAxC,CAAgDC,QAAhD,EAA6DY,MAAnE;AACA,MAAO;AACL,CAACjB,MAAD,EAAU;AACRD,IADQ;AAERC,MAFQ;AAGRC,WAHQ;AAIRC,SAJQ;AAKRC,MALQ;AAMRC,MANQ;AAORC,QAPQ,CADL,CAAP;;;AAWD,CAnBD;;;;AAuBA,KAAMa,CAAAA,kBAAkB,CAAG,SAASlB,MAAT,CAAiBL,OAAjB,CAA0B;AACnD,KAAMsB,CAAAA,MAAM,CAAGtB,OAAO,CAACyB,IAAR,CAAa,CAAC,CAAErB,IAAF,CAAD,GAAcsB,cAAU1B,OAAV,CAAkBI,IAAlB,IAA4BC,MAAvD,CAAf;;AAEA,GAAIiB,MAAM,GAAKE,SAAf,CAA0B;AACxB,MAAOF,CAAAA,MAAP;AACD;;AAED,MAAOtB,CAAAA,OAAO,CAACyB,IAAR,CAAaE,OAAO,EAAIA,OAAO,CAACtB,MAAR,GAAmBA,MAA3C,CAAP;AACD,CARD;;AAUO,KAAMuB,CAAAA,eAAe,CAAGhB,kBAAkB,EAA1C,C","sourcesContent":["import { constants } from 'os'\n\nimport { getSignals } from './signals.js'\nimport { SIGRTMAX } from './realtime.js'\n\n// Retrieve `signalsByName`, an object mapping signal name to signal properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByName = function() {\n const signals = getSignals()\n return signals.reduce(getSignalByName, {})\n}\n\nconst getSignalByName = function(\n signalByNameMemo,\n { name, number, description, supported, action, forced, standard },\n) {\n return {\n ...signalByNameMemo,\n [name]: { name, number, description, supported, action, forced, standard },\n }\n}\n\nexport const signalsByName = getSignalsByName()\n\n// Retrieve `signalsByNumber`, an object mapping signal number to signal\n// properties.\n// We make sure the object is sorted by `number`.\nconst getSignalsByNumber = function() {\n const signals = getSignals()\n const length = SIGRTMAX + 1\n const signalsA = Array.from({ length }, (value, number) =>\n getSignalByNumber(number, signals),\n )\n return Object.assign({}, ...signalsA)\n}\n\nconst getSignalByNumber = function(number, signals) {\n const signal = findSignalByNumber(number, signals)\n\n if (signal === undefined) {\n return {}\n }\n\n const { name, description, supported, action, forced, standard } = signal\n return {\n [number]: {\n name,\n number,\n description,\n supported,\n action,\n forced,\n standard,\n },\n }\n}\n\n// Several signals might end up 
sharing the same number because of OS-specific\n// numbers, in which case those prevail.\nconst findSignalByNumber = function(number, signals) {\n const signal = signals.find(({ name }) => constants.signals[name] === number)\n\n if (signal !== undefined) {\n return signal\n }\n\n return signals.find(signalA => signalA.number === number)\n}\n\nexport const signalsByNumber = getSignalsByNumber()\n"],"file":"src/main.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js deleted file mode 100644 index f665516..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict";Object.defineProperty(exports,"__esModule",{value:true});exports.SIGRTMAX=exports.getRealtimeSignals=void 0; -const getRealtimeSignals=function(){ -const length=SIGRTMAX-SIGRTMIN+1; -return Array.from({length},getRealtimeSignal); -};exports.getRealtimeSignals=getRealtimeSignals; - -const getRealtimeSignal=function(value,index){ -return{ -name:`SIGRT${index+1}`, -number:SIGRTMIN+index, -action:"terminate", -description:"Application-specific signal (realtime)", -standard:"posix"}; - -}; - -const SIGRTMIN=34; -const SIGRTMAX=64;exports.SIGRTMAX=SIGRTMAX; -//# sourceMappingURL=realtime.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map deleted file mode 100644 index 808bbd1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/realtime.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../src/realtime.js"],"names":["getRealtimeSignals","length","SIGRTMAX","SIGRTMIN","Array","from","getRealtimeSignal","value","index","name","number","action","description","standard"],"mappings":";AACO,KAAMA,CAAAA,kBAAkB,CAAG,UAAW;AAC3C,KAAMC,CAAAA,MAAM,CAAGC,QAAQ,CAAGC,QAAX,CAAsB,CAArC;AACA,MAAOC,CAAAA,KAAK,CAACC,IAAN,CAAW,CAAEJ,MAAF,CAAX,CAAuBK,iBAAvB,CAAP;AACD,CAHM,C;;AAKP,KAAMA,CAAAA,iBAAiB,CAAG,SAASC,KAAT,CAAgBC,KAAhB,CAAuB;AAC/C,MAAO;AACLC,IAAI,CAAG,QAAOD,KAAK,CAAG,CAAE,EADnB;AAELE,MAAM,CAAEP,QAAQ,CAAGK,KAFd;AAGLG,MAAM,CAAE,WAHH;AAILC,WAAW,CAAE,wCAJR;AAKLC,QAAQ,CAAE,OALL,CAAP;;AAOD,CARD;;AAUA,KAAMV,CAAAA,QAAQ,CAAG,EAAjB;AACO,KAAMD,CAAAA,QAAQ,CAAG,EAAjB,C","sourcesContent":["// List of realtime signals with information about them\nexport const getRealtimeSignals = function() {\n const length = SIGRTMAX - SIGRTMIN + 1\n return Array.from({ length }, getRealtimeSignal)\n}\n\nconst getRealtimeSignal = function(value, index) {\n return {\n name: `SIGRT${index + 1}`,\n number: SIGRTMIN + index,\n action: 'terminate',\n description: 'Application-specific signal (realtime)',\n standard: 'posix',\n }\n}\n\nconst SIGRTMIN = 34\nexport const SIGRTMAX = 64\n"],"file":"src/realtime.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js deleted file mode 100644 index ab3b387..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js +++ /dev/null @@ -1,35 +0,0 @@ -"use 
strict";Object.defineProperty(exports,"__esModule",{value:true});exports.getSignals=void 0;var _os=require("os"); - -var _core=require("./core.js"); -var _realtime=require("./realtime.js"); - - - -const getSignals=function(){ -const realtimeSignals=(0,_realtime.getRealtimeSignals)(); -const signals=[..._core.SIGNALS,...realtimeSignals].map(normalizeSignal); -return signals; -};exports.getSignals=getSignals; - - - - - - - -const normalizeSignal=function({ -name, -number:defaultNumber, -description, -action, -forced=false, -standard}) -{ -const{ -signals:{[name]:constantSignal}}= -_os.constants; -const supported=constantSignal!==undefined; -const number=supported?constantSignal:defaultNumber; -return{name,number,description,supported,action,forced,standard}; -}; -//# sourceMappingURL=signals.js.map \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map deleted file mode 100644 index 2a6b919..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/build/src/signals.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"sources":["../../src/signals.js"],"names":["getSignals","realtimeSignals","signals","SIGNALS","map","normalizeSignal","name","number","defaultNumber","description","action","forced","standard","constantSignal","constants","supported","undefined"],"mappings":"gGAAA;;AAEA;AACA;;;;AAIO,KAAMA,CAAAA,UAAU,CAAG,UAAW;AACnC,KAAMC,CAAAA,eAAe,CAAG,kCAAxB;AACA,KAAMC,CAAAA,OAAO,CAAG,CAAC,GAAGC,aAAJ,CAAa,GAAGF,eAAhB,EAAiCG,GAAjC,CAAqCC,eAArC,CAAhB;AACA,MAAOH,CAAAA,OAAP;AACD,CAJM,C;;;;;;;;AAYP,KAAMG,CAAAA,eAAe,CAAG,SAAS;AAC/BC,IAD+B;AAE/BC,MAAM,CAAEC,aAFuB;AAG/BC,WAH+B;AAI/BC,MAJ+B;AAK/BC,MAAM,CAAG,KALsB;AAM/BC,QAN+B,CAAT;AAOrB;AACD,KAAM;AACJV,OAAO,CAAE,CAAE,CAACI,IAAD,EAAQO,cAAV,CADL;AAEFC,aAFJ;AAGA,KAAMC,CAAAA,SAAS,CAAGF,cAAc,GAAKG,SAArC;AACA,KAAMT,CAAAA,MAAM,CAAGQ,SAAS,CAAGF,cAAH,CAAoBL,aAA5C;AACA,MAAO,CAAEF,IAAF,CAAQC,MAAR,CAAgBE,WAAhB,CAA6BM,SAA7B,CAAwCL,MAAxC,CAAgDC,MAAhD,CAAwDC,QAAxD,CAAP;AACD,CAdD","sourcesContent":["import { constants } from 'os'\n\nimport { SIGNALS } from './core.js'\nimport { getRealtimeSignals } from './realtime.js'\n\n// Retrieve list of know signals (including realtime) with information about\n// them\nexport const getSignals = function() {\n const realtimeSignals = getRealtimeSignals()\n const signals = [...SIGNALS, ...realtimeSignals].map(normalizeSignal)\n return signals\n}\n\n// Normalize signal:\n// - `number`: signal numbers are OS-specific. This is taken into account by\n// `os.constants.signals`. However we provide a default `number` since some\n// signals are not defined for some OS.\n// - `forced`: set default to `false`\n// - `supported`: set value\nconst normalizeSignal = function({\n name,\n number: defaultNumber,\n description,\n action,\n forced = false,\n standard,\n}) {\n const {\n signals: { [name]: constantSignal },\n } = constants\n const supported = constantSignal !== undefined\n const number = supported ? 
constantSignal : defaultNumber\n return { name, number, description, supported, action, forced, standard }\n}\n"],"file":"src/signals.js"} \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json deleted file mode 100644 index fd1d027..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/human-signals/package.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "name": "human-signals", - "version": "2.1.0", - "main": "build/src/main.js", - "files": [ - "build/src", - "!~" - ], - "scripts": { - "test": "gulp test" - }, - "husky": { - "hooks": { - "pre-push": "gulp check --full" - } - }, - "description": "Human-friendly process signals", - "keywords": [ - "signal", - "signals", - "handlers", - "error-handling", - "errors", - "interrupts", - "sigterm", - "sigint", - "irq", - "process", - "exit", - "exit-code", - "status", - "operating-system", - "es6", - "javascript", - "linux", - "macos", - "windows", - "nodejs" - ], - "license": "Apache-2.0", - "homepage": "https://git.io/JeluP", - "repository": "ehmicky/human-signals", - "bugs": { - "url": "https://github.com/ehmicky/human-signals/issues" - }, - "author": "ehmicky (https://github.com/ehmicky)", - "directories": { - "lib": "src", - "test": "test" - }, - "types": "build/src/main.d.ts", - "dependencies": {}, - "devDependencies": { - "@ehmicky/dev-tasks": "^0.31.9", - "ajv": "^6.12.0", - "ava": "^3.5.0", - "gulp": "^4.0.2", - "husky": "^4.2.3", - "test-each": "^2.0.0" - }, - "engines": { - "node": ">=10.17.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE deleted file mode 100644 index 842218c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2016, Jon Schlinkert - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
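The human-signals build output deleted above boils down to two lookup maps, `signalsByName` and `signalsByNumber`, assembled from the core signal list plus the realtime range generated in realtime.js. A minimal consumer-side sketch (assumed code, not part of the deleted files) of how those maps are typically read:

```js
// Sketch of consumer code (assumed, not part of the deleted package):
// read the two lookup maps exported by human-signals' main.js.
const { signalsByName, signalsByNumber } = require('human-signals');

// Lookup by name: each entry carries name, number, description,
// supported, action, forced and standard.
const { number, description, supported } = signalsByName.SIGTERM;
console.log(`SIGTERM -> ${number}: ${description} (supported: ${supported})`);

// Lookup by number: the realtime signals (SIGRT1..) are filled in by
// realtime.js, so numbers up to SIGRTMAX resolve as well.
console.log(signalsByNumber[9].name);  // usually 'SIGKILL'
console.log(signalsByNumber[34].name); // 'SIGRT1'; its `supported` flag depends on the OS
```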
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md deleted file mode 100644 index 0416af5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# is-extglob [![NPM version](https://img.shields.io/npm/v/is-extglob.svg?style=flat)](https://www.npmjs.com/package/is-extglob) [![NPM downloads](https://img.shields.io/npm/dm/is-extglob.svg?style=flat)](https://npmjs.org/package/is-extglob) [![Build Status](https://img.shields.io/travis/jonschlinkert/is-extglob.svg?style=flat)](https://travis-ci.org/jonschlinkert/is-extglob) - -> Returns true if a string has an extglob. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-extglob -``` - -## Usage - -```js -var isExtglob = require('is-extglob'); -``` - -**True** - -```js -isExtglob('?(abc)'); -isExtglob('@(abc)'); -isExtglob('!(abc)'); -isExtglob('*(abc)'); -isExtglob('+(abc)'); -``` - -**False** - -Escaped extglobs: - -```js -isExtglob('\\?(abc)'); -isExtglob('\\@(abc)'); -isExtglob('\\!(abc)'); -isExtglob('\\*(abc)'); -isExtglob('\\+(abc)'); -``` - -Everything else... - -```js -isExtglob('foo.js'); -isExtglob('!foo.js'); -isExtglob('*.js'); -isExtglob('**/abc.js'); -isExtglob('abc/*.js'); -isExtglob('abc/(aaa|bbb).js'); -isExtglob('abc/[a-z].js'); -isExtglob('abc/{a,b}.js'); -isExtglob('abc/?.js'); -isExtglob('abc.js'); -isExtglob('abc/def/ghi.js'); -``` - -## History - -**v2.0** - -Adds support for escaping. Escaped exglobs no longer return true. - -## About - -### Related projects - -* [has-glob](https://www.npmjs.com/package/has-glob): Returns `true` if an array has a glob pattern. | [homepage](https://github.com/jonschlinkert/has-glob "Returns `true` if an array has a glob pattern.") -* [is-glob](https://www.npmjs.com/package/is-glob): Returns `true` if the given string looks like a glob pattern or an extglob pattern… [more](https://github.com/jonschlinkert/is-glob) | [homepage](https://github.com/jonschlinkert/is-glob "Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a bet") -* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/jonschlinkert/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") - -### Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -### Building docs - -_(This document was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme) (a [verb](https://github.com/verbose/verb) generator), please don't edit the readme directly. 
Any changes to the readme must be made in [.verb.md](.verb.md).)_ - -To generate the readme and API documentation with [verb](https://github.com/verbose/verb): - -```sh -$ npm install -g verb verb-generate-readme && verb -``` - -### Running tests - -Install dev dependencies: - -```sh -$ npm install -d && npm test -``` - -### Author - -**Jon Schlinkert** - -* [github/jonschlinkert](https://github.com/jonschlinkert) -* [twitter/jonschlinkert](http://twitter.com/jonschlinkert) - -### License - -Copyright © 2016, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT license](https://github.com/jonschlinkert/is-extglob/blob/master/LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.1.31, on October 12, 2016._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js deleted file mode 100644 index c1d986f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/*! - * is-extglob - * - * Copyright (c) 2014-2016, Jon Schlinkert. - * Licensed under the MIT License. - */ - -module.exports = function isExtglob(str) { - if (typeof str !== 'string' || str === '') { - return false; - } - - var match; - while ((match = /(\\).|([@?!+*]\(.*\))/g.exec(str))) { - if (match[2]) return true; - str = str.slice(match.index + match[0].length); - } - - return false; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json deleted file mode 100644 index 7a90836..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-extglob/package.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "name": "is-extglob", - "description": "Returns true if a string has an extglob.", - "version": "2.1.1", - "homepage": "https://github.com/jonschlinkert/is-extglob", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "repository": "jonschlinkert/is-extglob", - "bugs": { - "url": "https://github.com/jonschlinkert/is-extglob/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "gulp-format-md": "^0.1.10", - "mocha": "^3.0.2" - }, - "keywords": [ - "bash", - "braces", - "check", - "exec", - "expression", - "extglob", - "glob", - "globbing", - "globstar", - "is", - "match", - "matches", - "pattern", - "regex", - "regular", - "string", - "test" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "has-glob", - "is-glob", - "micromatch" - ] - }, - "reflinks": [ - "verb", - "verb-generate-readme" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE deleted file mode 100644 index 3f2eca1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2017, Jon Schlinkert. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md deleted file mode 100644 index 740724b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/README.md +++ /dev/null @@ -1,206 +0,0 @@ -# is-glob [![NPM version](https://img.shields.io/npm/v/is-glob.svg?style=flat)](https://www.npmjs.com/package/is-glob) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![NPM total downloads](https://img.shields.io/npm/dt/is-glob.svg?style=flat)](https://npmjs.org/package/is-glob) [![Build Status](https://img.shields.io/github/workflow/status/micromatch/is-glob/dev)](https://github.com/micromatch/is-glob/actions) - -> Returns `true` if the given string looks like a glob pattern or an extglob pattern. This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-glob -``` - -You might also be interested in [is-valid-glob](https://github.com/jonschlinkert/is-valid-glob) and [has-glob](https://github.com/jonschlinkert/has-glob). 
- -## Usage - -```js -var isGlob = require('is-glob'); -``` - -### Default behavior - -**True** - -Patterns that have glob characters or regex patterns will return `true`: - -```js -isGlob('!foo.js'); -isGlob('*.js'); -isGlob('**/abc.js'); -isGlob('abc/*.js'); -isGlob('abc/(aaa|bbb).js'); -isGlob('abc/[a-z].js'); -isGlob('abc/{a,b}.js'); -//=> true -``` - -Extglobs - -```js -isGlob('abc/@(a).js'); -isGlob('abc/!(a).js'); -isGlob('abc/+(a).js'); -isGlob('abc/*(a).js'); -isGlob('abc/?(a).js'); -//=> true -``` - -**False** - -Escaped globs or extglobs return `false`: - -```js -isGlob('abc/\\@(a).js'); -isGlob('abc/\\!(a).js'); -isGlob('abc/\\+(a).js'); -isGlob('abc/\\*(a).js'); -isGlob('abc/\\?(a).js'); -isGlob('\\!foo.js'); -isGlob('\\*.js'); -isGlob('\\*\\*/abc.js'); -isGlob('abc/\\*.js'); -isGlob('abc/\\(aaa|bbb).js'); -isGlob('abc/\\[a-z].js'); -isGlob('abc/\\{a,b}.js'); -//=> false -``` - -Patterns that do not have glob patterns return `false`: - -```js -isGlob('abc.js'); -isGlob('abc/def/ghi.js'); -isGlob('foo.js'); -isGlob('abc/@.js'); -isGlob('abc/+.js'); -isGlob('abc/?.js'); -isGlob(); -isGlob(null); -//=> false -``` - -Arrays are also `false` (If you want to check if an array has a glob pattern, use [has-glob](https://github.com/jonschlinkert/has-glob)): - -```js -isGlob(['**/*.js']); -isGlob(['foo.js']); -//=> false -``` - -### Option strict - -When `options.strict === false` the behavior is less strict in determining if a pattern is a glob. Meaning that -some patterns that would return `false` may return `true`. This is done so that matching libraries like [micromatch](https://github.com/micromatch/micromatch) have a chance at determining if the pattern is a glob or not. - -**True** - -Patterns that have glob characters or regex patterns will return `true`: - -```js -isGlob('!foo.js', {strict: false}); -isGlob('*.js', {strict: false}); -isGlob('**/abc.js', {strict: false}); -isGlob('abc/*.js', {strict: false}); -isGlob('abc/(aaa|bbb).js', {strict: false}); -isGlob('abc/[a-z].js', {strict: false}); -isGlob('abc/{a,b}.js', {strict: false}); -//=> true -``` - -Extglobs - -```js -isGlob('abc/@(a).js', {strict: false}); -isGlob('abc/!(a).js', {strict: false}); -isGlob('abc/+(a).js', {strict: false}); -isGlob('abc/*(a).js', {strict: false}); -isGlob('abc/?(a).js', {strict: false}); -//=> true -``` - -**False** - -Escaped globs or extglobs return `false`: - -```js -isGlob('\\!foo.js', {strict: false}); -isGlob('\\*.js', {strict: false}); -isGlob('\\*\\*/abc.js', {strict: false}); -isGlob('abc/\\*.js', {strict: false}); -isGlob('abc/\\(aaa|bbb).js', {strict: false}); -isGlob('abc/\\[a-z].js', {strict: false}); -isGlob('abc/\\{a,b}.js', {strict: false}); -//=> false -``` - -## About - -
-<details>
-<summary><strong>Contributing</strong></summary>
-
-Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
-
-</details>
-
-<details>
-<summary><strong>Running Tests</strong></summary>
-
-Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
-
-```sh
-$ npm install && npm test
-```
-
-</details>
-
-<details>
-<summary><strong>Building docs</strong></summary>
-
-_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
-
-To generate the readme, run the following command:
-
-```sh
-$ npm install -g verbose/verb#dev verb-generate-readme && verb
-```
-
-</details>
- -### Related projects - -You might also be interested in these projects: - -* [assemble](https://www.npmjs.com/package/assemble): Get the rocks out of your socks! Assemble makes you fast at creating web projects… [more](https://github.com/assemble/assemble) | [homepage](https://github.com/assemble/assemble "Get the rocks out of your socks! Assemble makes you fast at creating web projects. Assemble is used by thousands of projects for rapid prototyping, creating themes, scaffolds, boilerplates, e-books, UI components, API documentation, blogs, building websit") -* [base](https://www.npmjs.com/package/base): Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks | [homepage](https://github.com/node-base/base "Framework for rapidly creating high quality, server-side node.js applications, using plugins like building blocks") -* [update](https://www.npmjs.com/package/update): Be scalable! Update is a new, open source developer framework and CLI for automating updates… [more](https://github.com/update/update) | [homepage](https://github.com/update/update "Be scalable! Update is a new, open source developer framework and CLI for automating updates of any kind in code projects.") -* [verb](https://www.npmjs.com/package/verb): Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used… [more](https://github.com/verbose/verb) | [homepage](https://github.com/verbose/verb "Documentation generator for GitHub projects. Verb is extremely powerful, easy to use, and is used on hundreds of projects of all sizes to generate everything from API docs to readmes.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 47 | [jonschlinkert](https://github.com/jonschlinkert) | -| 5 | [doowb](https://github.com/doowb) | -| 1 | [phated](https://github.com/phated) | -| 1 | [danhper](https://github.com/danhper) | -| 1 | [paulmillr](https://github.com/paulmillr) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on March 27, 2019._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js deleted file mode 100644 index 620f563..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/index.js +++ /dev/null @@ -1,150 +0,0 @@ -/*! - * is-glob - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -var isExtglob = require('is-extglob'); -var chars = { '{': '}', '(': ')', '[': ']'}; -var strictCheck = function(str) { - if (str[0] === '!') { - return true; - } - var index = 0; - var pipeIndex = -2; - var closeSquareIndex = -2; - var closeCurlyIndex = -2; - var closeParenIndex = -2; - var backSlashIndex = -2; - while (index < str.length) { - if (str[index] === '*') { - return true; - } - - if (str[index + 1] === '?' 
&& /[\].+)]/.test(str[index])) { - return true; - } - - if (closeSquareIndex !== -1 && str[index] === '[' && str[index + 1] !== ']') { - if (closeSquareIndex < index) { - closeSquareIndex = str.indexOf(']', index); - } - if (closeSquareIndex > index) { - if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { - return true; - } - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeSquareIndex) { - return true; - } - } - } - - if (closeCurlyIndex !== -1 && str[index] === '{' && str[index + 1] !== '}') { - closeCurlyIndex = str.indexOf('}', index); - if (closeCurlyIndex > index) { - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeCurlyIndex) { - return true; - } - } - } - - if (closeParenIndex !== -1 && str[index] === '(' && str[index + 1] === '?' && /[:!=]/.test(str[index + 2]) && str[index + 3] !== ')') { - closeParenIndex = str.indexOf(')', index); - if (closeParenIndex > index) { - backSlashIndex = str.indexOf('\\', index); - if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { - return true; - } - } - } - - if (pipeIndex !== -1 && str[index] === '(' && str[index + 1] !== '|') { - if (pipeIndex < index) { - pipeIndex = str.indexOf('|', index); - } - if (pipeIndex !== -1 && str[pipeIndex + 1] !== ')') { - closeParenIndex = str.indexOf(')', pipeIndex); - if (closeParenIndex > pipeIndex) { - backSlashIndex = str.indexOf('\\', pipeIndex); - if (backSlashIndex === -1 || backSlashIndex > closeParenIndex) { - return true; - } - } - } - } - - if (str[index] === '\\') { - var open = str[index + 1]; - index += 2; - var close = chars[open]; - - if (close) { - var n = str.indexOf(close, index); - if (n !== -1) { - index = n + 1; - } - } - - if (str[index] === '!') { - return true; - } - } else { - index++; - } - } - return false; -}; - -var relaxedCheck = function(str) { - if (str[0] === '!') { - return true; - } - var index = 0; - while (index < str.length) { - if (/[*?{}()[\]]/.test(str[index])) { - return true; - } - - if (str[index] === '\\') { - var open = str[index + 1]; - index += 2; - var close = chars[open]; - - if (close) { - var n = str.indexOf(close, index); - if (n !== -1) { - index = n + 1; - } - } - - if (str[index] === '!') { - return true; - } - } else { - index++; - } - } - return false; -}; - -module.exports = function isGlob(str, options) { - if (typeof str !== 'string' || str === '') { - return false; - } - - if (isExtglob(str)) { - return true; - } - - var check = strictCheck; - - // optionally relax check - if (options && options.strict === false) { - check = relaxedCheck; - } - - return check(str); -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json deleted file mode 100644 index 858af03..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-glob/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "is-glob", - "description": "Returns `true` if the given string looks like a glob pattern or an extglob pattern. 
This makes it easy to create code that only uses external modules like node-glob when necessary, resulting in much faster code execution and initialization time, and a better user experience.", - "version": "4.0.3", - "homepage": "https://github.com/micromatch/is-glob", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Brian Woodward (https://twitter.com/doowb)", - "Daniel Perez (https://tuvistavie.com)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)" - ], - "repository": "micromatch/is-glob", - "bugs": { - "url": "https://github.com/micromatch/is-glob/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.10.0" - }, - "scripts": { - "test": "mocha && node benchmark.js" - }, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "devDependencies": { - "gulp-format-md": "^0.1.10", - "mocha": "^3.0.2" - }, - "keywords": [ - "bash", - "braces", - "check", - "exec", - "expression", - "extglob", - "glob", - "globbing", - "globstar", - "is", - "match", - "matches", - "pattern", - "regex", - "regular", - "string", - "test" - ], - "verb": { - "layout": "default", - "plugins": [ - "gulp-format-md" - ], - "related": { - "list": [ - "assemble", - "base", - "update", - "verb" - ] - }, - "reflinks": [ - "assemble", - "bach", - "base", - "composer", - "gulp", - "has-glob", - "is-valid-glob", - "micromatch", - "npm", - "scaffold", - "verb", - "vinyl" - ] - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE deleted file mode 100644 index 9af4a67..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
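Both is-extglob and is-glob, whose sources are removed above, exist (per their own READMEs) so that callers can skip a full glob matcher for plain literal paths. A short sketch of that calling pattern, assuming fast-glob as the matcher; the `resolveInputs` helper name is made up for illustration:

```js
// Sketch of a typical caller (assumed code, not part of the deleted modules):
// only hand a pattern to the glob matcher when it actually contains glob syntax.
const isGlob = require('is-glob');
const fg = require('fast-glob');

async function resolveInputs(patterns) {
  const literal = [];
  const globs = [];
  for (const pattern of patterns) {
    // Pass { strict: false } instead to let borderline patterns through to the matcher.
    (isGlob(pattern) ? globs : literal).push(pattern);
  }
  const matched = globs.length > 0 ? await fg(globs) : [];
  return [...literal, ...matched];
}

// resolveInputs(['src/index.js', 'src/**/*.test.js']).then(console.log);
```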
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md deleted file mode 100644 index eb8149e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/README.md +++ /dev/null @@ -1,187 +0,0 @@ -# is-number [![NPM version](https://img.shields.io/npm/v/is-number.svg?style=flat)](https://www.npmjs.com/package/is-number) [![NPM monthly downloads](https://img.shields.io/npm/dm/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![NPM total downloads](https://img.shields.io/npm/dt/is-number.svg?style=flat)](https://npmjs.org/package/is-number) [![Linux Build Status](https://img.shields.io/travis/jonschlinkert/is-number.svg?style=flat&label=Travis)](https://travis-ci.org/jonschlinkert/is-number) - -> Returns true if the value is a finite number. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save is-number -``` - -## Why is this needed? - -In JavaScript, it's not always as straightforward as it should be to reliably check if a value is a number. It's common for devs to use `+`, `-`, or `Number()` to cast a string value to a number (for example, when values are returned from user input, regex matches, parsers, etc). But there are many non-intuitive edge cases that yield unexpected results: - -```js -console.log(+[]); //=> 0 -console.log(+''); //=> 0 -console.log(+' '); //=> 0 -console.log(typeof NaN); //=> 'number' -``` - -This library offers a performant way to smooth out edge cases like these. - -## Usage - -```js -const isNumber = require('is-number'); -``` - -See the [tests](./test.js) for more examples. - -### true - -```js -isNumber(5e3); // true -isNumber(0xff); // true -isNumber(-1.1); // true -isNumber(0); // true -isNumber(1); // true -isNumber(1.1); // true -isNumber(10); // true -isNumber(10.10); // true -isNumber(100); // true -isNumber('-1.1'); // true -isNumber('0'); // true -isNumber('012'); // true -isNumber('0xff'); // true -isNumber('1'); // true -isNumber('1.1'); // true -isNumber('10'); // true -isNumber('10.10'); // true -isNumber('100'); // true -isNumber('5e3'); // true -isNumber(parseInt('012')); // true -isNumber(parseFloat('012')); // true -``` - -### False - -Everything else is false, as you would expect: - -```js -isNumber(Infinity); // false -isNumber(NaN); // false -isNumber(null); // false -isNumber(undefined); // false -isNumber(''); // false -isNumber(' '); // false -isNumber('foo'); // false -isNumber([1]); // false -isNumber([]); // false -isNumber(function () {}); // false -isNumber({}); // false -``` - -## Release history - -### 7.0.0 - -* Refactor. Now uses `.isFinite` if it exists. -* Performance is about the same as v6.0 when the value is a string or number. But it's now 3x-4x faster when the value is not a string or number. - -### 6.0.0 - -* Optimizations, thanks to @benaadams. - -### 5.0.0 - -**Breaking changes** - -* removed support for `instanceof Number` and `instanceof String` - -## Benchmarks - -As with all benchmarks, take these with a grain of salt. See the [benchmarks](./benchmark/index.js) for more detail. 
- -``` -# all -v7.0 x 413,222 ops/sec ±2.02% (86 runs sampled) -v6.0 x 111,061 ops/sec ±1.29% (85 runs sampled) -parseFloat x 317,596 ops/sec ±1.36% (86 runs sampled) -fastest is 'v7.0' - -# string -v7.0 x 3,054,496 ops/sec ±1.05% (89 runs sampled) -v6.0 x 2,957,781 ops/sec ±0.98% (88 runs sampled) -parseFloat x 3,071,060 ops/sec ±1.13% (88 runs sampled) -fastest is 'parseFloat,v7.0' - -# number -v7.0 x 3,146,895 ops/sec ±0.89% (89 runs sampled) -v6.0 x 3,214,038 ops/sec ±1.07% (89 runs sampled) -parseFloat x 3,077,588 ops/sec ±1.07% (87 runs sampled) -fastest is 'v6.0' -``` - -## About - -
-<details>
-<summary><strong>Contributing</strong></summary>
-
-Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
-
-</details>
-
-<details>
-<summary><strong>Running Tests</strong></summary>
-
-Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
-
-```sh
-$ npm install && npm test
-```
-
-</details>
-
-<details>
-<summary><strong>Building docs</strong></summary>
-
-_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
-
-To generate the readme, run the following command:
-
-```sh
-$ npm install -g verbose/verb#dev verb-generate-readme && verb
-```
-
-</details>
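Tying back to the "Why is this needed?" section above, the snippet below (a standalone illustration, not part of the deleted README) contrasts naive unary-plus coercion with `is-number`:

```js
// Illustration of the coercion edge cases listed in the README above
// (standalone snippet, not part of the deleted files).
const isNumber = require('is-number');

console.log(+'' === 0);          // true  – the unary plus quietly coerces '' to 0
console.log(isNumber(''));       // false – rejected, despite coercing to 0
console.log(isNumber(' '));      // false
console.log(isNumber('5e3'));    // true  – numeric strings are accepted
console.log(isNumber(NaN));      // false – typeof NaN is 'number', but it is not finite
console.log(isNumber(Infinity)); // false
```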
- -### Related projects - -You might also be interested in these projects: - -* [is-plain-object](https://www.npmjs.com/package/is-plain-object): Returns true if an object was created by the `Object` constructor. | [homepage](https://github.com/jonschlinkert/is-plain-object "Returns true if an object was created by the `Object` constructor.") -* [is-primitive](https://www.npmjs.com/package/is-primitive): Returns `true` if the value is a primitive. | [homepage](https://github.com/jonschlinkert/is-primitive "Returns `true` if the value is a primitive. ") -* [isobject](https://www.npmjs.com/package/isobject): Returns true if the value is an object and not an array or null. | [homepage](https://github.com/jonschlinkert/isobject "Returns true if the value is an object and not an array or null.") -* [kind-of](https://www.npmjs.com/package/kind-of): Get the native type of a value. | [homepage](https://github.com/jonschlinkert/kind-of "Get the native type of a value.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 49 | [jonschlinkert](https://github.com/jonschlinkert) | -| 5 | [charlike-old](https://github.com/charlike-old) | -| 1 | [benaadams](https://github.com/benaadams) | -| 1 | [realityking](https://github.com/realityking) | - -### Author - -**Jon Schlinkert** - -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) - -### License - -Copyright © 2018, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 15, 2018._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js deleted file mode 100644 index 27f19b7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/index.js +++ /dev/null @@ -1,18 +0,0 @@ -/*! - * is-number - * - * Copyright (c) 2014-present, Jon Schlinkert. - * Released under the MIT License. - */ - -'use strict'; - -module.exports = function(num) { - if (typeof num === 'number') { - return num - num === 0; - } - if (typeof num === 'string' && num.trim() !== '') { - return Number.isFinite ? Number.isFinite(+num) : isFinite(+num); - } - return false; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json deleted file mode 100644 index 3715072..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-number/package.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "name": "is-number", - "description": "Returns true if a number or string value is a finite number. 
Useful for regex matches, parsing, user input, etc.", - "version": "7.0.0", - "homepage": "https://github.com/jonschlinkert/is-number", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Olsten Larck (https://i.am.charlike.online)", - "Rouven Weßling (www.rouvenwessling.de)" - ], - "repository": "jonschlinkert/is-number", - "bugs": { - "url": "https://github.com/jonschlinkert/is-number/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=0.12.0" - }, - "scripts": { - "test": "mocha" - }, - "devDependencies": { - "ansi": "^0.3.1", - "benchmark": "^2.1.4", - "gulp-format-md": "^1.0.0", - "mocha": "^3.5.3" - }, - "keywords": [ - "cast", - "check", - "coerce", - "coercion", - "finite", - "integer", - "is", - "isnan", - "is-nan", - "is-num", - "is-number", - "isnumber", - "isfinite", - "istype", - "kind", - "math", - "nan", - "num", - "number", - "numeric", - "parseFloat", - "parseInt", - "test", - "type", - "typeof", - "value" - ], - "verb": { - "toc": false, - "layout": "default", - "tasks": [ - "readme" - ], - "related": { - "list": [ - "is-plain-object", - "is-primitive", - "isobject", - "kind-of" - ] - }, - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts deleted file mode 100644 index eee2e83..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.d.ts +++ /dev/null @@ -1,79 +0,0 @@ -import * as stream from 'stream'; - -declare const isStream: { - /** - @returns Whether `stream` is a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). - - @example - ``` - import * as fs from 'fs'; - import isStream = require('is-stream'); - - isStream(fs.createReadStream('unicorn.png')); - //=> true - - isStream({}); - //=> false - ``` - */ - (stream: unknown): stream is stream.Stream; - - /** - @returns Whether `stream` is a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). - - @example - ``` - import * as fs from 'fs'; - import isStream = require('is-stream'); - - isStream.writable(fs.createWriteStrem('unicorn.txt')); - //=> true - ``` - */ - writable(stream: unknown): stream is stream.Writable; - - /** - @returns Whether `stream` is a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). - - @example - ``` - import * as fs from 'fs'; - import isStream = require('is-stream'); - - isStream.readable(fs.createReadStream('unicorn.png')); - //=> true - ``` - */ - readable(stream: unknown): stream is stream.Readable; - - /** - @returns Whether `stream` is a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). - - @example - ``` - import {Duplex} from 'stream'; - import isStream = require('is-stream'); - - isStream.duplex(new Duplex()); - //=> true - ``` - */ - duplex(stream: unknown): stream is stream.Duplex; - - /** - @returns Whether `stream` is a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). 
- - @example - ``` - import * as fs from 'fs'; - import Stringify = require('streaming-json-stringify'); - import isStream = require('is-stream'); - - isStream.transform(Stringify()); - //=> true - ``` - */ - transform(input: unknown): input is stream.Transform; -}; - -export = isStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js deleted file mode 100644 index 2e43434..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/index.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -const isStream = stream => - stream !== null && - typeof stream === 'object' && - typeof stream.pipe === 'function'; - -isStream.writable = stream => - isStream(stream) && - stream.writable !== false && - typeof stream._write === 'function' && - typeof stream._writableState === 'object'; - -isStream.readable = stream => - isStream(stream) && - stream.readable !== false && - typeof stream._read === 'function' && - typeof stream._readableState === 'object'; - -isStream.duplex = stream => - isStream.writable(stream) && - isStream.readable(stream); - -isStream.transform = stream => - isStream.duplex(stream) && - typeof stream._transform === 'function'; - -module.exports = isStream; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
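The is-stream typings and implementation above duck-type a stream as any object with a `pipe` method, plus internal `_read`/`_write` state for the more specific checks. A hedged sketch of the usual calling pattern, normalizing a path-or-stream argument; the `toReadableStream` helper is assumed, not part of the deleted files:

```js
// Sketch (assumed helper, not part of the deleted files): accept either a
// file path or an already-open readable stream, using the checks above.
const fs = require('fs');
const isStream = require('is-stream');

function toReadableStream(input) {
  if (isStream.readable(input)) return input;              // already a readable stream
  if (typeof input === 'string') return fs.createReadStream(input);
  throw new TypeError('Expected a file path or a readable stream');
}

// toReadableStream(process.stdin) and toReadableStream('unicorn.png')
// both yield something safe to pipe from.
```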
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json deleted file mode 100644 index c3b5673..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "is-stream", - "version": "2.0.1", - "description": "Check if something is a Node.js stream", - "license": "MIT", - "repository": "sindresorhus/is-stream", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "stream", - "type", - "streams", - "writable", - "readable", - "duplex", - "transform", - "check", - "detect", - "is" - ], - "devDependencies": { - "@types/node": "^11.13.6", - "ava": "^1.4.1", - "tempy": "^0.3.0", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md deleted file mode 100644 index 19308e7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/is-stream/readme.md +++ /dev/null @@ -1,60 +0,0 @@ -# is-stream - -> Check if something is a [Node.js stream](https://nodejs.org/api/stream.html) - -## Install - -``` -$ npm install is-stream -``` - -## Usage - -```js -const fs = require('fs'); -const isStream = require('is-stream'); - -isStream(fs.createReadStream('unicorn.png')); -//=> true - -isStream({}); -//=> false -``` - -## API - -### isStream(stream) - -Returns a `boolean` for whether it's a [`Stream`](https://nodejs.org/api/stream.html#stream_stream). - -#### isStream.writable(stream) - -Returns a `boolean` for whether it's a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable). - -#### isStream.readable(stream) - -Returns a `boolean` for whether it's a [`stream.Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable). - -#### isStream.duplex(stream) - -Returns a `boolean` for whether it's a [`stream.Duplex`](https://nodejs.org/api/stream.html#stream_class_stream_duplex). - -#### isStream.transform(stream) - -Returns a `boolean` for whether it's a [`stream.Transform`](https://nodejs.org/api/stream.html#stream_class_stream_transform). - -## Related - -- [is-file-stream](https://github.com/jamestalmage/is-file-stream) - Detect if a stream is a file stream - ---- - -
-Get professional support for this package with a Tidelift subscription
-
-Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
-
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore deleted file mode 100644 index c1cb757..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -.nyc_output/ -coverage/ diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE deleted file mode 100644 index 19129e3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md deleted file mode 100644 index 35769e8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/README.md +++ /dev/null @@ -1,51 +0,0 @@ -# isexe - -Minimal module to check if a file is executable, and a normal file. - -Uses `fs.stat` and tests against the `PATHEXT` environment variable on -Windows. - -## USAGE - -```javascript -var isexe = require('isexe') -isexe('some-file-name', function (err, isExe) { - if (err) { - console.error('probably file does not exist or something', err) - } else if (isExe) { - console.error('this thing can be run') - } else { - console.error('cannot be run') - } -}) - -// same thing but synchronous, throws errors -var isExe = isexe.sync('some-file-name') - -// treat errors as just "not executable" -isexe('maybe-missing-file', { ignoreErrors: true }, callback) -var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) -``` - -## API - -### `isexe(path, [options], [callback])` - -Check if the path is executable. If no callback provided, and a -global `Promise` object is available, then a Promise will be returned. - -Will raise whatever errors may be raised by `fs.stat`, unless -`options.ignoreErrors` is set to true. - -### `isexe.sync(path, [options])` - -Same as `isexe` but returns the value and throws any errors raised. - -### Options - -* `ignoreErrors` Treat all errors as "no, this is not executable", but - don't raise them. -* `uid` Number to use as the user id -* `gid` Number to use as the group id -* `pathExt` List of path extensions to use instead of `PATHEXT` - environment variable on Windows. 
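The isexe README above notes that omitting the callback yields a Promise when a global `Promise` exists, and that `ignoreErrors` downgrades lookup failures to a plain `false`. A small sketch combining both (the helper name and candidate paths are placeholders for illustration):

```js
// Promise/async form of the isexe API documented above
// (helper name and paths are placeholders, not part of the deleted files).
const isexe = require('isexe');

async function firstExecutable(candidates) {
  for (const candidate of candidates) {
    // ignoreErrors treats missing files as "not executable" instead of throwing.
    if (await isexe(candidate, { ignoreErrors: true })) {
      return candidate;
    }
  }
  return null;
}

// firstExecutable(['./scripts/build.sh', '/usr/bin/env']).then(console.log);
```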
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js deleted file mode 100644 index 553fb32..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/index.js +++ /dev/null @@ -1,57 +0,0 @@ -var fs = require('fs') -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = require('./windows.js') -} else { - core = require('./mode.js') -} - -module.exports = isexe -isexe.sync = sync - -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } - - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } - - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) - } - - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false - } - } - cb(er, is) - }) -} - -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js deleted file mode 100644 index 1995ea4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/mode.js +++ /dev/null @@ -1,41 +0,0 @@ -module.exports = isexe -isexe.sync = sync - -var fs = require('fs') - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} - -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} - -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid - - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() - - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g - - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 - - return ret -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json deleted file mode 100644 index e452689..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "isexe", - "version": "2.0.0", - "description": "Minimal module to check if a file is executable.", - "main": "index.js", - "directories": { - "test": "test" - }, - "devDependencies": { - "mkdirp": "^0.5.1", - "rimraf": "^2.5.0", - "tap": "^10.3.0" - }, - "scripts": { - "test": "tap test/*.js --100", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --all; git push origin --tags" - }, - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "license": "ISC", - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/isexe.git" - }, - "keywords": [], - "bugs": { - "url": "https://github.com/isaacs/isexe/issues" - }, - "homepage": "https://github.com/isaacs/isexe#readme" -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js deleted file mode 100644 index d926df6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/test/basic.js +++ /dev/null @@ -1,221 +0,0 @@ -var t = require('tap') -var fs = require('fs') -var path = require('path') -var fixture = path.resolve(__dirname, 'fixtures') -var meow = fixture + '/meow.cat' -var mine = fixture + '/mine.cat' -var ours = fixture + '/ours.cat' -var fail = fixture + '/fail.false' -var noent = fixture + '/enoent.exe' -var mkdirp = require('mkdirp') -var rimraf = require('rimraf') - -var isWindows = process.platform === 'win32' -var hasAccess = typeof fs.access === 'function' -var winSkip = isWindows && 'windows' -var accessSkip = !hasAccess && 'no fs.access function' -var hasPromise = typeof Promise === 'function' -var promiseSkip = !hasPromise && 'no global Promise' - -function reset () { - delete require.cache[require.resolve('../')] - return require('../') -} - -t.test('setup fixtures', function (t) { - rimraf.sync(fixture) - mkdirp.sync(fixture) - fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n') - fs.chmodSync(meow, parseInt('0755', 8)) - fs.writeFileSync(fail, '#!/usr/bin/env false\n') - fs.chmodSync(fail, parseInt('0644', 8)) - fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n') - fs.chmodSync(mine, parseInt('0744', 8)) - fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n') - fs.chmodSync(ours, parseInt('0754', 8)) - t.end() -}) - -t.test('promise', { skip: promiseSkip }, function (t) { - var isexe = reset() - t.test('meow async', function (t) { - isexe(meow).then(function (is) { - t.ok(is) - t.end() - }) - }) - t.test('fail async', function (t) { - isexe(fail).then(function (is) { - t.notOk(is) - t.end() - }) - }) - t.test('noent async', function (t) { - isexe(noent).catch(function (er) { - t.ok(er) - t.end() - }) - }) - t.test('noent ignore async', function (t) { - isexe(noent, { ignoreErrors: true }).then(function (is) { - t.notOk(is) - t.end() - }) - }) - t.end() -}) - -t.test('no promise', function (t) { - global.Promise = null - var isexe = reset() - t.throws('try to meow a promise', function () { - isexe(meow) - }) - t.end() -}) - -t.test('access', { skip: accessSkip || winSkip }, function (t) { - runTest(t) -}) - -t.test('mode', { skip: winSkip }, function (t) { - delete fs.access - delete fs.accessSync - var isexe = reset() - t.ok(isexe.sync(ours, { uid: 0, gid: 0 })) - t.ok(isexe.sync(mine, { uid: 0, gid: 0 })) - runTest(t) -}) - -t.test('windows', function (t) { - global.TESTING_WINDOWS = true - var pathExt = '.EXE;.CAT;.CMD;.COM' - t.test('pathExt option', function (t) { - runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' }) - }) - t.test('pathExt env', function (t) { - process.env.PATHEXT = pathExt - runTest(t) - }) - t.test('no pathExt', function (t) { - // with a pathExt of '', any filename is fine. - // so the "fail" one would still pass. - runTest(t, { pathExt: '', skipFail: true }) - }) - t.test('pathext with empty entry', function (t) { - // with a pathExt of '', any filename is fine. - // so the "fail" one would still pass. 
- runTest(t, { pathExt: ';' + pathExt, skipFail: true }) - }) - t.end() -}) - -t.test('cleanup', function (t) { - rimraf.sync(fixture) - t.end() -}) - -function runTest (t, options) { - var isexe = reset() - - var optionsIgnore = Object.create(options || {}) - optionsIgnore.ignoreErrors = true - - if (!options || !options.skipFail) { - t.notOk(isexe.sync(fail, options)) - } - t.notOk(isexe.sync(noent, optionsIgnore)) - if (!options) { - t.ok(isexe.sync(meow)) - } else { - t.ok(isexe.sync(meow, options)) - } - - t.ok(isexe.sync(mine, options)) - t.ok(isexe.sync(ours, options)) - t.throws(function () { - isexe.sync(noent, options) - }) - - t.test('meow async', function (t) { - if (!options) { - isexe(meow, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - } else { - isexe(meow, options, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - } - }) - - t.test('mine async', function (t) { - isexe(mine, options, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - }) - - t.test('ours async', function (t) { - isexe(ours, options, function (er, is) { - if (er) { - throw er - } - t.ok(is) - t.end() - }) - }) - - if (!options || !options.skipFail) { - t.test('fail async', function (t) { - isexe(fail, options, function (er, is) { - if (er) { - throw er - } - t.notOk(is) - t.end() - }) - }) - } - - t.test('noent async', function (t) { - isexe(noent, options, function (er, is) { - t.ok(er) - t.notOk(is) - t.end() - }) - }) - - t.test('noent ignore async', function (t) { - isexe(noent, optionsIgnore, function (er, is) { - if (er) { - throw er - } - t.notOk(is) - t.end() - }) - }) - - t.test('directory is not executable', function (t) { - isexe(__dirname, options, function (er, is) { - if (er) { - throw er - } - t.notOk(is) - t.end() - }) - }) - - t.end() -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js deleted file mode 100644 index 3499673..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/isexe/windows.js +++ /dev/null @@ -1,42 +0,0 @@ -module.exports = isexe -isexe.sync = sync - -var fs = require('fs') - -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? - options.pathExt : process.env.PATHEXT - - if (!pathext) { - return true - } - - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true - } - } - return false -} - -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) -} - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? 
false : checkStat(stat, path, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE deleted file mode 100644 index 94a4c0a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Stephen Sugden (stephensugden.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md deleted file mode 100644 index 0d54841..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# merge-stream - -Merge (interleave) a bunch of streams. - -[![build status](https://secure.travis-ci.org/grncdr/merge-stream.svg?branch=master)](http://travis-ci.org/grncdr/merge-stream) - -## Synopsis - -```javascript -var stream1 = new Stream(); -var stream2 = new Stream(); - -var merged = mergeStream(stream1, stream2); - -var stream3 = new Stream(); -merged.add(stream3); -merged.isEmpty(); -//=> false -``` - -## Description - -This is adapted from [event-stream](https://github.com/dominictarr/event-stream) separated into a new module, using Streams3. - -## API - -### `mergeStream` - -Type: `function` - -Merges an arbitrary number of streams. Returns a merged stream. - -#### `merged.add` - -A method to dynamically add more sources to the stream. The argument supplied to `add` can be either a source or an array of sources. - -#### `merged.isEmpty` - -A method that tells you if the merged stream is empty. - -When a stream is "empty" (aka. no sources were added), it could not be returned to a gulp task. - -So, we could do something like this: - -```js -stream = require('merge-stream')(); -// Something like a loop to add some streams to the merge stream -// stream.add(streamA); -// stream.add(streamB); -return stream.isEmpty() ? 
null : stream; -``` - -## Gulp example - -An example use case for **merge-stream** is to combine parts of a task in a project's **gulpfile.js** like this: - -```js -const gulp = require('gulp'); -const htmlValidator = require('gulp-w3c-html-validator'); -const jsHint = require('gulp-jshint'); -const mergeStream = require('merge-stream'); - -function lint() { - return mergeStream( - gulp.src('src/*.html') - .pipe(htmlValidator()) - .pipe(htmlValidator.reporter()), - gulp.src('src/*.js') - .pipe(jsHint()) - .pipe(jsHint.reporter()) - ); -} -gulp.task('lint', lint); -``` - -## License - -MIT diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js deleted file mode 100644 index b1a9e1a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/index.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -const { PassThrough } = require('stream'); - -module.exports = function (/*streams...*/) { - var sources = [] - var output = new PassThrough({objectMode: true}) - - output.setMaxListeners(0) - - output.add = add - output.isEmpty = isEmpty - - output.on('unpipe', remove) - - Array.prototype.slice.call(arguments).forEach(add) - - return output - - function add (source) { - if (Array.isArray(source)) { - source.forEach(add) - return this - } - - sources.push(source); - source.once('end', remove.bind(null, source)) - source.once('error', output.emit.bind(output, 'error')) - source.pipe(output, {end: false}) - return this - } - - function isEmpty () { - return sources.length == 0; - } - - function remove (source) { - sources = sources.filter(function (it) { return it !== source }) - if (!sources.length && output.readable) { output.end() } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json deleted file mode 100644 index 1a4c54c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge-stream/package.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name": "merge-stream", - "version": "2.0.0", - "description": "Create a stream that emits events from multiple other streams", - "files": [ - "index.js" - ], - "scripts": { - "test": "istanbul cover test.js && istanbul check-cover --statements 100 --branches 100" - }, - "repository": "grncdr/merge-stream", - "author": "Stephen Sugden ", - "license": "MIT", - "dependencies": {}, - "devDependencies": { - "from2": "^2.0.3", - "istanbul": "^0.4.5" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE deleted file mode 100644 index 31dd9c7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-2020 Teambition - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md deleted file mode 100644 index 27f8eb9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/README.md +++ /dev/null @@ -1,144 +0,0 @@ -# merge2 - -Merge multiple streams into one stream in sequence or parallel. - -[![NPM version][npm-image]][npm-url] -[![Build Status][travis-image]][travis-url] -[![Downloads][downloads-image]][downloads-url] - -## Install - -Install with [npm](https://npmjs.org/package/merge2) - -```sh -npm install merge2 -``` - -## Usage - -```js -const gulp = require('gulp') -const merge2 = require('merge2') -const concat = require('gulp-concat') -const minifyHtml = require('gulp-minify-html') -const ngtemplate = require('gulp-ngtemplate') - -gulp.task('app-js', function () { - return merge2( - gulp.src('static/src/tpl/*.html') - .pipe(minifyHtml({empty: true})) - .pipe(ngtemplate({ - module: 'genTemplates', - standalone: true - }) - ), gulp.src([ - 'static/src/js/app.js', - 'static/src/js/locale_zh-cn.js', - 'static/src/js/router.js', - 'static/src/js/tools.js', - 'static/src/js/services.js', - 'static/src/js/filters.js', - 'static/src/js/directives.js', - 'static/src/js/controllers.js' - ]) - ) - .pipe(concat('app.js')) - .pipe(gulp.dest('static/dist/js/')) -}) -``` - -```js -const stream = merge2([stream1, stream2], stream3, {end: false}) -//... -stream.add(stream4, stream5) -//.. -stream.end() -``` - -```js -// equal to merge2([stream1, stream2], stream3) -const stream = merge2() -stream.add([stream1, stream2]) -stream.add(stream3) -``` - -```js -// merge order: -// 1. merge `stream1`; -// 2. merge `stream2` and `stream3` in parallel after `stream1` merged; -// 3. merge 'stream4' after `stream2` and `stream3` merged; -const stream = merge2(stream1, [stream2, stream3], stream4) - -// merge order: -// 1. merge `stream5` and `stream6` in parallel after `stream4` merged; -// 2. merge 'stream7' after `stream5` and `stream6` merged; -stream.add([stream5, stream6], stream7) -``` - -```js -// nest merge -// equal to merge2(stream1, stream2, stream6, stream3, [stream4, stream5]); -const streamA = merge2(stream1, stream2) -const streamB = merge2(stream3, [stream4, stream5]) -const stream = merge2(streamA, streamB) -streamA.add(stream6) -``` - -## API - -```js -const merge2 = require('merge2') -``` - -### merge2() - -### merge2(options) - -### merge2(stream1, stream2, ..., streamN) - -### merge2(stream1, stream2, ..., streamN, options) - -### merge2(stream1, [stream2, stream3, ...], streamN, options) - -return a duplex stream (mergedStream). streams in array will be merged in parallel. - -### mergedStream.add(stream) - -### mergedStream.add(stream1, [stream2, stream3, ...], ...) - -return the mergedStream. - -### mergedStream.on('queueDrain', function() {}) - -It will emit 'queueDrain' when all streams merged. 
If you set `end === false` in options, this event give you a notice that should add more streams to merge or end the mergedStream. - -#### stream - -*option* -Type: `Readable` or `Duplex` or `Transform` stream. - -#### options - -*option* -Type: `Object`. - -* **end** - `Boolean` - if `end === false` then mergedStream will not be auto ended, you should end by yourself. **Default:** `undefined` - -* **pipeError** - `Boolean` - if `pipeError === true` then mergedStream will emit `error` event from source streams. **Default:** `undefined` - -* **objectMode** - `Boolean` . **Default:** `true` - -`objectMode` and other options(`highWaterMark`, `defaultEncoding` ...) is same as Node.js `Stream`. - -## License - -MIT © [Teambition](https://www.teambition.com) - -[npm-url]: https://npmjs.org/package/merge2 -[npm-image]: http://img.shields.io/npm/v/merge2.svg - -[travis-url]: https://travis-ci.org/teambition/merge2 -[travis-image]: http://img.shields.io/travis/teambition/merge2.svg - -[downloads-url]: https://npmjs.org/package/merge2 -[downloads-image]: http://img.shields.io/npm/dm/merge2.svg?style=flat-square diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js deleted file mode 100644 index 78a61ed..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/index.js +++ /dev/null @@ -1,144 +0,0 @@ -'use strict' -/* - * merge2 - * https://github.com/teambition/merge2 - * - * Copyright (c) 2014-2020 Teambition - * Licensed under the MIT license. - */ -const Stream = require('stream') -const PassThrough = Stream.PassThrough -const slice = Array.prototype.slice - -module.exports = merge2 - -function merge2 () { - const streamsQueue = [] - const args = slice.call(arguments) - let merging = false - let options = args[args.length - 1] - - if (options && !Array.isArray(options) && options.pipe == null) { - args.pop() - } else { - options = {} - } - - const doEnd = options.end !== false - const doPipeError = options.pipeError === true - if (options.objectMode == null) { - options.objectMode = true - } - if (options.highWaterMark == null) { - options.highWaterMark = 64 * 1024 - } - const mergedStream = PassThrough(options) - - function addStream () { - for (let i = 0, len = arguments.length; i < len; i++) { - streamsQueue.push(pauseStreams(arguments[i], options)) - } - mergeStream() - return this - } - - function mergeStream () { - if (merging) { - return - } - merging = true - - let streams = streamsQueue.shift() - if (!streams) { - process.nextTick(endStream) - return - } - if (!Array.isArray(streams)) { - streams = [streams] - } - - let pipesCount = streams.length + 1 - - function next () { - if (--pipesCount > 0) { - return - } - merging = false - mergeStream() - } - - function pipe (stream) { - function onend () { - stream.removeListener('merge2UnpipeEnd', onend) - stream.removeListener('end', onend) - if (doPipeError) { - stream.removeListener('error', onerror) - } - next() - } - function onerror (err) { - mergedStream.emit('error', err) - } - // skip ended stream - if (stream._readableState.endEmitted) { - return next() - } - - stream.on('merge2UnpipeEnd', onend) - stream.on('end', onend) - - if (doPipeError) { - stream.on('error', onerror) - } - - stream.pipe(mergedStream, { end: false }) - // compatible for old stream - stream.resume() - } - - for (let i = 0; i < streams.length; i++) { - pipe(streams[i]) - } - - next() - } - - function endStream () { - merging = false - // emit 
'queueDrain' when all streams merged. - mergedStream.emit('queueDrain') - if (doEnd) { - mergedStream.end() - } - } - - mergedStream.setMaxListeners(0) - mergedStream.add = addStream - mergedStream.on('unpipe', function (stream) { - stream.emit('merge2UnpipeEnd') - }) - - if (args.length) { - addStream.apply(null, args) - } - return mergedStream -} - -// check and pause streams for pipe. -function pauseStreams (streams, options) { - if (!Array.isArray(streams)) { - // Backwards-compat with old-style streams - if (!streams._readableState && streams.pipe) { - streams = streams.pipe(PassThrough(options)) - } - if (!streams._readableState || !streams.pause || !streams.pipe) { - throw new Error('Only readable stream can be merged.') - } - streams.pause() - } else { - for (let i = 0, len = streams.length; i < len; i++) { - streams[i] = pauseStreams(streams[i], options) - } - } - return streams -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json deleted file mode 100644 index 7777307..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/merge2/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "merge2", - "description": "Merge multiple streams into one stream in sequence or parallel.", - "authors": [ - "Yan Qing " - ], - "license": "MIT", - "version": "1.4.1", - "main": "./index.js", - "repository": { - "type": "git", - "url": "git@github.com:teambition/merge2.git" - }, - "homepage": "https://github.com/teambition/merge2", - "keywords": [ - "merge2", - "multiple", - "sequence", - "parallel", - "merge", - "stream", - "merge stream", - "sync" - ], - "engines": { - "node": ">= 8" - }, - "dependencies": {}, - "devDependencies": { - "standard": "^14.3.4", - "through2": "^3.0.1", - "thunks": "^4.9.6", - "tman": "^1.10.0", - "to-through": "^2.0.0" - }, - "scripts": { - "test": "standard && tman" - }, - "files": [ - "README.md", - "index.js" - ] -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE deleted file mode 100755 index 9af4a67..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
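Note on the merge2 implementation removed above: it drains its queue one entry at a time. Each argument becomes a queue entry, an entry that is an array is piped in parallel, and the next entry only starts once every stream in the current one has ended, which is what produces the "sequence or parallel" behaviour described in its README. A minimal sketch of that ordering, using assumed stream names built with `stream.Readable.from` (none of these names appear in the deleted code):

```js
// merge2 ordering sketch: `first` finishes before `second`/`third` start,
// while `second` and `third` are piped in parallel with each other.
const { Readable } = require('stream');
const merge2 = require('merge2');

const label = (name, values) =>
  Readable.from(values.map(value => ({ name, value })));

const first = label('first', [1, 2]);
const second = label('second', [1]);
const third = label('third', [1]);

merge2(first, [second, third])
  .on('data', chunk => console.log(chunk.name, chunk.value))
  .on('queueDrain', () => console.log('all queued streams merged'));
```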
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md deleted file mode 100644 index d72a059..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/README.md +++ /dev/null @@ -1,1024 +0,0 @@ -# micromatch [![NPM version](https://img.shields.io/npm/v/micromatch.svg?style=flat)](https://www.npmjs.com/package/micromatch) [![NPM monthly downloads](https://img.shields.io/npm/dm/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![NPM total downloads](https://img.shields.io/npm/dt/micromatch.svg?style=flat)](https://npmjs.org/package/micromatch) [![Tests](https://github.com/micromatch/micromatch/actions/workflows/test.yml/badge.svg)](https://github.com/micromatch/micromatch/actions/workflows/test.yml) - -> Glob matching for javascript/node.js. A replacement and faster alternative to minimatch and multimatch. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Table of Contents - -
-Details - - * [Install](#install) -- [Sponsors](#sponsors) - * [Gold Sponsors](#gold-sponsors) - * [Quickstart](#quickstart) - * [Why use micromatch?](#why-use-micromatch) - + [Matching features](#matching-features) - * [Switching to micromatch](#switching-to-micromatch) - + [From minimatch](#from-minimatch) - + [From multimatch](#from-multimatch) - * [API](#api) - * [Options](#options) - * [Options Examples](#options-examples) - + [options.basename](#optionsbasename) - + [options.bash](#optionsbash) - + [options.expandRange](#optionsexpandrange) - + [options.format](#optionsformat) - + [options.ignore](#optionsignore) - + [options.matchBase](#optionsmatchbase) - + [options.noextglob](#optionsnoextglob) - + [options.nonegate](#optionsnonegate) - + [options.noglobstar](#optionsnoglobstar) - + [options.nonull](#optionsnonull) - + [options.nullglob](#optionsnullglob) - + [options.onIgnore](#optionsonignore) - + [options.onMatch](#optionsonmatch) - + [options.onResult](#optionsonresult) - + [options.posixSlashes](#optionsposixslashes) - + [options.unescape](#optionsunescape) - * [Extended globbing](#extended-globbing) - + [Extglobs](#extglobs) - + [Braces](#braces) - + [Regex character classes](#regex-character-classes) - + [Regex groups](#regex-groups) - + [POSIX bracket expressions](#posix-bracket-expressions) - * [Notes](#notes) - + [Bash 4.3 parity](#bash-43-parity) - + [Backslashes](#backslashes) - * [Benchmarks](#benchmarks) - + [Running benchmarks](#running-benchmarks) - + [Latest results](#latest-results) - * [Contributing](#contributing) - * [About](#about) - -
- -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save micromatch -``` - -
- -# Sponsors - -[Become a Sponsor](https://github.com/sponsors/jonschlinkert) to add your logo to this README, or any of [my other projects](https://github.com/jonschlinkert?tab=repositories&q=&type=&language=&sort=stargazers) - -
- -## Quickstart - -```js -const micromatch = require('micromatch'); -// micromatch(list, patterns[, options]); -``` - -The [main export](#micromatch) takes a list of strings and one or more glob patterns: - -```js -console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['f*', 'b*'])) //=> ['foo', 'bar', 'baz'] -console.log(micromatch(['foo', 'bar', 'baz', 'qux'], ['*', '!b*'])) //=> ['foo', 'qux'] -``` - -Use [.isMatch()](#ismatch) to for boolean matching: - -```js -console.log(micromatch.isMatch('foo', 'f*')) //=> true -console.log(micromatch.isMatch('foo', ['b*', 'f*'])) //=> true -``` - -[Switching](#switching-to-micromatch) from minimatch and multimatch is easy! - -
- -## Why use micromatch? - -> micromatch is a [replacement](#switching-to-micromatch) for minimatch and multimatch - -* Supports all of the same matching features as [minimatch](https://github.com/isaacs/minimatch) and [multimatch](https://github.com/sindresorhus/multimatch) -* More complete support for the Bash 4.3 specification than minimatch and multimatch. Micromatch passes _all of the spec tests_ from bash, including some that bash still fails. -* **Fast & Performant** - Loads in about 5ms and performs [fast matches](#benchmarks). -* **Glob matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories -* **[Advanced globbing](#extended-globbing)** - Supports [extglobs](#extglobs), [braces](#braces-1), and [POSIX brackets](#posix-bracket-expressions), and support for escaping special characters with `\` or quotes. -* **Accurate** - Covers more scenarios [than minimatch](https://github.com/yarnpkg/yarn/pull/3339) -* **Well tested** - More than 5,000 [test assertions](./test) -* **Windows support** - More reliable windows support than minimatch and multimatch. -* **[Safe](https://github.com/micromatch/braces#braces-is-safe)** - Micromatch is not subject to DoS with brace patterns like minimatch and multimatch. - -### Matching features - -* Support for multiple glob patterns (no need for wrappers like multimatch) -* Wildcards (`**`, `*.js`) -* Negation (`'!a/*.js'`, `'*!(b).js'`) -* [extglobs](#extglobs) (`+(x|y)`, `!(a|b)`) -* [POSIX character classes](#posix-bracket-expressions) (`[[:alpha:][:digit:]]`) -* [brace expansion](https://github.com/micromatch/braces) (`foo/{1..5}.md`, `bar/{a,b,c}.js`) -* regex character classes (`foo-[1-5].js`) -* regex logical "or" (`foo/(abc|xyz).js`) - -You can mix and match these features to create whatever patterns you need! - -## Switching to micromatch - -_(There is one notable difference between micromatch and minimatch in regards to how backslashes are handled. See [the notes about backslashes](#backslashes) for more information.)_ - -### From minimatch - -Use [micromatch.isMatch()](#ismatch) instead of `minimatch()`: - -```js -console.log(micromatch.isMatch('foo', 'b*')); //=> false -``` - -Use [micromatch.match()](#match) instead of `minimatch.match()`: - -```js -console.log(micromatch.match(['foo', 'bar'], 'b*')); //=> 'bar' -``` - -### From multimatch - -Same signature: - -```js -console.log(micromatch(['foo', 'bar', 'baz'], ['f*', '*z'])); //=> ['foo', 'baz'] -``` - -## API - -**Params** - -* `list` **{String|Array}**: List of strings to match. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) -* `returns` **{Array}**: Returns an array of matches - -**Example** - -```js -const mm = require('micromatch'); -// mm(list, patterns[, options]); - -console.log(mm(['a.js', 'a.txt'], ['*.js'])); -//=> [ 'a.js' ] -``` - -### [.matcher](index.js#L109) - -Returns a matcher function from the given glob `pattern` and `options`. The returned function takes a string to match as its only argument and returns true if the string is a match. - -**Params** - -* `pattern` **{String}**: Glob pattern -* `options` **{Object}** -* `returns` **{Function}**: Returns a matcher function. 
- -**Example** - -```js -const mm = require('micromatch'); -// mm.matcher(pattern[, options]); - -const isMatch = mm.matcher('*.!(*a)'); -console.log(isMatch('a.a')); //=> false -console.log(isMatch('a.b')); //=> true -``` - -### [.isMatch](index.js#L128) - -Returns true if **any** of the given glob `patterns` match the specified `string`. - -**Params** - -* `str` **{String}**: The string to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `[options]` **{Object}**: See available [options](#options). -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const mm = require('micromatch'); -// mm.isMatch(string, patterns[, options]); - -console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true -console.log(mm.isMatch('a.a', 'b.*')); //=> false -``` - -### [.not](index.js#L153) - -Returns a list of strings that _**do not match any**_ of the given `patterns`. - -**Params** - -* `list` **{Array}**: Array of strings to match. -* `patterns` **{String|Array}**: One or more glob pattern to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Array}**: Returns an array of strings that **do not match** the given patterns. - -**Example** - -```js -const mm = require('micromatch'); -// mm.not(list, patterns[, options]); - -console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); -//=> ['b.b', 'c.c'] -``` - -### [.contains](index.js#L193) - -Returns true if the given `string` contains the given pattern. Similar to [.isMatch](#isMatch) but the pattern can match any part of the string. - -**Params** - -* `str` **{String}**: The string to match. -* `patterns` **{String|Array}**: Glob pattern to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any of the patterns matches any part of `str`. - -**Example** - -```js -var mm = require('micromatch'); -// mm.contains(string, pattern[, options]); - -console.log(mm.contains('aa/bb/cc', '*b')); -//=> true -console.log(mm.contains('aa/bb/cc', '*d')); -//=> false -``` - -### [.matchKeys](index.js#L235) - -Filter the keys of the given object with the given `glob` pattern and `options`. Does not attempt to match nested keys. If you need this feature, use [glob-object](https://github.com/jonschlinkert/glob-object) instead. - -**Params** - -* `object` **{Object}**: The object with keys to filter. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Object}**: Returns an object with only keys that match the given patterns. - -**Example** - -```js -const mm = require('micromatch'); -// mm.matchKeys(object, patterns[, options]); - -const obj = { aa: 'a', ab: 'b', ac: 'c' }; -console.log(mm.matchKeys(obj, '*b')); -//=> { ab: 'b' } -``` - -### [.some](index.js#L264) - -Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - -**Params** - -* `list` **{String|Array}**: The string or array of strings to test. Returns as soon as the first match is found. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. 
-* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any `patterns` matches any of the strings in `list` - -**Example** - -```js -const mm = require('micromatch'); -// mm.some(list, patterns[, options]); - -console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); -// true -console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); -// false -``` - -### [.every](index.js#L300) - -Returns true if every string in the given `list` matches any of the given glob `patterns`. - -**Params** - -* `list` **{String|Array}**: The string or array of strings to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if all `patterns` matches all of the strings in `list` - -**Example** - -```js -const mm = require('micromatch'); -// mm.every(list, patterns[, options]); - -console.log(mm.every('foo.js', ['foo.js'])); -// true -console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); -// true -console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); -// false -console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); -// false -``` - -### [.all](index.js#L339) - -Returns true if **all** of the given `patterns` match the specified string. - -**Params** - -* `str` **{String|Array}**: The string to test. -* `patterns` **{String|Array}**: One or more glob patterns to use for matching. -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const mm = require('micromatch'); -// mm.all(string, patterns[, options]); - -console.log(mm.all('foo.js', ['foo.js'])); -// true - -console.log(mm.all('foo.js', ['*.js', '!foo.js'])); -// false - -console.log(mm.all('foo.js', ['*.js', 'foo.js'])); -// true - -console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); -// true -``` - -### [.capture](index.js#L366) - -Returns an array of matches captured by `pattern` in `string, or`null` if the pattern did not match. - -**Params** - -* `glob` **{String}**: Glob pattern to use for matching. -* `input` **{String}**: String to match -* `options` **{Object}**: See available [options](#options) for changing how matches are performed -* `returns` **{Array|null}**: Returns an array of captures if the input matches the glob pattern, otherwise `null`. - -**Example** - -```js -const mm = require('micromatch'); -// mm.capture(pattern, string[, options]); - -console.log(mm.capture('test/*.js', 'test/foo.js')); -//=> ['foo'] -console.log(mm.capture('test/*.js', 'foo/bar.css')); -//=> null -``` - -### [.makeRe](index.js#L392) - -Create a regular expression from the given glob `pattern`. - -**Params** - -* `pattern` **{String}**: A glob pattern to convert to regex. -* `options` **{Object}** -* `returns` **{RegExp}**: Returns a regex created from the given pattern. - -**Example** - -```js -const mm = require('micromatch'); -// mm.makeRe(pattern[, options]); - -console.log(mm.makeRe('*.js')); -//=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ -``` - -### [.scan](index.js#L408) - -Scan a glob pattern to separate the pattern into segments. Used by the [split](#split) method. 
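The `.scan()` entry is easiest to follow from its output. A rough sketch with an assumed input pattern (the returned state object carries more fields than the two printed here):

```js
const mm = require('micromatch');

// Split a pattern into its non-glob base and the glob portion.
const state = mm.scan('foo/bar/*.js');
console.log(state.base); //=> 'foo/bar'
console.log(state.glob); //=> '*.js'
```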
- -**Params** - -* `pattern` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with - -**Example** - -```js -const mm = require('micromatch'); -const state = mm.scan(pattern[, options]); -``` - -### [.parse](index.js#L424) - -Parse a glob pattern to create the source string for a regular expression. - -**Params** - -* `glob` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with useful properties and output to be used as regex source string. - -**Example** - -```js -const mm = require('micromatch'); -const state = mm.parse(pattern[, options]); -``` - -### [.braces](index.js#L451) - -Process the given brace `pattern`. - -**Params** - -* `pattern` **{String}**: String with brace pattern to process. -* `options` **{Object}**: Any [options](#options) to change how expansion is performed. See the [braces](https://github.com/micromatch/braces) library for all available options. -* `returns` **{Array}** - -**Example** - -```js -const { braces } = require('micromatch'); -console.log(braces('foo/{a,b,c}/bar')); -//=> [ 'foo/(a|b|c)/bar' ] - -console.log(braces('foo/{a,b,c}/bar', { expand: true })); -//=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] -``` - -## Options - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | -| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | -| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | -| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | -| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | -| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | -| `dot` | `boolean` | `false` | Match dotfiles. Otherwise dotfiles are ignored unless a `.` is explicitly defined in the pattern. | -| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. This option is overridden by the `expandBrace` option. | -| `failglob` | `boolean` | `false` | Similar to the `failglob` behavior in Bash, throws an error when no matches are found. Based on the bash option of the same name. | -| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | -| `flags` | `boolean` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | -| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | -| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. 
| -| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | -| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | -| `lookbehinds` | `boolean` | `true` | Support regex positive and negative lookbehinds. Note that you must be using Node 8.1.10 or higher to enable regex lookbehinds. | -| `matchBase` | `boolean` | `false` | Alias for `basename` | -| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | -| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | -| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | -| `nocase` | `boolean` | `false` | Perform case-insensitive matching. Equivalent to the regex `i` flag. Note that this option is ignored when the `flags` option is defined. | -| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. | -| `noext` | `boolean` | `false` | Alias for `noextglob` | -| `noextglob` | `boolean` | `false` | Disable support for matching with [extglobs](#extglobs) (like `+(a\|b)`) | -| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | -| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | -| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | -| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | -| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | -| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | -| `posix` | `boolean` | `false` | Support [POSIX character classes](#posix-bracket-expressions) ("posix brackets"). | -| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | -| `prepend` | `string` | `undefined` | String to prepend to the generated regex used for matching. | -| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | -| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | -| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | -| `unescape` | `boolean` | `undefined` | Remove preceding backslashes from escaped glob characters before creating the regular expression to perform matches. | -| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatitibility. | - -## Options Examples - -### options.basename - -Allow glob patterns without slashes to match a file path based on its basename. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `matchBase`. 
- -**Type**: `Boolean` - -**Default**: `false` - -**Example** - -```js -micromatch(['a/b.js', 'a/c.md'], '*.js'); -//=> [] - -micromatch(['a/b.js', 'a/c.md'], '*.js', { basename: true }); -//=> ['a/b.js'] -``` - -### options.bash - -Enabled by default, this option enforces bash-like behavior with stars immediately following a bracket expression. Bash bracket expressions are similar to regex character classes, but unlike regex, a star following a bracket expression **does not repeat the bracketed characters**. Instead, the star is treated the same as any other star. - -**Type**: `Boolean` - -**Default**: `true` - -**Example** - -```js -const files = ['abc', 'ajz']; -console.log(micromatch(files, '[a-c]*')); -//=> ['abc', 'ajz'] - -console.log(micromatch(files, '[a-c]*', { bash: false })); -``` - -### options.expandRange - -**Type**: `function` - -**Default**: `undefined` - -Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. - -**Example** - -The following example shows how to create a glob that matches a numeric folder name between `01` and `25`, with leading zeros. - -```js -const fill = require('fill-range'); -const regex = micromatch.makeRe('foo/{01..25}/bar', { - expandRange(a, b) { - return `(${fill(a, b, { toRegex: true })})`; - } -}); - -console.log(regex) -//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ - -console.log(regex.test('foo/00/bar')) // false -console.log(regex.test('foo/01/bar')) // true -console.log(regex.test('foo/10/bar')) // true -console.log(regex.test('foo/22/bar')) // true -console.log(regex.test('foo/25/bar')) // true -console.log(regex.test('foo/26/bar')) // false -``` - -### options.format - -**Type**: `function` - -**Default**: `undefined` - -Custom function for formatting strings before they're matched. - -**Example** - -```js -// strip leading './' from strings -const format = str => str.replace(/^\.\//, ''); -const isMatch = picomatch('foo/*.js', { format }); -console.log(isMatch('./foo/bar.js')) //=> true -``` - -### options.ignore - -String or array of glob patterns to match files to ignore. - -**Type**: `String|Array` - -**Default**: `undefined` - -```js -const isMatch = micromatch.matcher('*', { ignore: 'f*' }); -console.log(isMatch('foo')) //=> false -console.log(isMatch('bar')) //=> true -console.log(isMatch('baz')) //=> true -``` - -### options.matchBase - -Alias for [options.basename](#options-basename). - -### options.noextglob - -Disable extglob support, so that [extglobs](#extglobs) are regarded as literal characters. - -**Type**: `Boolean` - -**Default**: `undefined` - -**Examples** - -```js -console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)')); -//=> ['a/b', 'a/!(z)'] - -console.log(micromatch(['a/z', 'a/b', 'a/!(z)'], 'a/!(z)', { noextglob: true })); -//=> ['a/!(z)'] (matches only as literal characters) -``` - -### options.nonegate - -Disallow negation (`!`) patterns, and treat leading `!` as a literal character to match. - -**Type**: `Boolean` - -**Default**: `undefined` - -### options.noglobstar - -Disable matching with globstars (`**`). - -**Type**: `Boolean` - -**Default**: `undefined` - -```js -micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**'); -//=> ['a/b', 'a/b/c', 'a/b/c/d'] - -micromatch(['a/b', 'a/b/c', 'a/b/c/d'], 'a/**', {noglobstar: true}); -//=> ['a/b'] -``` - -### options.nonull - -Alias for [options.nullglob](#options-nullglob). 
- -### options.nullglob - -If `true`, when no matches are found the actual (arrayified) glob pattern is returned instead of an empty array. Same behavior as [minimatch](https://github.com/isaacs/minimatch) option `nonull`. - -**Type**: `Boolean` - -**Default**: `undefined` - -### options.onIgnore - -```js -const onIgnore = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); - // { glob: '*', regex: /^(?:(?!\.)(?=.)[^\/]*?\/?)$/, input: 'foo', output: 'foo' } -}; - -const isMatch = micromatch.matcher('*', { onIgnore, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -### options.onMatch - -```js -const onMatch = ({ glob, regex, input, output }) => { - console.log({ input, output }); - // { input: 'some\\path', output: 'some/path' } - // { input: 'some\\path', output: 'some/path' } - // { input: 'some\\path', output: 'some/path' } -}; - -const isMatch = micromatch.matcher('**', { onMatch, posixSlashes: true }); -isMatch('some\\path'); -isMatch('some\\path'); -isMatch('some\\path'); -``` - -### options.onResult - -```js -const onResult = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = micromatch('*', { onResult, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -### options.posixSlashes - -Convert path separators on returned files to posix/unix-style forward slashes. Aliased as `unixify` for backwards compatibility. - -**Type**: `Boolean` - -**Default**: `true` on windows, `false` everywhere else. - -**Example** - -```js -console.log(micromatch.match(['a\\b\\c'], 'a/**')); -//=> ['a/b/c'] - -console.log(micromatch.match(['a\\b\\c'], { posixSlashes: false })); -//=> ['a\\b\\c'] -``` - -### options.unescape - -Remove backslashes from escaped glob characters before creating the regular expression to perform matches. - -**Type**: `Boolean` - -**Default**: `undefined` - -**Example** - -In this example we want to match a literal `*`: - -```js -console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c')); -//=> ['a\\*c'] - -console.log(micromatch.match(['abc', 'a\\*c'], 'a\\*c', { unescape: true })); -//=> ['a*c'] -``` - -
-
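The `nullglob`/`nonull` option described above is the one option example the list omits; a minimal sketch with assumed file names, matching what the deleted `index.js` does when nothing matches:

```js
const micromatch = require('micromatch');

// By default an unmatched pattern yields an empty array...
console.log(micromatch(['a.md', 'b.md'], '*.js'));
//=> []

// ...with `nullglob: true` (or `nonull: true`) the arrayified
// pattern itself is returned instead.
console.log(micromatch(['a.md', 'b.md'], '*.js', { nullglob: true }));
//=> ['*.js']
```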
- -## Extended globbing - -Micromatch supports the following extended globbing features. - -### Extglobs - -Extended globbing, as described by the bash man page: - -| **pattern** | **regex equivalent** | **description** | -| --- | --- | --- | -| `?(pattern)` | `(pattern)?` | Matches zero or one occurrence of the given patterns | -| `*(pattern)` | `(pattern)*` | Matches zero or more occurrences of the given patterns | -| `+(pattern)` | `(pattern)+` | Matches one or more occurrences of the given patterns | -| `@(pattern)` | `(pattern)` * | Matches one of the given patterns | -| `!(pattern)` | N/A (equivalent regex is much more complicated) | Matches anything except one of the given patterns | - -* Note that `@` isn't a regex character. - -### Braces - -Brace patterns can be used to match specific ranges or sets of characters. - -**Example** - -The pattern `{f,b}*/{1..3}/{b,q}*` would match any of following strings: - -``` -foo/1/bar -foo/2/bar -foo/3/bar -baz/1/qux -baz/2/qux -baz/3/qux -``` - -Visit [braces](https://github.com/micromatch/braces) to see the full range of features and options related to brace expansion, or to create brace matching or expansion related issues. - -### Regex character classes - -Given the list: `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: - -* `[ac].js`: matches both `a` and `c`, returning `['a.js', 'c.js']` -* `[b-d].js`: matches from `b` to `d`, returning `['b.js', 'c.js', 'd.js']` -* `a/[A-Z].js`: matches and uppercase letter, returning `['a/E.md']` - -Learn about [regex character classes](http://www.regular-expressions.info/charclass.html). - -### Regex groups - -Given `['a.js', 'b.js', 'c.js', 'd.js', 'E.js']`: - -* `(a|c).js`: would match either `a` or `c`, returning `['a.js', 'c.js']` -* `(b|d).js`: would match either `b` or `d`, returning `['b.js', 'd.js']` -* `(b|[A-Z]).js`: would match either `b` or an uppercase letter, returning `['b.js', 'E.js']` - -As with regex, parens can be nested, so patterns like `((a|b)|c)/b` will work. Although brace expansion might be friendlier to use, depending on preference. - -### POSIX bracket expressions - -POSIX brackets are intended to be more user-friendly than regex character classes. This of course is in the eye of the beholder. - -**Example** - -```js -console.log(micromatch.isMatch('a1', '[[:alpha:][:digit:]]')) //=> true -console.log(micromatch.isMatch('a1', '[[:alpha:][:alpha:]]')) //=> false -``` - -*** - -## Notes - -### Bash 4.3 parity - -Whenever possible matching behavior is based on behavior Bash 4.3, which is mostly consistent with minimatch. - -However, it's suprising how many edge cases and rabbit holes there are with glob matching, and since there is no real glob specification, and micromatch is more accurate than both Bash and minimatch, there are cases where best-guesses were made for behavior. In a few cases where Bash had no answers, we used wildmatch (used by git) as a fallback. - -### Backslashes - -There is an important, notable difference between minimatch and micromatch _in regards to how backslashes are handled_ in glob patterns. - -* Micromatch exclusively and explicitly reserves backslashes for escaping characters in a glob pattern, even on windows, which is consistent with bash behavior. _More importantly, unescaping globs can result in unsafe regular expressions_. -* Minimatch converts all backslashes to forward slashes, which means you can't use backslashes to escape any characters in your glob patterns. 
- -We made this decision for micromatch for a couple of reasons: - -* Consistency with bash conventions. -* Glob patterns are not filepaths. They are a type of [regular language](https://en.wikipedia.org/wiki/Regular_language) that is converted to a JavaScript regular expression. Thus, when forward slashes are defined in a glob pattern, the resulting regular expression will match windows or POSIX path separators just fine. - -**A note about joining paths to globs** - -Note that when you pass something like `path.join('foo', '*')` to micromatch, you are creating a filepath and expecting it to still work as a glob pattern. This causes problems on windows, since the `path.sep` is `\\`. - -In other words, since `\\` is reserved as an escape character in globs, on windows `path.join('foo', '*')` would result in `foo\\*`, which tells micromatch to match `*` as a literal character. This is the same behavior as bash. - -To solve this, you might be inspired to do something like `'foo\\*'.replace(/\\/g, '/')`, but this causes another, potentially much more serious, problem. - -## Benchmarks - -### Running benchmarks - -Install dependencies for running benchmarks: - -```sh -$ cd bench && npm install -``` - -Run the benchmarks: - -```sh -$ npm run bench -``` - -### Latest results - -As of August 23, 2024 (longer bars are better): - -```sh -# .makeRe star - micromatch x 2,232,802 ops/sec ±2.34% (89 runs sampled)) - minimatch x 781,018 ops/sec ±6.74% (92 runs sampled)) - -# .makeRe star; dot=true - micromatch x 1,863,453 ops/sec ±0.74% (93 runs sampled) - minimatch x 723,105 ops/sec ±0.75% (93 runs sampled) - -# .makeRe globstar - micromatch x 1,624,179 ops/sec ±2.22% (91 runs sampled) - minimatch x 1,117,230 ops/sec ±2.78% (86 runs sampled)) - -# .makeRe globstars - micromatch x 1,658,642 ops/sec ±0.86% (92 runs sampled) - minimatch x 741,224 ops/sec ±1.24% (89 runs sampled)) - -# .makeRe with leading star - micromatch x 1,525,014 ops/sec ±1.63% (90 runs sampled) - minimatch x 561,074 ops/sec ±3.07% (89 runs sampled) - -# .makeRe - braces - micromatch x 172,478 ops/sec ±2.37% (78 runs sampled) - minimatch x 96,087 ops/sec ±2.34% (88 runs sampled))) - -# .makeRe braces - range (expanded) - micromatch x 26,973 ops/sec ±0.84% (89 runs sampled) - minimatch x 3,023 ops/sec ±0.99% (90 runs sampled)) - -# .makeRe braces - range (compiled) - micromatch x 152,892 ops/sec ±1.67% (83 runs sampled) - minimatch x 992 ops/sec ±3.50% (89 runs sampled)d)) - -# .makeRe braces - nested ranges (expanded) - micromatch x 15,816 ops/sec ±13.05% (80 runs sampled) - minimatch x 2,953 ops/sec ±1.64% (91 runs sampled) - -# .makeRe braces - nested ranges (compiled) - micromatch x 110,881 ops/sec ±1.85% (82 runs sampled) - minimatch x 1,008 ops/sec ±1.51% (91 runs sampled) - -# .makeRe braces - set (compiled) - micromatch x 134,930 ops/sec ±3.54% (63 runs sampled)) - minimatch x 43,242 ops/sec ±0.60% (93 runs sampled) - -# .makeRe braces - nested sets (compiled) - micromatch x 94,455 ops/sec ±1.74% (69 runs sampled)) - minimatch x 27,720 ops/sec ±1.84% (93 runs sampled)) -``` - -## Contributing - -All contributions are welcome! Please read [the contributing guide](.github/contributing.md) to get started. - -**Bug reports** - -Please create an issue if you encounter a bug or matching behavior that doesn't seem correct. 
If you find a matching-related issue, please: - -* [research existing issues first](../../issues) (open and closed) -* visit the [GNU Bash documentation](https://www.gnu.org/software/bash/manual/) to see how Bash deals with the pattern -* visit the [minimatch](https://github.com/isaacs/minimatch) documentation to cross-check expected behavior in node.js -* if all else fails, since there is no real specification for globs we will probably need to discuss expected behavior and decide how to resolve it. which means any detail you can provide to help with this discussion would be greatly appreciated. - -**Platform issues** - -It's important to us that micromatch work consistently on all platforms. If you encounter any platform-specific matching or path related issues, please let us know (pull requests are also greatly appreciated). - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [braces](https://www.npmjs.com/package/braces): Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support… [more](https://github.com/micromatch/braces) | [homepage](https://github.com/micromatch/braces "Bash-like brace expansion, implemented in JavaScript. Safer than other brace expansion libs, with complete support for the Bash 4.3 braces specification, without sacrificing speed.") -* [expand-brackets](https://www.npmjs.com/package/expand-brackets): Expand POSIX bracket expressions (character classes) in glob patterns. | [homepage](https://github.com/micromatch/expand-brackets "Expand POSIX bracket expressions (character classes) in glob patterns.") -* [extglob](https://www.npmjs.com/package/extglob): Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob… [more](https://github.com/micromatch/extglob) | [homepage](https://github.com/micromatch/extglob "Extended glob support for JavaScript. Adds (almost) the expressive power of regular expressions to glob patterns.") -* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") -* [nanomatch](https://www.npmjs.com/package/nanomatch): Fast, minimal glob matcher for node.js. Similar to micromatch, minimatch and multimatch, but complete Bash… [more](https://github.com/micromatch/nanomatch) | [homepage](https://github.com/micromatch/nanomatch "Fast, minimal glob matcher for node.js. 
Similar to micromatch, minimatch and multimatch, but complete Bash 4.3 wildcard support only (no support for exglobs, posix brackets or braces)") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 523 | [jonschlinkert](https://github.com/jonschlinkert) | -| 12 | [es128](https://github.com/es128) | -| 9 | [danez](https://github.com/danez) | -| 8 | [doowb](https://github.com/doowb) | -| 6 | [paulmillr](https://github.com/paulmillr) | -| 5 | [mrmlnc](https://github.com/mrmlnc) | -| 3 | [DrPizza](https://github.com/DrPizza) | -| 2 | [Tvrqvoise](https://github.com/Tvrqvoise) | -| 2 | [antonyk](https://github.com/antonyk) | -| 2 | [MartinKolarik](https://github.com/MartinKolarik) | -| 2 | [Glazy](https://github.com/Glazy) | -| 2 | [mceIdo](https://github.com/mceIdo) | -| 2 | [TrySound](https://github.com/TrySound) | -| 1 | [yvele](https://github.com/yvele) | -| 1 | [wtgtybhertgeghgtwtg](https://github.com/wtgtybhertgeghgtwtg) | -| 1 | [simlu](https://github.com/simlu) | -| 1 | [curbengh](https://github.com/curbengh) | -| 1 | [fidian](https://github.com/fidian) | -| 1 | [tomByrer](https://github.com/tomByrer) | -| 1 | [ZoomerTedJackson](https://github.com/ZoomerTedJackson) | -| 1 | [styfle](https://github.com/styfle) | -| 1 | [sebdeckers](https://github.com/sebdeckers) | -| 1 | [muescha](https://github.com/muescha) | -| 1 | [juszczykjakub](https://github.com/juszczykjakub) | -| 1 | [joyceerhl](https://github.com/joyceerhl) | -| 1 | [donatj](https://github.com/donatj) | -| 1 | [frangio](https://github.com/frangio) | -| 1 | [UltCombo](https://github.com/UltCombo) | -| 1 | [DianeLooney](https://github.com/DianeLooney) | -| 1 | [devongovett](https://github.com/devongovett) | -| 1 | [Cslove](https://github.com/Cslove) | -| 1 | [amilajack](https://github.com/amilajack) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2024, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on August 23, 2024._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js deleted file mode 100644 index cb9d9ef..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/index.js +++ /dev/null @@ -1,474 +0,0 @@ -'use strict'; - -const util = require('util'); -const braces = require('braces'); -const picomatch = require('picomatch'); -const utils = require('picomatch/lib/utils'); - -const isEmptyString = v => v === '' || v === './'; -const hasBraces = v => { - const index = v.indexOf('{'); - return index > -1 && v.indexOf('}', index) > -1; -}; - -/** - * Returns an array of strings that match one or more glob patterns. - * - * ```js - * const mm = require('micromatch'); - * // mm(list, patterns[, options]); - * - * console.log(mm(['a.js', 'a.txt'], ['*.js'])); - * //=> [ 'a.js' ] - * ``` - * @param {String|Array} `list` List of strings to match. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. 
- * @param {Object} `options` See available [options](#options) - * @return {Array} Returns an array of matches - * @summary false - * @api public - */ - -const micromatch = (list, patterns, options) => { - patterns = [].concat(patterns); - list = [].concat(list); - - let omit = new Set(); - let keep = new Set(); - let items = new Set(); - let negatives = 0; - - let onResult = state => { - items.add(state.output); - if (options && options.onResult) { - options.onResult(state); - } - }; - - for (let i = 0; i < patterns.length; i++) { - let isMatch = picomatch(String(patterns[i]), { ...options, onResult }, true); - let negated = isMatch.state.negated || isMatch.state.negatedExtglob; - if (negated) negatives++; - - for (let item of list) { - let matched = isMatch(item, true); - - let match = negated ? !matched.isMatch : matched.isMatch; - if (!match) continue; - - if (negated) { - omit.add(matched.output); - } else { - omit.delete(matched.output); - keep.add(matched.output); - } - } - } - - let result = negatives === patterns.length ? [...items] : [...keep]; - let matches = result.filter(item => !omit.has(item)); - - if (options && matches.length === 0) { - if (options.failglob === true) { - throw new Error(`No matches found for "${patterns.join(', ')}"`); - } - - if (options.nonull === true || options.nullglob === true) { - return options.unescape ? patterns.map(p => p.replace(/\\/g, '')) : patterns; - } - } - - return matches; -}; - -/** - * Backwards compatibility - */ - -micromatch.match = micromatch; - -/** - * Returns a matcher function from the given glob `pattern` and `options`. - * The returned function takes a string to match as its only argument and returns - * true if the string is a match. - * - * ```js - * const mm = require('micromatch'); - * // mm.matcher(pattern[, options]); - * - * const isMatch = mm.matcher('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @param {String} `pattern` Glob pattern - * @param {Object} `options` - * @return {Function} Returns a matcher function. - * @api public - */ - -micromatch.matcher = (pattern, options) => picomatch(pattern, options); - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const mm = require('micromatch'); - * // mm.isMatch(string, patterns[, options]); - * - * console.log(mm.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(mm.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `[options]` See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Backwards compatibility - */ - -micromatch.any = micromatch.isMatch; - -/** - * Returns a list of strings that _**do not match any**_ of the given `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.not(list, patterns[, options]); - * - * console.log(mm.not(['a.a', 'b.b', 'c.c'], '*.a')); - * //=> ['b.b', 'c.c'] - * ``` - * @param {Array} `list` Array of strings to match. - * @param {String|Array} `patterns` One or more glob pattern to use for matching. 
- * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array} Returns an array of strings that **do not match** the given patterns. - * @api public - */ - -micromatch.not = (list, patterns, options = {}) => { - patterns = [].concat(patterns).map(String); - let result = new Set(); - let items = []; - - let onResult = state => { - if (options.onResult) options.onResult(state); - items.push(state.output); - }; - - let matches = new Set(micromatch(list, patterns, { ...options, onResult })); - - for (let item of items) { - if (!matches.has(item)) { - result.add(item); - } - } - return [...result]; -}; - -/** - * Returns true if the given `string` contains the given pattern. Similar - * to [.isMatch](#isMatch) but the pattern can match any part of the string. - * - * ```js - * var mm = require('micromatch'); - * // mm.contains(string, pattern[, options]); - * - * console.log(mm.contains('aa/bb/cc', '*b')); - * //=> true - * console.log(mm.contains('aa/bb/cc', '*d')); - * //=> false - * ``` - * @param {String} `str` The string to match. - * @param {String|Array} `patterns` Glob pattern to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any of the patterns matches any part of `str`. - * @api public - */ - -micromatch.contains = (str, pattern, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - if (Array.isArray(pattern)) { - return pattern.some(p => micromatch.contains(str, p, options)); - } - - if (typeof pattern === 'string') { - if (isEmptyString(str) || isEmptyString(pattern)) { - return false; - } - - if (str.includes(pattern) || (str.startsWith('./') && str.slice(2).includes(pattern))) { - return true; - } - } - - return micromatch.isMatch(str, pattern, { ...options, contains: true }); -}; - -/** - * Filter the keys of the given object with the given `glob` pattern - * and `options`. Does not attempt to match nested keys. If you need this feature, - * use [glob-object][] instead. - * - * ```js - * const mm = require('micromatch'); - * // mm.matchKeys(object, patterns[, options]); - * - * const obj = { aa: 'a', ab: 'b', ac: 'c' }; - * console.log(mm.matchKeys(obj, '*b')); - * //=> { ab: 'b' } - * ``` - * @param {Object} `object` The object with keys to filter. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Object} Returns an object with only keys that match the given patterns. - * @api public - */ - -micromatch.matchKeys = (obj, patterns, options) => { - if (!utils.isObject(obj)) { - throw new TypeError('Expected the first argument to be an object'); - } - let keys = micromatch(Object.keys(obj), patterns, options); - let res = {}; - for (let key of keys) res[key] = obj[key]; - return res; -}; - -/** - * Returns true if some of the strings in the given `list` match any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.some(list, patterns[, options]); - * - * console.log(mm.some(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // true - * console.log(mm.some(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. Returns as soon as the first match is found. 
- * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any `patterns` matches any of the strings in `list` - * @api public - */ - -micromatch.some = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (items.some(item => isMatch(item))) { - return true; - } - } - return false; -}; - -/** - * Returns true if every string in the given `list` matches - * any of the given glob `patterns`. - * - * ```js - * const mm = require('micromatch'); - * // mm.every(list, patterns[, options]); - * - * console.log(mm.every('foo.js', ['foo.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js'])); - * // true - * console.log(mm.every(['foo.js', 'bar.js'], ['*.js', '!foo.js'])); - * // false - * console.log(mm.every(['foo.js'], ['*.js', '!foo.js'])); - * // false - * ``` - * @param {String|Array} `list` The string or array of strings to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if all `patterns` matches all of the strings in `list` - * @api public - */ - -micromatch.every = (list, patterns, options) => { - let items = [].concat(list); - - for (let pattern of [].concat(patterns)) { - let isMatch = picomatch(String(pattern), options); - if (!items.every(item => isMatch(item))) { - return false; - } - } - return true; -}; - -/** - * Returns true if **all** of the given `patterns` match - * the specified string. - * - * ```js - * const mm = require('micromatch'); - * // mm.all(string, patterns[, options]); - * - * console.log(mm.all('foo.js', ['foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', '!foo.js'])); - * // false - * - * console.log(mm.all('foo.js', ['*.js', 'foo.js'])); - * // true - * - * console.log(mm.all('foo.js', ['*.js', 'f*', '*o*', '*o.js'])); - * // true - * ``` - * @param {String|Array} `str` The string to test. - * @param {String|Array} `patterns` One or more glob patterns to use for matching. - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -micromatch.all = (str, patterns, options) => { - if (typeof str !== 'string') { - throw new TypeError(`Expected a string: "${util.inspect(str)}"`); - } - - return [].concat(patterns).every(p => picomatch(p, options)(str)); -}; - -/** - * Returns an array of matches captured by `pattern` in `string, or `null` if the pattern did not match. - * - * ```js - * const mm = require('micromatch'); - * // mm.capture(pattern, string[, options]); - * - * console.log(mm.capture('test/*.js', 'test/foo.js')); - * //=> ['foo'] - * console.log(mm.capture('test/*.js', 'foo/bar.css')); - * //=> null - * ``` - * @param {String} `glob` Glob pattern to use for matching. - * @param {String} `input` String to match - * @param {Object} `options` See available [options](#options) for changing how matches are performed - * @return {Array|null} Returns an array of captures if the input matches the glob pattern, otherwise `null`. 
- * @api public - */ - -micromatch.capture = (glob, input, options) => { - let posix = utils.isWindows(options); - let regex = picomatch.makeRe(String(glob), { ...options, capture: true }); - let match = regex.exec(posix ? utils.toPosixSlashes(input) : input); - - if (match) { - return match.slice(1).map(v => v === void 0 ? '' : v); - } -}; - -/** - * Create a regular expression from the given glob `pattern`. - * - * ```js - * const mm = require('micromatch'); - * // mm.makeRe(pattern[, options]); - * - * console.log(mm.makeRe('*.js')); - * //=> /^(?:(\.[\\\/])?(?!\.)(?=.)[^\/]*?\.js)$/ - * ``` - * @param {String} `pattern` A glob pattern to convert to regex. - * @param {Object} `options` - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -micromatch.makeRe = (...args) => picomatch.makeRe(...args); - -/** - * Scan a glob pattern to separate the pattern into segments. Used - * by the [split](#split) method. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.scan(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -micromatch.scan = (...args) => picomatch.scan(...args); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const mm = require('micromatch'); - * const state = mm.parse(pattern[, options]); - * ``` - * @param {String} `glob` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as regex source string. - * @api public - */ - -micromatch.parse = (patterns, options) => { - let res = []; - for (let pattern of [].concat(patterns || [])) { - for (let str of braces(String(pattern), options)) { - res.push(picomatch.parse(str, options)); - } - } - return res; -}; - -/** - * Process the given brace `pattern`. - * - * ```js - * const { braces } = require('micromatch'); - * console.log(braces('foo/{a,b,c}/bar')); - * //=> [ 'foo/(a|b|c)/bar' ] - * - * console.log(braces('foo/{a,b,c}/bar', { expand: true })); - * //=> [ 'foo/a/bar', 'foo/b/bar', 'foo/c/bar' ] - * ``` - * @param {String} `pattern` String with brace pattern to process. - * @param {Object} `options` Any [options](#options) to change how expansion is performed. See the [braces][] library for all available options. - * @return {Array} - * @api public - */ - -micromatch.braces = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - if ((options && options.nobrace === true) || !hasBraces(pattern)) { - return [pattern]; - } - return braces(pattern, options); -}; - -/** - * Expand braces - */ - -micromatch.braceExpand = (pattern, options) => { - if (typeof pattern !== 'string') throw new TypeError('Expected a string'); - return micromatch.braces(pattern, { ...options, expand: true }); -}; - -/** - * Expose micromatch - */ - -// exposed for tests -micromatch.hasBraces = hasBraces; -module.exports = micromatch; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json deleted file mode 100644 index d5558bb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/micromatch/package.json +++ /dev/null @@ -1,119 +0,0 @@ -{ - "name": "micromatch", - "description": "Glob matching for javascript/node.js. 
A replacement and faster alternative to minimatch and multimatch.", - "version": "4.0.8", - "homepage": "https://github.com/micromatch/micromatch", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "(https://github.com/DianeLooney)", - "Amila Welihinda (amilajack.com)", - "Bogdan Chadkin (https://github.com/TrySound)", - "Brian Woodward (https://twitter.com/doowb)", - "Devon Govett (http://badassjs.com)", - "Elan Shanker (https://github.com/es128)", - "Fabrício Matté (https://ultcombo.js.org)", - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Martin Kolárik (https://kolarik.sk)", - "Olsten Larck (https://i.am.charlike.online)", - "Paul Miller (paulmillr.com)", - "Tom Byrer (https://github.com/tomByrer)", - "Tyler Akins (http://rumkin.com)", - "Peter Bright (https://github.com/drpizza)", - "Kuba Juszczyk (https://github.com/ku8ar)" - ], - "repository": "micromatch/micromatch", - "bugs": { - "url": "https://github.com/micromatch/micromatch/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8.6" - }, - "scripts": { - "test": "mocha" - }, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "devDependencies": { - "fill-range": "^7.0.1", - "gulp-format-md": "^2.0.0", - "minimatch": "^5.0.1", - "mocha": "^9.2.2", - "time-require": "github:jonschlinkert/time-require" - }, - "keywords": [ - "bash", - "bracket", - "character-class", - "expand", - "expansion", - "expression", - "extglob", - "extglobs", - "file", - "files", - "filter", - "find", - "glob", - "globbing", - "globs", - "globstar", - "lookahead", - "lookaround", - "lookbehind", - "match", - "matcher", - "matches", - "matching", - "micromatch", - "minimatch", - "multimatch", - "negate", - "negation", - "path", - "pattern", - "patterns", - "posix", - "regex", - "regexp", - "regular", - "shell", - "star", - "wildcard" - ], - "verb": { - "toc": "collapsible", - "layout": "default", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "related": { - "list": [ - "braces", - "expand-brackets", - "extglob", - "fill-range", - "nanomatch" - ] - }, - "reflinks": [ - "extglob", - "fill-range", - "glob-object", - "minimatch", - "multimatch" - ] - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts deleted file mode 100644 index b4047d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -declare const mimicFn: { - /** - Make a function mimic another one. It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. - - @param to - Mimicking function. - @param from - Function to mimic. - @returns The modified `to` function. 
- - @example - ``` - import mimicFn = require('mimic-fn'); - - function foo() {} - foo.unicorn = '🦄'; - - function wrapper() { - return foo(); - } - - console.log(wrapper.name); - //=> 'wrapper' - - mimicFn(wrapper, foo); - - console.log(wrapper.name); - //=> 'foo' - - console.log(wrapper.unicorn); - //=> '🦄' - ``` - */ - < - ArgumentsType extends unknown[], - ReturnType, - FunctionType extends (...arguments: ArgumentsType) => ReturnType - >( - to: (...arguments: ArgumentsType) => ReturnType, - from: FunctionType - ): FunctionType; - - // TODO: Remove this for the next major release, refactor the whole definition to: - // declare function mimicFn< - // ArgumentsType extends unknown[], - // ReturnType, - // FunctionType extends (...arguments: ArgumentsType) => ReturnType - // >( - // to: (...arguments: ArgumentsType) => ReturnType, - // from: FunctionType - // ): FunctionType; - // export = mimicFn; - default: typeof mimicFn; -}; - -export = mimicFn; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js deleted file mode 100644 index 1a59705..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/index.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict'; - -const mimicFn = (to, from) => { - for (const prop of Reflect.ownKeys(from)) { - Object.defineProperty(to, prop, Object.getOwnPropertyDescriptor(from, prop)); - } - - return to; -}; - -module.exports = mimicFn; -// TODO: Remove this for the next major release -module.exports.default = mimicFn; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
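The deleted sources above document two small building blocks: `micromatch.matcher()` returns a matcher function for a glob pattern, and `mimic-fn` copies `name`, `length`, `displayName` and any custom own properties from one function onto another via property descriptors. A minimal sketch of how the two combine when wrapping a matcher (assuming both packages are installed; the caching wrapper `cachedIsJs` is illustrative only and not part of either package):

```js
'use strict';

const micromatch = require('micromatch');
const mimicFn = require('mimic-fn');

// micromatch.matcher(pattern) -> function that returns true/false for a string
const isJs = micromatch.matcher('*.js');

// Illustrative wrapper: memoize the matcher's results.
const cache = new Map();
function cachedIsJs(str) {
  if (!cache.has(str)) cache.set(str, isJs(str));
  return cache.get(str);
}

// Copy `name`, `length` and any custom properties from the matcher onto the wrapper.
mimicFn(cachedIsJs, isJs);

console.log(cachedIsJs('a.js'));  //=> true
console.log(cachedIsJs('a.txt')); //=> false
```

Because `mimicFn` copies own property descriptors with `Object.defineProperty`, the wrapper keeps whatever metadata the original matcher function carried.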
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json deleted file mode 100644 index 199d2c7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "mimic-fn", - "version": "2.1.0", - "description": "Make a function mimic another one", - "license": "MIT", - "repository": "sindresorhus/mimic-fn", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "function", - "mimic", - "imitate", - "rename", - "copy", - "inherit", - "properties", - "name", - "func", - "fn", - "set", - "infer", - "change" - ], - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md deleted file mode 100644 index 0ef8a13..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/mimic-fn/readme.md +++ /dev/null @@ -1,69 +0,0 @@ -# mimic-fn [![Build Status](https://travis-ci.org/sindresorhus/mimic-fn.svg?branch=master)](https://travis-ci.org/sindresorhus/mimic-fn) - -> Make a function mimic another one - -Useful when you wrap a function in another function and like to preserve the original name and other properties. - - -## Install - -``` -$ npm install mimic-fn -``` - - -## Usage - -```js -const mimicFn = require('mimic-fn'); - -function foo() {} -foo.unicorn = '🦄'; - -function wrapper() { - return foo(); -} - -console.log(wrapper.name); -//=> 'wrapper' - -mimicFn(wrapper, foo); - -console.log(wrapper.name); -//=> 'foo' - -console.log(wrapper.unicorn); -//=> '🦄' -``` - - -## API - -It will copy over the properties `name`, `length`, `displayName`, and any custom properties you may have set. - -### mimicFn(to, from) - -Modifies the `to` function and returns it. - -#### to - -Type: `Function` - -Mimicking function. - -#### from - -Type: `Function` - -Function to mimic. 
- - -## Related - -- [rename-fn](https://github.com/sindresorhus/rename-fn) - Rename a function -- [keep-func-props](https://github.com/ehmicky/keep-func-props) - Wrap a function without changing its name, length and other properties - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml deleted file mode 100644 index 5c04817..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: c -services: docker -os: linux -env: - - PACKAGE=python TARGET=linux64 MODE=Debug - - PACKAGE=dune-freetype TARGET=linux64 MODE=Debug - - PACKAGE=haxx-libcurl TARGET=linux64 MODE=Debug - - PACKAGE=fmod TARGET=linux64 MODE=Debug - - PACKAGE=intel-tbb TARGET=linux64 MODE=Debug - - PACKAGE=cryptopp TARGET=linux64 MODE=Debug - - PACKAGE=ois TARGET=linux64 MODE=Debug - - PACKAGE=bullet2 TARGET=linux64 MODE=Debug -script: - - PACKAGE=$PACKAGE make $TARGET - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE deleted file mode 100644 index 9e0fb24..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile deleted file mode 100644 index bfa5464..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/Makefile +++ /dev/null @@ -1,21 +0,0 @@ -PACKAGE ?= . 
-MODE ?= Debug - -all: clean build - -build: - (cd cmaki_identifier && npm install --unsafe-perm) - (cd cmaki_generator && ./build ${PACKAGE} -d) - -clean: - (cd cmaki_identifier && rm -Rf bin rm -Rf artifacts) - -linux64: - docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} linux64 make - -windows64: - docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} windows64 make - -android64: - docker-compose run --rm -e PACKAGE=${PACKAGE} -e MODE=${MODE} android64 make - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README deleted file mode 100644 index 75a7863..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/README +++ /dev/null @@ -1,57 +0,0 @@ -# fusion projects in one product - -- cmaki -- cmaki_scripts -- cmaki_identifier -- cmaki_docker -- cmaki_generator -- servfactor - -# variables de entorno -- Servidor de artefactos: -- NPP_SERVER = htpp://.... - -- Modo de compilación: -- NPP_MODE = Debug, Release .... - -- Directorio de instalación: -- NPP_INSTALL - -- Utilizar artefactos cacheados o compilar siempre: -- NPP_CACHE=TRUE/FALSE - - - - - - -refactor cmake ------------------------------------ - -cmaki_library ---------> npp_shared -cmaki_static_library --> npp_static -cmaki_executable ------> npp_executable -cmaki_test ------------> npp_test -cmaki_google_test -----> npp_google_test -cmaki_python_test -----> npp_python_test - - - -Comandos uso ------------- -npm install -npm test - -npm run create # crear package -npm run upload # subir package - - -windows environment ------------------- -visual studio 2019 -mini conda -npm -cmake -pip install conan -chocolatey -choco install tortoisegit diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml deleted file mode 100644 index 44de95c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: c -services: docker -os: linux -script: - - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake deleted file mode 100644 index 4bfc61e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/GitUtils.cmake +++ /dev/null @@ -1,157 +0,0 @@ -cmake_minimum_required(VERSION 2.8.7) - -include("${CMAKE_CURRENT_LIST_DIR}/Utils.cmake") -include(CMakeParseArguments) - -find_package(Git) -if(NOT GIT_FOUND) - message(FATAL_ERROR "git not found!") -endif() - - -# clone a git repo into a directory at configure time -# this can be useful for including cmake-library projects that contain *.cmake files -# the function will automatically init git submodules too -# -# ATTENTION: CMakeLists-files in the cloned repo will NOT be build automatically -# -# why not use ExternalProject_Add you ask? because we need to run this at configure time -# -# USAGE: -# git_clone( -# PROJECT_NAME -# GIT_URL -# [GIT_TAG|GIT_BRANCH|GIT_COMMIT ] -# [DIRECTORY ] -# [QUIET] -# ) -# -# -# ARGUMENTS: -# PROJECT_NAME -# name of the project that will be used in output variables. 
-# must be the same as the git directory/repo name -# -# GIT_URL -# url to the git repo -# -# GIT_TAG|GIT_BRANCH|GIT_COMMIT -# optional -# the tag/branch/commit to checkout -# default is master -# -# DIRECTORY -# optional -# the directory the project will be cloned into -# default is the build directory, similar to ExternalProject (${CMAKE_BINARY_DIR}) -# -# QUIET -# optional -# don't print status messages -# -# -# OUTPUT VARIABLES: -# _SOURCE_DIR -# top level source directory of the cloned project -# -# -# EXAMPLE: -# git_clone( -# PROJECT_NAME testProj -# GIT_URL https://github.com/test/test.git -# GIT_COMMIT a1b2c3 -# DIRECTORY ${CMAKE_BINARY_DIR} -# QUIET -# ) -# -# include(${testProj_SOURCE_DIR}/cmake/myFancyLib.cmake) - -function(cmaki_git_clone) - - cmake_parse_arguments( - PARGS # prefix of output variables - "QUIET" # list of names of the boolean arguments (only defined ones will be true) - "PROJECT_NAME;GIT_URL;GIT_TAG;GIT_BRANCH;GIT_COMMIT;DIRECTORY" # list of names of mono-valued arguments - "" # list of names of multi-valued arguments (output variables are lists) - ${ARGN} # arguments of the function to parse, here we take the all original ones - ) # remaining unparsed arguments can be found in PARGS_UNPARSED_ARGUMENTS - - if(NOT PARGS_PROJECT_NAME) - message(FATAL_ERROR "You must provide a project name") - endif() - - if(NOT PARGS_GIT_URL) - message(FATAL_ERROR "You must provide a git url") - endif() - - if(NOT PARGS_DIRECTORY) - set(PARGS_DIRECTORY ${CMAKE_BINARY_DIR}) - endif() - - set(${PARGS_PROJECT_NAME}_SOURCE_DIR - ${PARGS_DIRECTORY}/${PARGS_PROJECT_NAME} - CACHE INTERNAL "" FORCE) # makes var visible everywhere because PARENT_SCOPE wouldn't include this scope - - set(SOURCE_DIR ${PARGS_PROJECT_NAME}_SOURCE_DIR) - - # check that only one of GIT_TAG xor GIT_BRANCH xor GIT_COMMIT was passed - at_most_one(at_most_one_tag ${PARGS_GIT_TAG} ${PARGS_GIT_BRANCH} ${PARGS_GIT_COMMIT}) - - if(NOT at_most_one_tag) - message(FATAL_ERROR "you can only provide one of GIT_TAG, GIT_BRANCH or GIT_COMMIT") - endif() - - if(NOT PARGS_QUIET) - message(STATUS "downloading/updating ${PARGS_PROJECT_NAME}") - endif() - - # first clone the repo - if(EXISTS ${${SOURCE_DIR}}) - if(NOT PARGS_QUIET) - message(STATUS "${PARGS_PROJECT_NAME} directory found, pulling...") - endif() - - execute_process( - COMMAND ${GIT_EXECUTABLE} pull origin master - COMMAND ${GIT_EXECUTABLE} submodule update --remote - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - else() - if(NOT PARGS_QUIET) - message(STATUS "${PARGS_PROJECT_NAME} directory not found, cloning...") - endif() - - execute_process( - COMMAND ${GIT_EXECUTABLE} clone ${PARGS_GIT_URL} --recursive - WORKING_DIRECTORY ${PARGS_DIRECTORY} - OUTPUT_VARIABLE git_output) - endif() - - if(NOT PARGS_QUIET) - message("${git_output}") - endif() - - # now checkout the right commit - if(PARGS_GIT_TAG) - execute_process( - COMMAND ${GIT_EXECUTABLE} fetch --all --tags --prune - COMMAND ${GIT_EXECUTABLE} checkout tags/${PARGS_GIT_TAG} -b tag_${PARGS_GIT_TAG} - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - elseif(PARGS_GIT_BRANCH OR PARGS_GIT_COMMIT) - execute_process( - COMMAND ${GIT_EXECUTABLE} checkout ${PARGS_GIT_BRANCH} - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - else() - message(STATUS "no tag specified, defaulting to master") - execute_process( - COMMAND ${GIT_EXECUTABLE} checkout master - WORKING_DIRECTORY ${${SOURCE_DIR}} - OUTPUT_VARIABLE git_output) - endif() - - if(NOT PARGS_QUIET) - 
message("${git_output}") - endif() -endfunction() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE deleted file mode 100644 index 7e79e4d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md deleted file mode 100644 index 9d7b1b0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# :construction: I am under construction [![npm version](https://badge.fury.io/js/cmaki.svg)](https://badge.fury.io/js/cmaki) -Don't use it [![Build Status](https://travis-ci.org/makiolo/cmaki.svg?branch=master)](https://travis-ci.org/makiolo/cmaki) -# quick -bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/bootstrap.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake deleted file mode 100644 index a76708c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/Utils.cmake +++ /dev/null @@ -1,32 +0,0 @@ -# returns true if only a single one of its arguments is true -function(xor result) - set(true_args_count 0) - - foreach(foo ${ARGN}) - if(foo) - math(EXPR true_args_count "${true_args_count}+1") - endif() - endforeach() - - if(NOT (${true_args_count} EQUAL 1)) - set(${result} FALSE PARENT_SCOPE) - else() - set(${result} TRUE PARENT_SCOPE) - endif() -endfunction() - -function(at_most_one result) - set(true_args_count 0) - - foreach(foo ${ARGN}) - if(foo) - math(EXPR true_args_count "${true_args_count}+1") - endif() - endforeach() - - if(${true_args_count} GREATER 1) - set(${result} FALSE PARENT_SCOPE) - else() - set(${result} TRUE PARENT_SCOPE) - endif() -endfunction() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh deleted file mode 100755 index faeadbd..0000000 --- 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/ci/detect_operative_system.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash - -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" - -if [[ "$WINEARCH" = "win32" ]]; then - wine $CMAKI_INSTALL/cmaki_identifier.exe -else - $CMAKI_EMULATOR $CMAKI_INSTALL/cmaki_identifier -fi - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake deleted file mode 100644 index 74b034f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/cmaki.cmake +++ /dev/null @@ -1,529 +0,0 @@ -if(NOT DEFINED CMAKE_MODULE_PATH) - set(CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}) -endif() - -IF(NOT DEFINED CMAKI_PATH) - set(CMAKI_PATH ${CMAKE_CURRENT_LIST_DIR}) -ENDIF() - -include("${CMAKE_CURRENT_LIST_DIR}/facts/facts.cmake") -include("${CMAKE_CURRENT_LIST_DIR}/GitUtils.cmake") - -option(FIRST_ERROR "stop on first compilation error" FALSE) - -macro(cmaki_setup) - enable_modern_cpp() - enable_testing() - SET(CMAKE_BUILD_TYPE_INIT Release) - set(CMAKE_CXX_STANDARD 14) - set(CMAKE_CXX_STANDARD_REQUIRED ON) - set(CMAKE_CXX_EXTENSIONS ON) - IF(WITH_CONAN) - # Conan - message("-- Using conan dir: ${CMAKE_BINARY_DIR}") - include("${CMAKE_BINARY_DIR}/conanbuildinfo.cmake") - conan_basic_setup() - ENDIF() -endmacro() - -macro (mark_as_internal _var) - set(${_var} ${${_var}} CACHE INTERNAL "hide this!" FORCE) -endmacro(mark_as_internal _var) - -macro (option_combobox _var options default_option comment) - set(${_var} "${default_option}" CACHE STRING "${comment}") - set(${_var}Values "${options}" CACHE INTERNAL "hide this!" 
FORCE) - set_property(CACHE ${_var} PROPERTY STRINGS ${${_var}Values}) -endmacro() - -function(cmaki_install_file FROM) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_install_file_into FROM TO) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_install_file_and_rename FROM NEWNAME) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) - endforeach() -endfunction() - -function(cmaki_install_file_into_and_rename FROM TO NEWNAME) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL(FILES ${FROM} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE} RENAME ${NEWNAME}) - endforeach() -endfunction() - -function(cmaki_install_files FROM) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - FILE(GLOB files ${FROM}) - INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_install_files_into FROM TO) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - FILE(GLOB files ${FROM}) - INSTALL(FILES ${files} DESTINATION ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE}/${TO} CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -macro(cmaki_install_inside_dir _DESTINE) - file(GLOB DEPLOY_FILES_AND_DIRS "${_DESTINE}/*") - foreach(ITEM ${DEPLOY_FILES_AND_DIRS}) - IF( IS_DIRECTORY "${ITEM}" ) - LIST( APPEND DIRS_TO_DEPLOY "${ITEM}" ) - ELSE() - IF(ITEM STREQUAL "${_DESTINE}/CMakeLists.txt") - MESSAGE("skipped file: ${_DESTINE}/CMakeLists.txt") - ELSE() - LIST(APPEND FILES_TO_DEPLOY "${ITEM}") - ENDIF() - ENDIF() - endforeach() - INSTALL(FILES ${FILES_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) - INSTALL(DIRECTORY ${DIRS_TO_DEPLOY} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) -endmacro() - -macro(cmaki_install_dir _DESTINE) - INSTALL(DIRECTORY ${_DESTINE} DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE} USE_SOURCE_PERMISSIONS) -endmacro() - -macro(cmaki_parse_parameters) - set(PARAMETERS ${ARGV}) - list(GET PARAMETERS 0 _MAIN_NAME) - list(REMOVE_AT PARAMETERS 0) - SET(HAVE_TESTS FALSE) - SET(HAVE_PCH FALSE) - SET(HAVE_PTHREADS FALSE) - set(_DEPENDS) - set(_SOURCES) - set(_TESTS) - set(_PCH) - set(_INCLUDES) - set(_SUFFIX_DESTINATION) - set(NOW_IN SOURCES) - while(PARAMETERS) - list(GET PARAMETERS 0 PARM) - if(PARM STREQUAL DEPENDS) - set(NOW_IN DEPENDS) - elseif(PARM STREQUAL SOURCES) - set(NOW_IN SOURCES) - elseif(PARM STREQUAL TESTS) - set(NOW_IN TESTS) - elseif(PARM STREQUAL PCH) - set(NOW_IN PCH) - elseif(PARM STREQUAL PTHREADS) - if(NOT WIN32) - # no enabled in windows - set(HAVE_PTHREADS TRUE) - endif() - elseif(PARM STREQUAL INCLUDES) - set(NOW_IN INCLUDES) - elseif(PARM STREQUAL DESTINATION) - set(NOW_IN DESTINATION) - else() - if(NOW_IN STREQUAL DEPENDS) - set(_DEPENDS ${_DEPENDS} ${PARM}) - elseif(NOW_IN STREQUAL SOURCES) - set(_SOURCES ${_SOURCES} ${PARM}) - elseif(NOW_IN STREQUAL TESTS) - set(_TESTS ${_TESTS} ${PARM}) - SET(HAVE_TESTS TRUE) - elseif(NOW_IN STREQUAL PCH) - set(_PCH ${PARM}) - SET(HAVE_PCH TRUE) - elseif(NOW_IN STREQUAL INCLUDES) - set(_INCLUDES ${_INCLUDES} ${PARM}) - elseif(NOW_IN STREQUAL DESTINATION) - 
set(_SUFFIX_DESTINATION ${PARM}) - else() - message(FATAL_ERROR "Unknown argument ${PARM}.") - endif() - endif() - list(REMOVE_AT PARAMETERS 0) - endwhile() -endmacro() - -function(cmaki_simple_executable) - cmaki_parse_parameters(${ARGV}) - set(_EXECUTABLE_NAME ${_MAIN_NAME}) - MESSAGE("++ executable ${_EXECUTABLE_NAME}") - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_EXECUTABLE_NAME}) - foreach(INCLUDE_DIR ${_INCLUDES}) - target_include_directories(${_EXECUTABLE_NAME} ${INCLUDE_DIR}) - endforeach() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - if(WIN32) - ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) - else() - ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) - endif() - target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_EXECUTABLE_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_EXECUTABLE_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() - generate_clang() - -endfunction() - -function(cmaki_simple_library) - cmaki_parse_parameters(${ARGV}) - set(_LIBRARY_NAME ${_MAIN_NAME}) - MESSAGE("++ library ${_LIBRARY_NAME}") - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_LIBRARY_NAME}) - foreach(INCLUDE_DIR ${_INCLUDES}) - target_include_directories(${_LIBRARY_NAME} ${INCLUDE_DIR}) - endforeach() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) - target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_LIBRARY_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_LIBRARY_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() - generate_clang() - -endfunction() - -function(cmaki_simple_test) - cmaki_parse_parameters(${ARGV}) - set(_TEST_NAME ${_MAIN_NAME}) - common_flags() - common_linking(${_TEST_NAME}) - MESSAGE("++ test ${_TEST_NAME}") - foreach(INCLUDE_DIR ${_INCLUDES}) - target_include_directories(${_TEST_NAME} ${INCLUDE_DIR}) - endforeach() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_executable(${_TEST_NAME} ${_SOURCES}) - target_link_libraries(${_TEST_NAME} ${_DEPENDS}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_TEST_NAME} -lpthread) - endif() - endif() - common_linking(${_TEST_NAME}) - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_TEST_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - if(WIN32) - add_test( - NAME ${_TEST_NAME}__ - COMMAND ${_TEST_NAME} - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - else() - 
- if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) - find_program(VALGRIND "valgrind") - if(VALGRIND) - add_test( - NAME ${_TEST_NAME}_memcheck - COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_cachegrind - COMMAND "${VALGRIND}" --tool=cachegrind $ - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_helgrind - COMMAND "${VALGRIND}" --tool=helgrind $ - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_callgrind - COMMAND "${VALGRIND}" --tool=callgrind $ - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_drd - COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - else() - message(FATAL_ERROR "no valgrind detected") - endif() - else() - add_test( - NAME ${_TEST_NAME}_test - COMMAND bash cmaki_emulator.sh $ - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - CONFIGURATIONS ${BUILD_TYPE}) - endif() - endif() - endforeach() - generate_vcxproj_user(${_TEST_NAME}) - generate_clang() - -endfunction() - -macro(common_linking) - - set(PARAMETERS ${ARGV}) - list(GET PARAMETERS 0 TARGET) - # if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) - # target_link_libraries(${TARGET} -lubsan) - # endif() - -endmacro() - -macro(common_flags) - - if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) - add_definitions(/wd4251) - add_definitions(/wd4275) - add_definitions(/wd4239) - add_definitions(/wd4316) - add_definitions(/wd4127) - add_definitions(/wd4245) - add_definitions(/wd4458) - add_definitions(/wd4146) - add_definitions(/wd4244) - add_definitions(/wd4189) - add_definitions(/wd4100) - add_definitions(/wd4706) - add_definitions(/WX /W4) - add_definitions(-Zm200) - endif() - - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") - endif() - -endmacro() - -macro(enable_modern_cpp) - - if(WIN32 AND (NOT MINGW) AND (NOT MSYS)) - add_definitions(/EHsc) - add_definitions(/D_SCL_SECURE_NO_WARNINGS) - else() - # add_definitions(-fno-rtti -fno-exceptions ) - # activate all warnings and convert in errors - # add_definitions(-Weffc++) - # add_definitions(-pedantic -pedantic-errors) - - # Python: need disabling: initialization discards ‘const’ qualifier from pointer target type - # add_definitions(-Werror) - - add_definitions(-Wall -Wextra -Waggregate-return -Wcast-align -Wcast-qual -Wconversion) - add_definitions(-Wdisabled-optimization -Wformat=2 -Wformat-nonliteral -Wformat-security -Wformat-y2k) - add_definitions(-Wimport -Winit-self -Winline -Winvalid-pch -Wlong-long -Wmissing-field-initializers -Wmissing-format-attribute) - add_definitions(-Wpointer-arith -Wredundant-decls -Wshadow) - add_definitions(-Wstack-protector -Wunreachable-code -Wunused) - add_definitions(-Wunused-parameter -Wvariadic-macros -Wwrite-strings) - add_definitions(-Wswitch-default -Wswitch-enum) - # only gcc - # convert error in warnings - add_definitions(-Wno-error=shadow) - add_definitions(-Wno-error=long-long) - add_definitions(-Wno-error=aggregate-return) - 
add_definitions(-Wno-error=unused-variable) - add_definitions(-Wno-error=unused-parameter) - add_definitions(-Wno-error=deprecated-declarations) - add_definitions(-Wno-error=missing-include-dirs) - add_definitions(-Wno-error=packed) - add_definitions(-Wno-error=switch-default) - add_definitions(-Wno-error=float-equal) - add_definitions(-Wno-error=invalid-pch) - add_definitions(-Wno-error=cast-qual) - add_definitions(-Wno-error=conversion) - add_definitions(-Wno-error=switch-enum) - add_definitions(-Wno-error=redundant-decls) - add_definitions(-Wno-error=stack-protector) - add_definitions(-Wno-error=extra) - add_definitions(-Wno-error=unused-result) - add_definitions(-Wno-error=sign-compare) - - # raknet - add_definitions(-Wno-error=address) - add_definitions(-Wno-error=cast-qual) - add_definitions(-Wno-error=missing-field-initializers) - add_definitions(-Wno-error=write-strings) - add_definitions(-Wno-error=format-nonliteral) - - # sdl2 - add_definitions(-Wno-error=sign-conversion) - - # TODO: remove - add_definitions(-Wno-error=reorder) - - # if not have openmp - add_definitions(-Wno-error=unknown-pragmas) - - if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") - add_definitions(-Wno-error=suggest-attribute=format) - add_definitions(-Wno-error=suggest-attribute=noreturn) - add_definitions(-Wno-aggregate-return) - add_definitions(-Wno-long-long) - add_definitions(-Wno-shadow) - add_definitions(-Wno-strict-aliasing) - add_definitions(-Wno-error=inline) - add_definitions(-Wno-error=maybe-uninitialized) - add_definitions(-Wno-error=unused-but-set-variable) - add_definitions(-Wno-error=unused-local-typedefs) - # add_definitions(-Wno-error=float-conversion) - else() - add_definitions(-Wstrict-aliasing=2) - add_definitions(-Wno-error=format-nonliteral) - add_definitions(-Wno-error=cast-align) - add_definitions(-Wno-error=deprecated-register) - add_definitions(-Wno-error=mismatched-tags) - add_definitions(-Wno-error=overloaded-virtual) - add_definitions(-Wno-error=unused-private-field) - add_definitions(-Wno-error=unreachable-code) - # add_definitions(-Wno-error=discarded-qualifiers) - endif() - - # In Linux default now is not export symbols - # add_definitions(-fvisibility=hidden) - - # stop in first error - if(FIRST_ERROR) - add_definitions(-Wfatal-errors) - endif() - - endif() - - if (NOT DEFINED EXTRA_DEF) - if(NOT WIN32 OR MINGW OR MSYS) - include(CheckCXXCompilerFlag) - CHECK_CXX_COMPILER_FLAG("-std=c++14" COMPILER_SUPPORTS_CXX14) - CHECK_CXX_COMPILER_FLAG("-std=c++1y" COMPILER_SUPPORTS_CXX1Y) - CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) - CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) - - if(COMPILER_SUPPORTS_CXX14) - set(CMAKE_CXX_STANDARD 14) - message("-- C++14 Enabled") - elseif(COMPILER_SUPPORTS_CXX11) - set(CMAKE_CXX_STANDARD 11) - message("-- C++11 Enabled") - elseif(COMPILER_SUPPORTS_CXX0X) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") - message("-- C++0x Enabled") - else() - message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.") - endif() - endif() - else() - add_definitions(${EXTRA_DEF}) - endif() - - # TODO: need different combinations of artifacts (coverage=off / coverage=on, etc ...) 
- # if ((DEFINED COVERAGE) AND (COVERAGE STREQUAL "TRUE")) - # https://github.com/google/sanitizers/wiki/AddressSanitizerAsDso - # flags - if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O0 --coverage") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-elide-constructors") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-inline") - endif() - - # linker flags - if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND (CMAKE_BUILD_TYPE STREQUAL "Debug")) - SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage") - endif() - # endif() - -endmacro() - -macro(generate_vcxproj_user _EXECUTABLE_NAME) - IF(MSVC) - set(project_vcxproj_user "${CMAKE_CURRENT_BINARY_DIR}/${_EXECUTABLE_NAME}.vcxproj.user") - if (NOT EXISTS ${project_vcxproj_user}) - FILE(WRITE "${project_vcxproj_user}" - "\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n" - "$(TargetDir)\n" - "WindowsLocalDebugger\n" - "\n" - "\n") - endif() - ENDIF() -endmacro() - -macro(generate_clang) - # Generate .clang_complete for full completation in vim + clang_complete - set(extra_parameters "") - get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY INCLUDE_DIRECTORIES) - foreach(dir ${dirs}) - set(extra_parameters ${extra_parameters} -I${dir}) - endforeach() - get_property(dirs DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY COMPILE_DEFINITIONS) - foreach(dir ${dirs}) - set(extra_parameters ${extra_parameters} -D${dir}) - endforeach() - STRING(REGEX REPLACE ";" "\n" extra_parameters "${extra_parameters}") - FILE(WRITE "${CMAKE_CURRENT_SOURCE_DIR}/.clang_complete" "${extra_parameters}\n") -endmacro() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake deleted file mode 100644 index b5409fd..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/facts/facts.cmake +++ /dev/null @@ -1,735 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -cmake_policy(SET CMP0011 NEW) -cmake_policy(SET CMP0045 OLD) - -find_program(PYTHON_EXECUTABLE NAMES python3.6 python3.5 python3 python) - -IF(NOT DEFINED CMAKI_PWD) - set(CMAKI_PWD $ENV{CMAKI_PWD}) -ENDIF() - -IF(NOT DEFINED CMAKI_INSTALL) - set(CMAKI_INSTALL $ENV{CMAKI_INSTALL}) -ENDIF() - -IF(NOT DEFINED NPP_ARTIFACTS_PATH) - set(NPP_ARTIFACTS_PATH ${CMAKI_PWD}/artifacts) -ENDIF() - -IF(NOT DEFINED CMAKE_PREFIX_PATH) - set(CMAKE_PREFIX_PATH ${NPP_ARTIFACTS_PATH}/cmaki_find_package) -ENDIF() - -IF(NOT DEFINED NPP_GENERATOR_PATH) - set(NPP_GENERATOR_PATH ${CMAKI_PATH}/../cmaki_generator) -ENDIF() - -IF(NOT DEFINED NPP_PACKAGE_JSON_FILE) - set(NPP_PACKAGE_JSON_FILE ${CMAKI_PATH}/../../artifacts.json) -ENDIF() - -if(NOT DEFINED CMAKI_IDENTIFIER OR NOT DEFINED CMAKI_PLATFORM) - set(ENV{CMAKI_INFO} ALL) - include(${CMAKI_PWD}/bin/cmaki_identifier.cmake) - set(CMAKI_IDENTIFIER "${PLATFORM}") - set(CMAKI_PLATFORM "${PLATFORM}") -endif() - -MESSAGE("CMAKI_PWD = ${CMAKI_PWD}") -MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") -MESSAGE("CMAKI_PATH = ${CMAKI_PATH}") -MESSAGE("NPP_ARTIFACTS_PATH = ${NPP_ARTIFACTS_PATH}") -MESSAGE("NPP_GENERATOR_PATH = ${NPP_GENERATOR_PATH}") -MESSAGE("NPP_PACKAGE_JSON_FILE = ${NPP_PACKAGE_JSON_FILE}") -MESSAGE("CMAKE_PREFIX_PATH = ${CMAKE_PREFIX_PATH}") -MESSAGE("CMAKE_MODULE_PATH = 
${CMAKE_MODULE_PATH}") -MESSAGE("CMAKI_IDENTIFIER = ${CMAKI_IDENTIFIER}") -MESSAGE("CMAKI_PLATFORM = ${CMAKI_PLATFORM}") - -function(cmaki_find_package) - - message("-- begin cmaki_find_package") - - set(PARAMETERS ${ARGV}) - list(LENGTH PARAMETERS ARGV_LENGTH) - list(GET PARAMETERS 0 PACKAGE) - set(VERSION_REQUEST "") - set(CALL_RECURSIVE "TRUE") - set(PARM1 "") - if(ARGV_LENGTH GREATER 1) - list(GET PARAMETERS 1 PARM1) - message("-- extra parm1: ${PARM1}") - if(PARM1 STREQUAL "NONRECURSIVE") - message("${PACKAGE} is not recursive") - set(CALL_RECURSIVE "FALSE") - else() - message("${PACKAGE} is recursive") - set(VERSION_REQUEST "${PARM1}") - endif() - endif() - - IF(NOT DEFINED CMAKI_REPOSITORY) - set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") - ENDIF() - - # 2.5. define flags - set(FORCE_GENERATION NOT "$ENV{NPP_CACHE}") - - if(VERSION_REQUEST STREQUAL "") - ## - message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE}") - ## - # 1. obtener la version actual (o ninguno en caso de no tener el artefacto) - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/get_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - set(VERSION_REQUEST "${RESULT_VERSION}") - set(EXTRA_VERSION "--version=${VERSION_REQUEST}") - else() - set(VERSION_REQUEST "") - set(EXTRA_VERSION "") - endif() - - else() - # explicit version required from parameters - set(EXTRA_VERSION "--version=${VERSION_REQUEST}") - endif() - - message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION}") - ####################################################### - # 2. obtener la mejor version buscando en la cache local y remota - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} ${EXTRA_VERSION} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - list(GET RESULT_VERSION 0 PACKAGE_MODE) - list(GET RESULT_VERSION 1 PACKAGE_NAME) - list(GET RESULT_VERSION 2 VERSION) - message("now PACKAGE_MODE = ${PACKAGE_MODE}") - message("now PACKAGE_NAME = ${PACKAGE_NAME}") - message("now VERSION = ${VERSION}") - if(PACKAGE_MODE STREQUAL "UNSUITABLE") - set(PACKAGE_MODE "EXACT") - set(VERSION ${VERSION_REQUEST}) - message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... (error 1)") - # avoid remote cache, need build - set(FORCE_GENERATION "TRUE") - endif() - else() - set(PACKAGE_MODE "EXACT") - set(VERSION ${VERSION_REQUEST}) - message("-- need build package ${PACKAGE} can't get version: ${VERSION_REQUEST}, will be generated... 
(error 2)") - # avoid remote cache, need build - set(FORCE_GENERATION "TRUE") - endif() - ####################################################### - - # cmaki_find_package of depends - message("COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet") - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} --plan --quiet - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE DEPENDS_PACKAGES - OUTPUT_STRIP_TRAILING_WHITESPACE) - - if("${CALL_RECURSIVE}") - foreach(DEP ${DEPENDS_PACKAGES}) - if(PACKAGE STREQUAL "${DEP}") - message("-- skip: ${DEP}") - else() - message("-- cmaki_find_package: ${DEP}") - cmaki_find_package("${DEP}" NONRECURSIVE) - endif() - endforeach() - endif() - - get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) - get_filename_component(package_name_version "${package_dir}" NAME) - - # 3. si no tengo los ficheros de cmake, los intento descargar - set(artifacts_dir "${NPP_ARTIFACTS_PATH}") - set(depends_bin_package "${artifacts_dir}/${PACKAGE}-${VERSION}") - set(depends_package "${artifacts_dir}/${PACKAGE}-${VERSION}") - # pido un paquete, en funcion de: - # - paquete - # - version - # - plataforma - # - modo (COMPATIBLE / EXACT) - # Recibo el que mejor se adapta a mis especificaciones - # Otra opcion es enviar todos los ficheros de cmake de todas las versiones - - set(package_cmake_filename "${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz") - set(package_marker "${CMAKE_PREFIX_PATH}/${package_name_version}/${CMAKI_IDENTIFIER}.cmake") - set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") - set(package_generated_file ${artifacts_dir}/${package_filename}) - - set(COPY_SUCCESFUL FALSE) - IF(EXISTS "${package_cmake_abspath}") - message("-- reusing cmake file ${package_cmake_abspath}") - set(COPY_SUCCESFUL TRUE) - else() - if(NOT "${FORCE_GENERATION}") - set(http_package_cmake_filename "${CMAKI_REPOSITORY}/download.php?file=${package_cmake_filename}") - message("-- download file: ${http_package_cmake_filename} in ${package_cmake_abspath}") - cmaki_download_file("${http_package_cmake_filename}" "${package_cmake_abspath}") - if(NOT "${COPY_SUCCESFUL}") - file(REMOVE "${package_binary_filename}") - message("Error downloading ${http_package_cmake_filename}") - endif() - else() - message("WARN: no using cache remote for: ${PACKAGE}") - endif() - endif() - - if(NOT "${COPY_SUCCESFUL}") - message("fail download") - else() - message("reused or downloaded") - endif() - - # si la descarga no ha ido bien O no quieres utilizar cache - if(NOT "${COPY_SUCCESFUL}" OR FORCE_GENERATION STREQUAL "TRUE") - - # 5. 
compilo y genera el paquete en local - message("Generating artifact ${PACKAGE} ...") - - ### - message("${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o") - ### - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/build.py ${PACKAGE} --rootdir=${NPP_GENERATOR_PATH} --depends=${NPP_PACKAGE_JSON_FILE} --cmakefiles=${CMAKI_PATH} --prefix=${NPP_ARTIFACTS_PATH} --third-party-dir=${CMAKE_PREFIX_PATH} --server=${CMAKI_REPOSITORY} -o - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - RESULT_VARIABLE artifacts_result - ) - if(artifacts_result) - message(FATAL_ERROR "can't create artifact ${PACKAGE}: error ${artifacts_result}") - endif() - - ####################################################### - # 6: obtengo la version del paquete creado - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --server=${CMAKI_REPOSITORY} --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - list(GET RESULT_VERSION 0 PACKAGE_MODE) - list(GET RESULT_VERSION 1 PACKAGE_NAME) - list(GET RESULT_VERSION 2 VERSION) - message("NEW! PACKAGE_MODE = ${PACKAGE_MODE}") - message("NEW! PACKAGE_NAME = ${PACKAGE_NAME}") - message("NEW! VERSION = ${VERSION}") - else() - message(FATAL_ERROR "-- not found ${PACKAGE}.") - endif() - ####################################################### - - set(package_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz) - set(package_cmake_filename ${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}-cmake.tar.gz) - # refresh name (NEW $VERSION is generated) - set(package_cmake_abspath "${artifacts_dir}/${package_cmake_filename}") - - # 7. descomprimo el artefacto - execute_process( - COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" - WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}" - RESULT_VARIABLE uncompress_result - ) - if(uncompress_result) - message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") - endif() - - # y tambien descomprimo el propio tar gz - # execute_process( - # COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_generated_file}" - # WORKING_DIRECTORY "${artifacts_dir}/" - # RESULT_VARIABLE uncompress_result2 - # ) - # if(uncompress_result2) - # message(FATAL_ERROR "Extracting ${package_generated_file} failed! Error ${uncompress_result2}") - # endif() - - # tengo el cmake pero no esta descomprimido - elseif(EXISTS "${package_cmake_abspath}" AND NOT EXISTS "${package_marker}") - - message("-- only uncompress") - ################ - message("${CMAKE_COMMAND} -E tar zxf ${package_cmake_abspath}") - ################ - - # 10. lo descomprimo - execute_process( - COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_cmake_abspath}" - WORKING_DIRECTORY "${CMAKE_PREFIX_PATH}/" - RESULT_VARIABLE uncompress_result) - if(uncompress_result) - message(FATAL_ERROR "Extracting ${package_cmake_abspath} failed! Error ${uncompress_result}") - endif() - - else() - - # tengo cmake, y esta descomprmido - message("-- nothing to do") - message("-- ${package_cmake_abspath}") - message("-- ${package_marker}") - - endif() - - - # 12. 
hacer find_package tradicional, ahora que tenemos los ficheros de cmake - if(${PACKAGE_MODE} STREQUAL "EXACT") - message("-- using ${PACKAGE} ${VERSION} in EXACT") - find_package(${PACKAGE} ${VERSION} EXACT REQUIRED) - else() - message("-- using ${PACKAGE} ${VERSION} in COMPATIBLE") - find_package(${PACKAGE} ${VERSION} REQUIRED) - endif() - - # generate json - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/save_package.py --name=${PACKAGE} --depends=${NPP_PACKAGE_JSON_FILE} --version=${VERSION} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - if(RESULT_VERSION) - message("error saving ${PACKAGE}:${VERSION} in ${artifacts_dir}") - endif() - - # 13 add includes - string(TOUPPER "${PACKAGE}" PACKAGE_UPPER) - foreach(INCLUDE_DIR ${${PACKAGE_UPPER}_INCLUDE_DIRS}) - list(APPEND CMAKI_INCLUDE_DIRS "${INCLUDE_DIR}") - endforeach() - - # 14. add libdirs - foreach(LIB_DIR ${${PACKAGE_UPPER}_LIBRARIES}) - list(APPEND CMAKI_LIBRARIES "${LIB_DIR}") - endforeach() - - # 15. add vers specific - set(${PACKAGE_UPPER}_INCLUDE_DIRS "${${PACKAGE_UPPER}_INCLUDE_DIRS}" PARENT_SCOPE) - set(${PACKAGE_UPPER}_LIBRARIES "${${PACKAGE_UPPER}_LIBRARIES}" PARENT_SCOPE) - - # 16. add vars globals - set(CMAKI_INCLUDE_DIRS "${CMAKI_INCLUDE_DIRS}" PARENT_SCOPE) - set(CMAKI_LIBRARIES "${CMAKI_LIBRARIES}" PARENT_SCOPE) - - message("-- end cmaki_find_package") - -endfunction() - -macro(cmaki_package_version_check) - # llamar a check_remote_version - # dando el nombre recibo la version - execute_process( - COMMAND ${PYTHON_EXECUTABLE} ${NPP_GENERATOR_PATH}/check_remote_version.py --artifacts=${CMAKE_PREFIX_PATH} --platform=${CMAKI_IDENTIFIER} --name=${PACKAGE_FIND_NAME} --version=${PACKAGE_FIND_VERSION} - WORKING_DIRECTORY "${NPP_GENERATOR_PATH}" - OUTPUT_VARIABLE RESULT_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE) - list(GET RESULT_VERSION 0 RESULT) - list(GET RESULT_VERSION 1 NAME) - list(GET RESULT_VERSION 2 VERSION) - ################################### - set(PACKAGE_VERSION_${RESULT} 1) - set(${NAME}_VERSION ${VERSION}) -endmacro() - -function(cmaki_install_3rdparty) - foreach(CMAKI_3RDPARTY_TARGET ${ARGV}) - foreach(CMAKI_BUILD_TYPE ${CMAKE_CONFIGURATION_TYPES} ${CMAKE_BUILD_TYPE}) - string(TOUPPER "${CMAKI_BUILD_TYPE}" CMAKI_BUILD_TYPE_UPPER) - get_target_property(CMAKI_3RDPARTY_TARGET_TYPE ${CMAKI_3RDPARTY_TARGET} TYPE) - if(${CMAKI_3RDPARTY_TARGET_TYPE} STREQUAL "SHARED_LIBRARY") - get_target_property(CMAKI_3RDPARTY_TARGET_LOCATION ${CMAKI_3RDPARTY_TARGET} IMPORTED_LOCATION_${CMAKI_BUILD_TYPE_UPPER}) - get_target_property(CMAKI_3RDPARTY_TARGET_SONAME ${CMAKI_3RDPARTY_TARGET} IMPORTED_SONAME_${CMAKI_BUILD_TYPE_UPPER}) - get_target_property(CMAKI_3RDPARTY_TARGET_PDB ${CMAKI_3RDPARTY_TARGET} IMPORTED_PDB_${CMAKI_BUILD_TYPE_UPPER}) - if(CMAKI_3RDPARTY_TARGET_SONAME) - get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION_PATH "${CMAKI_3RDPARTY_TARGET_LOCATION}" PATH) - set(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION_PATH}/${CMAKI_3RDPARTY_TARGET_SONAME}") - endif() - get_filename_component(CMAKI_3RDPARTY_TARGET_INSTALLED_NAME "${CMAKI_3RDPARTY_TARGET_LOCATION}" NAME) - get_filename_component(CMAKI_3RDPARTY_TARGET_LOCATION "${CMAKI_3RDPARTY_TARGET_LOCATION}" REALPATH) - install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_LOCATION} - DESTINATION ${CMAKI_BUILD_TYPE} - CONFIGURATIONS ${CMAKI_BUILD_TYPE} - RENAME ${CMAKI_3RDPARTY_TARGET_INSTALLED_NAME}) - if((NOT UNIX) AND EXISTS ${CMAKI_3RDPARTY_TARGET_PDB}) - 
get_filename_component(CMAKI_3RDPARTY_TARGET_PDB_NAME "${CMAKI_3RDPARTY_TARGET_PDB}" NAME) - install(PROGRAMS ${CMAKI_3RDPARTY_TARGET_PDB} - DESTINATION ${CMAKI_BUILD_TYPE} - CONFIGURATIONS ${CMAKI_BUILD_TYPE} - RENAME ${CMAKI_3RDPARTY_TARGET_PDB_NAME}) - endif() - endif() - endforeach() - endforeach() -endfunction() - -function(cmaki_download_file THE_URL INTO_FILE) - set(COPY_SUCCESFUL FALSE PARENT_SCOPE) - file(DOWNLOAD ${THE_URL} ${INTO_FILE} STATUS RET) - list(GET RET 0 RET_CODE) - if(RET_CODE EQUAL 0) - set(COPY_SUCCESFUL TRUE PARENT_SCOPE) - else() - set(COPY_SUCCESFUL FALSE PARENT_SCOPE) - endif() -endfunction() - -macro(cmaki_download_package) - - message("-- begin cmaki_download_package") - if(NOT DEFINED CMAKI_REPOSITORY) - set(CMAKI_REPOSITORY "$ENV{NPP_SERVER}") - endif() - get_filename_component(package_dir "${CMAKE_CURRENT_LIST_FILE}" PATH) - get_filename_component(package_name_version "${package_dir}" NAME) - set(package_filename "${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz") - set(http_package_filename ${CMAKI_REPOSITORY}/download.php?file=${package_filename}) - set(artifacts_dir "${NPP_ARTIFACTS_PATH}") - get_filename_component(artifacts_dir "${artifacts_dir}" ABSOLUTE) - set(package_binary_filename "${artifacts_dir}/${PACKAGE}-${VERSION}-${CMAKI_IDENTIFIER}.tar.gz") - set(package_uncompressed_dir "${artifacts_dir}/${package_name_version}-binary.tmp") - set(package_marker "${artifacts_dir}/${package_name_version}/${CMAKI_IDENTIFIER}") - set(package_compressed_md5 "${package_dir}/${package_name_version}-${CMAKI_IDENTIFIER}.md5") - set(_MY_DIR "${package_dir}") - set(_DIR "${artifacts_dir}/${package_name_version}") - - if(NOT EXISTS "${package_binary_filename}") - message("download ${package_binary_filename} ...") - if(EXISTS "${package_compressed_md5}") - file(READ "${package_compressed_md5}" md5sum ) - string(REGEX MATCH "[0-9a-fA-F]*" md5sum "${md5sum}") - # TODO: use md5sum (use python for download) - # cmaki_download_file("${http_package_filename}" "${package_binary_filename}" "${md5sum}" ) - message("downloading ${http_package_filename}") - cmaki_download_file("${http_package_filename}" "${package_binary_filename}") - if(NOT "${COPY_SUCCESFUL}") - file(REMOVE "${package_binary_filename}") - message(FATAL_ERROR "Error downloading ${http_package_filename}") - endif() - else() - file(REMOVE_RECURSE "${package_dir}") - file(REMOVE_RECURSE "${_DIR}") - MESSAGE(FATAL_ERROR "Checksum for ${package_name_version}-${CMAKI_IDENTIFIER}.tar.gz not found. Rejecting to download an untrustworthy file.") - endif() - endif() - - if(NOT EXISTS "${package_marker}") - message("Extracting ${package_binary_filename} into ${package_uncompressed_dir}...") - file(MAKE_DIRECTORY "${package_uncompressed_dir}") - execute_process( - COMMAND "${CMAKE_COMMAND}" -E tar zxf "${package_binary_filename}" - WORKING_DIRECTORY "${package_uncompressed_dir}" - RESULT_VARIABLE uncompress_result) - if(uncompress_result) - message(FATAL_ERROR "Extracting ${package_binary_filename} failed! 
Error ${uncompress_result}") - endif() - file(COPY "${package_uncompressed_dir}/${package_name_version}" DESTINATION "${artifacts_dir}") - file(REMOVE_RECURSE "${package_uncompressed_dir}") - endif() - message("-- end cmaki_download_package") - -endmacro() - -function(cmaki_executable) - cmaki_parse_parameters(${ARGV}) - set(_EXECUTABLE_NAME ${_MAIN_NAME}) - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_EXECUTABLE_NAME}) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - if(WIN32) - ADD_EXECUTABLE(${_EXECUTABLE_NAME} WIN32 ${_SOURCES}) - else() - ADD_EXECUTABLE(${_EXECUTABLE_NAME} ${_SOURCES}) - endif() - # set_target_properties(${_EXECUTABLE_NAME} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_EXECUTABLE_NAME} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_EXECUTABLE_NAME} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_EXECUTABLE_NAME} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_EXECUTABLE_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_EXECUTABLE_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() - generate_vcxproj_user(${_EXECUTABLE_NAME}) - -endfunction() - -function(cmaki_library) - cmaki_parse_parameters(${ARGV}) - set(_LIBRARY_NAME ${_MAIN_NAME}) - source_group( "Source Files" FILES ${_SOURCES} ) - common_flags() - common_linking(${_LIBRARY_NAME}) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_library(${_LIBRARY_NAME} SHARED ${_SOURCES}) - # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_LIBRARY_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_LIBRARY_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_static_library) - cmaki_parse_parameters(${ARGV}) - set(_LIBRARY_NAME ${_MAIN_NAME}) - source_group( "Source Files" 
FILES ${_SOURCES} ) - common_flags() - common_linking(${_LIBRARY_NAME}) - add_definitions(-D${_LIBRARY_NAME}_STATIC) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_library(${_LIBRARY_NAME} STATIC ${_SOURCES}) - # set_target_properties(${_LIBRARY_NAME} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_LIBRARY_NAME} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_LIBRARY_NAME} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_LIBRARY_NAME} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_LIBRARY_NAME} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_LIBRARY_NAME} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endfunction() - -function(cmaki_test) - cmaki_parse_parameters(${ARGV}) - set(_TEST_NAME ${_MAIN_NAME}) - set(_TEST_SUFFIX "_unittest") - common_flags() - common_linking(${_TEST_NAME}${_TEST_SUFFIX}) - include_directories(node_modules) - foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) - endforeach() - IF(WITH_CONAN) - include_directories(${CONAN_INCLUDE_DIRS}) - ENDIF() - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - add_compile_options(-pthread) - endif() - endif() - add_executable(${_TEST_NAME}${_TEST_SUFFIX} ${_SOURCES}) - # set_target_properties(${_TEST_NAME}${_TEST_SUFFIX} PROPERTIES DEBUG_POSTFIX _d) - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${_DEPENDS}) - foreach(LIB_DIR ${CMAKI_LIBRARIES}) - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${LIB_DIR}) - cmaki_install_3rdparty(${LIB_DIR}) - endforeach() - IF(WITH_CONAN) - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} ${CONAN_LIBS}) - cmaki_install_3rdparty(${CONAN_LIBS}) - ENDIF() - install(DIRECTORY ${CONAN_LIB_DIRS}/ DESTINATION ${CMAKE_BUILD_TYPE}) - if(HAVE_PTHREADS) - if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - message("-- android no need extra linkage for pthreads") - else() - target_link_libraries(${_TEST_NAME}${_TEST_SUFFIX} -lpthread) - endif() - endif() - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_TEST_NAME}${_TEST_SUFFIX} - DESTINATION ${BUILD_TYPE}/${_SUFFIX_DESTINATION} - CONFIGURATIONS ${BUILD_TYPE}) - if (DEFINED TESTS_VALGRIND AND (TESTS_VALGRIND STREQUAL "TRUE") AND (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") AND (CMAKE_BUILD_TYPE STREQUAL "Release")) - find_program(VALGRIND "valgrind") - if(VALGRIND) - add_test( - NAME ${_TEST_NAME}_valgrind_memcheck - COMMAND "${VALGRIND}" --tool=memcheck --leak-check=yes --show-reachable=yes --num-callers=20 --track-fds=yes $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_cachegrind - COMMAND "${VALGRIND}" --tool=cachegrind $ --gmock_verbose=error - 
WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_helgrind - COMMAND "${VALGRIND}" --tool=helgrind $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_callgrind - COMMAND "${VALGRIND}" --tool=callgrind $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - add_test( - NAME ${_TEST_NAME}_valgrind_drd - COMMAND "${VALGRIND}" --tool=drd --read-var-info=yes $ --gmock_verbose=error - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE} - ) - else() - message(FATAL_ERROR "no valgrind detected") - endif() - endif() - if(WIN32) - add_test( - NAME ${_TEST_NAME}${_TEST_SUFFIX} - COMMAND $ - WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE}) - else() - add_test( - NAME ${_TEST_NAME}${_TEST_SUFFIX} - COMMAND bash ../cmaki_emulator.sh $ - WORKING_DIRECTORY ${CMAKI_INSTALL}/${BUILD_TYPE} - CONFIGURATIONS ${BUILD_TYPE}) - endif() - endforeach() - generate_vcxproj_user(${_TEST_NAME}) - -endfunction() - -macro(cmaki_google_test) - find_package(GTest REQUIRED) - find_package(GMock REQUIRED) - add_definitions(-DWITH_MAIN) - add_definitions(-DWITH_GMOCK) - set(PARAMETERS ${ARGV}) - list(GET PARAMETERS 0 _MAIN_NAME) - cmaki_test(${ARGV}) -endmacro() - -macro(cmaki_python_library) - # cmaki_find_package(python) - # cmaki_find_package(boost-python) - cmaki_library(${ARGV} PTHREADS) - cmaki_parse_parameters(${ARGV}) - set_target_properties(${_MAIN_NAME} PROPERTIES PREFIX "") - foreach(BUILD_TYPE ${CMAKE_BUILD_TYPE}) - INSTALL( TARGETS ${_MAIN_NAME} - DESTINATION ${BUILD_TYPE}/lib/python3.5/lib-dynload - CONFIGURATIONS ${BUILD_TYPE}) - endforeach() -endmacro() - -macro(cmaki_boost_python_test) - # cmaki_find_package(python) - # cmaki_find_package(boost-python) - cmaki_google_test(${ARGV} PTHREADS) - cmaki_parse_parameters(${ARGV}) - set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") -endmacro() - -macro(cmaki_python_test) - # cmaki_find_package(python) - cmaki_parse_parameters(${ARGV}) - add_test( NAME ${_MAIN_NAME}_test - COMMAND ./bin/python3 ${_SOURCES} - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}) - set_tests_properties(${_MAIN_NAME}_test PROPERTIES ENVIRONMENT "LD_LIBRARY_PATH=${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") -endmacro() - -macro(cmaki_python_install) - # cmaki_find_package(python) - # cmaki_find_package(boost-python) - get_filename_component(PYTHON_DIR ${PYTHON_EXECUTABLE} DIRECTORY) - get_filename_component(PYTHON_PARENT_DIR ${PYTHON_DIR} DIRECTORY) - cmaki_install_inside_dir(${PYTHON_PARENT_DIR}) -endmacro() - -macro(cmaki_find_package_boost) - if(CMAKE_BUILD_TYPE MATCHES Debug) - set(Boost_DEBUG 1) - else() - set(Boost_DEBUG 0) - endif() - find_package(Boost REQUIRED) - include_directories(${Boost_INCLUDE_DIRS}) -endmacro() - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format deleted file mode 100644 index 008e6b0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/init/.clang-format +++ /dev/null @@ -1,66 +0,0 @@ ---- -Language: Cpp -# BasedOnStyle: WebKit -# indent public: -AccessModifierOffset: -4 -AlignAfterOpenBracket: false 
-AlignEscapedNewlinesLeft: false -AlignOperands: false -AlignTrailingComments: true -AllowAllParametersOfDeclarationOnNextLine: false -AllowShortBlocksOnASingleLine: false -AllowShortCaseLabelsOnASingleLine: false -AllowShortIfStatementsOnASingleLine: false -AllowShortLoopsOnASingleLine: false -AllowShortFunctionsOnASingleLine: All -AlwaysBreakAfterDefinitionReturnType: false -AlwaysBreakTemplateDeclarations: true -AlwaysBreakBeforeMultilineStrings: false -BreakBeforeBinaryOperators: All -BreakBeforeTernaryOperators: true -BreakConstructorInitializersBeforeComma: true -BinPackParameters: true -BinPackArguments: true -ColumnLimit: 100 -ConstructorInitializerAllOnOneLineOrOnePerLine: false -ConstructorInitializerIndentWidth: 4 -DerivePointerAlignment: false -ExperimentalAutoDetectBinPacking: false -IndentCaseLabels: true -IndentWrappedFunctionNames: false -IndentFunctionDeclarationAfterType: false -MaxEmptyLinesToKeep: 2 -KeepEmptyLinesAtTheStartOfBlocks: true -NamespaceIndentation: Inner -ObjCBlockIndentWidth: 4 -ObjCSpaceAfterProperty: true -ObjCSpaceBeforeProtocolList: true -PenaltyBreakBeforeFirstCallParameter: 19 -PenaltyBreakComment: 300 -PenaltyBreakString: 1000 -PenaltyBreakFirstLessLess: 120 -PenaltyExcessCharacter: 1000000 -PenaltyReturnTypeOnItsOwnLine: 60 -PointerAlignment: Left -SpacesBeforeTrailingComments: 2 -Cpp11BracedListStyle: true -Standard: Cpp11 -IndentWidth: 4 -TabWidth: 4 -UseTab: Always -BreakBeforeBraces: Allman -SpacesInParentheses: false -SpacesInSquareBrackets: false -SpacesInAngles: false -SpaceInEmptyParentheses: false -SpacesInCStyleCastParentheses: false -SpaceAfterCStyleCast: false -SpacesInContainerLiterals: true -SpaceBeforeAssignmentOperators: true -ContinuationIndentWidth: 4 -CommentPragmas: '^ IWYU pragma:' -ForEachMacros: [ foreach, Q_FOREACH, BOOST_FOREACH ] -SpaceBeforeParens: ControlStatements -DisableFormat: false -... 
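The cmaki_test function in the deleted facts.cmake registers one extra CTest entry per Valgrind tool (memcheck, cachegrind, helgrind, callgrind, drd) around the same test binary. The sketch below restates that tool/flag matrix in plain Python purely as an illustration; the flags are the ones used in the macro, while the helper name valgrind_commands and the example binary path are hypothetical.

```
from typing import Dict, List

# Tool-specific flags as used by the deleted cmaki_test function;
# every entry wraps the same gmock test binary.
VALGRIND_TOOLS: Dict[str, List[str]] = {
    "memcheck": ["--leak-check=yes", "--show-reachable=yes",
                 "--num-callers=20", "--track-fds=yes"],
    "cachegrind": [],
    "helgrind": [],
    "callgrind": [],
    "drd": ["--read-var-info=yes"],
}

def valgrind_commands(binary: str) -> Dict[str, List[str]]:
    """Build one Valgrind command line per tool for the given test binary."""
    return {
        tool: ["valgrind", f"--tool={tool}", *flags, binary, "--gmock_verbose=error"]
        for tool, flags in VALGRIND_TOOLS.items()
    }

if __name__ == "__main__":
    # hypothetical binary path, shown only to print the generated commands
    for name, cmd in valgrind_commands("./mytest_unittest").items():
        print(name, " ".join(cmd))
```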
- diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl deleted file mode 100644 index 3ea29e5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/CTest2JUnit.xsl +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - BuildName: - BuildStamp: - Name: - Generator: - CompilerName: - OSName: - Hostname: - OSRelease: - OSVersion: - OSPlatform: - Is64Bits: - VendorString: - VendorID: - FamilyID: - ModelID: - ProcessorCacheSize: - NumberOfLogicalCPU: - NumberOfPhysicalCPU: - TotalVirtualMemory: - TotalPhysicalMemory: - LogicalProcessorsPerPhysical: - ProcessorClockFrequency: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md deleted file mode 100644 index 4f989c6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki/junit/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Source -https://bitbucket.org/shackra/ctest-jenkins/ - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml deleted file mode 100644 index 020ec9d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -services: docker -os: linux -script: - - ./build.sh diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE deleted file mode 100644 index 53546c1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
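The junit/ folder deleted above ships CTest2JUnit.xsl, a stylesheet (taken, per its README, from https://bitbucket.org/shackra/ctest-jenkins/) that maps CTest's Test.xml output, including build and site fields such as BuildName and BuildStamp, to JUnit XML that Jenkins can consume. Below is a minimal sketch of how such a stylesheet is typically applied, assuming lxml is installed; the function name and file paths are placeholders, not part of the deleted scripts.

```
from lxml import etree

def ctest_to_junit(test_xml_path: str, xsl_path: str, out_path: str) -> None:
    """Apply an XSL stylesheet such as CTest2JUnit.xsl to a CTest Test.xml file."""
    transform = etree.XSLT(etree.parse(xsl_path))
    report = transform(etree.parse(test_xml_path))
    with open(out_path, "wb") as out:
        out.write(etree.tostring(report, pretty_print=True))

# Example usage (paths are placeholders):
# ctest_to_junit("Testing/20240101-0000/Test.xml", "CTest2JUnit.xsl", "junit-report.xml")
```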
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md deleted file mode 100644 index 594568c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# cmaki_docker - -[![Build Status](https://travis-ci.org/makiolo/cmaki_docker.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_docker) - -multiple pusher of docker images. - -``` -for image in (windows-x86, windows-x64, linux-x86, linux-x64, ...) - makiolo/$image = dockcross/$image + github:makiolo/cmaki_scripts/cmaki_depends.sh -done -``` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh deleted file mode 100755 index 26e71f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_docker/build.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env bash -#/bin/bash -prefix=$(pwd)/bin -mkdir -p $prefix - -# iterate in known images -curl https://raw.githubusercontent.com/dockcross/dockcross/master/Makefile -o dockcross-Makefile -for image in $(make -f dockcross-Makefile display_images); do - if [[ $(docker images -q dockcross/$image) != "" ]]; then - docker rmi -f dockcross/$image - echo dockcross/$image removed. - fi -done - -for image in $(make -f dockcross-Makefile display_images); do - - if [[ "$image" == "manylinux-x86" ]]; then - continue - fi - - if [[ "$image" == "manylinux-x64" ]]; then - continue - fi - - echo "copy dockcross/$image to makiolo/$image (with script change)" - cat<Dockerfile -FROM dockcross/$image:latest -ENV DEBIAN_FRONTEND noninteractive -RUN curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/cmaki_depends.sh | bash -EOF - - docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - docker build . 
-t makiolo/$image - docker push makiolo/$image - - # clean - docker rmi -f dockcross/$image - docker rmi -f makiolo/$image -done - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt deleted file mode 100644 index 91cc3ac..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/CMakeLists.txt +++ /dev/null @@ -1,95 +0,0 @@ -project(cmaki_generator) -cmake_minimum_required(VERSION 3.0) - -MESSAGE("-- compiler ${CMAKI_COMPILER}, platform ${CMAKI_PLATFORM}") - -include(cmaki) - -IF(CMAKE_BUILD_TYPE STREQUAL "Debug") - MESSAGE("-- Debug Mode") - SET(GLOBAL_BUILD_MODE "Debug") -ELSEIF(CMAKE_BUILD_TYPE STREQUAL "Release") - MESSAGE("-- Release Mode") - SET(GLOBAL_BUILD_MODE "Release") -ELSEIF(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") - MESSAGE("-- RelWithDebInfo Mode") - SET(GLOBAL_BUILD_MODE "RelWithDebInfo") -ELSE() - MESSAGE("-- Build mode default to Release") - MESSAGE("-- Release Mode") - SET(GLOBAL_BUILD_MODE "Release") -ENDIF() - -IF(NOT PACKAGE) - SET(PACKAGE "packagename_invalid") - MESSAGE(FATAL_ERROR "Invalid package name") -ENDIF() - -IF(NOT PACKAGE_VERSION) - SET(PACKAGE_VERSION "verson_invalid") - MESSAGE(FATAL_ERROR "Invalid version in package") -ENDIF() - -IF(NOT LIBRARY_TYPE) - SET(LIBRARY_TYPE "STATIC") -ENDIF() - -SET(PACKAGE "${PACKAGE}" CACHE STRING "Package to compile") -SET(PACKAGE_VERSION "${PACKAGE_VERSION}" CACHE STRING "Version to compile") - -SET(CMAKE_VERBOSE_MAKEFILE ON) -# Use relative paths on Windows, to reduce path size for command-line limits -if (WIN32) - set(CMAKE_USE_RELATIVE_PATHS true) - set(CMAKE_SUPPRESS_REGENERATION true) -endif() - -IF(NOT DEFINED GTC_INSTALL_PREFIX) - SET(GTC_INSTALL_PREFIX "${NPP_ARTIFACTS_PATH}/${PACKAGE}-${PACKAGE_VERSION}-${CMAKI_PLATFORM}/${PACKAGE}-${PACKAGE_VERSION}") - SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}") - SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder executables") - SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/${CMAKI_PLATFORM}" CACHE PATH "Folder libs") -ELSE() - SET(GTC_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") - SET(CMAKE_INSTALL_PREFIX "${GTC_INSTALL_PREFIX}") - SET(EXECUTABLE_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/bin" CACHE PATH "Folder executables") - SET(LIBRARY_OUTPUT_PATH "${GTC_INSTALL_PREFIX}/lib" CACHE PATH "Folder libs") -ENDIF() - -MESSAGE("CMAKI_INSTALL = ${CMAKI_INSTALL}") -MESSAGE("GTC_INSTALL_PREFIX = ${GTC_INSTALL_PREFIX}") -MESSAGE("CMAKE_INSTALL_PREFIX = ${CMAKE_INSTALL_PREFIX}") -MESSAGE("EXECUTABLE_OUTPUT_PATH = ${EXECUTABLE_OUTPUT_PATH}") -MESSAGE("LIBRARY_OUTPUT_PATH = ${LIBRARY_OUTPUT_PATH}") - -# gnu variables can prepend CMAKE_INSTALL_PREFIX -set(CMAKE_INSTALL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") -set(CMAKE_INSTALL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") -set(CMAKE_INSTALL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") -set(CMAKE_INSTALL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") -set(CMAKE_INSTALL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") -set(CMAKE_INSTALL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") -set(CMAKE_INSTALL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") -set(CMAKE_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") -set(CMAKE_INSTALL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") 
-set(CMAKE_INSTALL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") -set(CMAKE_INSTALL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") -set(CMAKE_INSTALL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") -set(CMAKE_INSTALL_FULL_BINDIR "${CMAKE_INSTALL_PREFIX}/bin") -set(CMAKE_INSTALL_FULL_SBINDIR "${CMAKE_INSTALL_PREFIX}/sbin") -set(CMAKE_INSTALL_FULL_LIBEXECDIR "${CMAKE_INSTALL_PREFIX}/libexec") -set(CMAKE_INSTALL_FULL_SYSCONFDIR "${CMAKE_INSTALL_PREFIX}/etc") -set(CMAKE_INSTALL_FULL_SHAREDSTATEDIR "${CMAKE_INSTALL_PREFIX}/com") -set(CMAKE_INSTALL_FULL_LOCALSTATEDIR "${CMAKE_INSTALL_PREFIX}/var") -set(CMAKE_INSTALL_FULL_LIBDIR "${CMAKE_INSTALL_PREFIX}/lib") -set(CMAKE_INSTALL_FULL_INCLUDEDIR "${CMAKE_INSTALL_PREFIX}/include") -set(CMAKE_INSTALL_FULL_DATAROOTDIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_FULL_DATADIR "${CMAKE_INSTALL_PREFIX}/share") -set(CMAKE_INSTALL_FULL_INFODIR "${CMAKE_INSTALL_PREFIX}/share/info") -set(CMAKE_INSTALL_FULL_LOCALEDIR "${CMAKE_INSTALL_PREFIX}/share/locale") -set(CMAKE_INSTALL_FULL_MANDIR "${CMAKE_INSTALL_PREFIX}/share/man") -set(CMAKE_INSTALL_FULL_DOCDIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PACKAGE}") -LINK_DIRECTORIES(${LIBRARY_OUTPUT_PATH}) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE deleted file mode 100644 index 7e79e4d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
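The deleted cmaki_generator/CMakeLists.txt above hangs every GNU-style install directory (bin, lib, include, share, ...) off a per-package prefix built from the package name, version and platform. The helper below restates that layout in Python as an illustration only; the function names are hypothetical and the platform string in the comment is just an example.

```
import os

def artifact_prefix(artifacts_path: str, package: str, version: str, platform: str) -> str:
    """Per-package install prefix as composed by the deleted CMakeLists.txt
    (GTC_INSTALL_PREFIX plus the platform subfolder)."""
    outer = f"{package}-{version}-{platform}"   # e.g. mylib-1.0-<platform>; platform string is illustrative
    inner = f"{package}-{version}"
    return os.path.join(artifacts_path, outer, inner, platform)

def install_dirs(prefix: str, package: str) -> dict:
    """GNU-style directories derived from the chosen prefix."""
    return {
        "bindir": os.path.join(prefix, "bin"),
        "libdir": os.path.join(prefix, "lib"),
        "includedir": os.path.join(prefix, "include"),
        "datarootdir": os.path.join(prefix, "share"),
        "docdir": os.path.join(prefix, "share", "doc", package),
    }
```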
- diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md deleted file mode 100644 index 6b5b746..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# cmaki_generator [![npm version](https://badge.fury.io/js/cmaki_generator.svg)](https://badge.fury.io/js/cmaki_generator) - -gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_generator.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_generator) - -# artifacts responsability -- boost-headers -- boost-system -- boost-random -- boost-atomic -- boost-thread -- boost-chrono -- boost-context -- boost-coroutine2 -- boost-signals -- boost-test -- boost-regex -- boost-filesystem -- boost-program-options -- python -- boost-python -- boost-python-debug -- boost-serialization diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build deleted file mode 100755 index c98e1d8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -directory=$(dirname $0) -if hash cygpath 2>/dev/null; then - directory=$(cygpath -w ${directory}) -fi - -python "${directory}/build.py" "$@" -out=$? -exit ${out} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd deleted file mode 100644 index e0ea6bd..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.cmd +++ /dev/null @@ -1,11 +0,0 @@ -@ECHO OFF -SET DIRWORK=%~dp0 - -IF EXIST "%PYTHON%" ( - rem ok -) ELSE ( - set PYTHON=python -) - -SET PATH=%~dp0\bin;%PATH% -"%PYTHON%" %DIRWORK%\build.py %* diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py deleted file mode 100644 index 5d86829..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/build.py +++ /dev/null @@ -1,757 +0,0 @@ -import os -import os.path -import sys -import fnmatch -import logging -import utils -import argparse -import pipeline -import traceback -import copy -import datetime -# object package -from third_party import ThirdParty -from collections import OrderedDict -from third_party import exceptions_fail_group -from third_party import exceptions_fail_program -from third_party import alias_priority_name -from third_party import alias_priority_name_inverse -from third_party import CMAKELIB_URL -from third_party import is_valid -from third_party import is_blacklisted -from third_party import prepare_cmakefiles -# gtc stages -from purge import purge -from prepare import prepare -from compilation import compilation -from packing import packing -from run_tests import run_tests -from upload import upload -from get_return_code import get_return_code -from third_party import FailThirdParty - -# GLOBAL NO MUTABLES -image_pattern = "image.%Y.%m.%d.%H%M" - -try: - import yaml -except ImportError: - logging.error('[Warning] Not yaml library present') - logging.error('[Warning] PyYAML (python extension) is 
mandatory') - if utils.is_windows(): - logging.error('You can use pip for install:') - logging.error(' pip intall pyyaml') - sys.exit(1) - -# Global mutable -compiler_replace_maps = {} - -# Global const -yaml_common_references = 'common.yml' -yaml_collapsed_third_parties = '.3p.yml' -yaml_collapsed_final = '.data.yml' - -class Loader(yaml.Loader): - def __init__(self, stream): - self._root = os.path.split(stream.name)[0] - super(Loader, self).__init__(stream) - - def include(self, node): - filename = os.path.join(self._root, self.construct_scalar(node)) - with open(filename, 'r') as f: - return yaml.load(f, Loader) - -def amalgamation_yaml(rootdir, yamlfile=None): - Loader.add_constructor('!include', Loader.include) - - # autogeneration .data.yml - yaml_collapsed_final_abspath = os.path.join(rootdir, yaml_collapsed_final) - yaml_common_references_abspath = os.path.join(rootdir, yaml_common_references) - with open(yaml_collapsed_final_abspath, 'wt') as f: - f.write('# autogenerated file, dont edit it !!!---\n') - f.write('---\n') - # inject common.yml - f.write('%sreferences:\n' % (' '*4)) - with open(yaml_common_references_abspath, 'r') as fr: - for line in fr.readlines(): - f.write('%s%s' % (' '*8, line)) - collapse_third_parties(rootdir, yaml_collapsed_third_parties, yamlfile=yamlfile) - if yamlfile is None and not parameters.no_back_yaml: - node_modules_dir = os.path.abspath(os.path.join(rootdir, '..', '..')) - for path in os.listdir(node_modules_dir): - fullpath = os.path.join(os.path.abspath(node_modules_dir), path) - if os.path.isdir(fullpath): - cmaki_file = os.path.join(fullpath, 'cmaki.yml') - if os.path.isfile(cmaki_file): - with open(cmaki_file, 'r') as fr: - with open(yaml_collapsed_third_parties, 'a') as tp_append: - for line in fr.readlines(): - tp_append.write(line) - # inject third_parties.yml - f.write('%sthird_parties:\n' % (' '*4)) - with open(yaml_collapsed_third_parties) as ft: - for line in ft.readlines(): - # sys.stdout.write("searching {}".format(line)) - f.write('%s%s' % (' '*8, line)) - -def search_nodes_by_key(list_nodes, found_key): - nodes = [] - for key, node in list_nodes: - if key == found_key: - nodes.append(node) - return nodes - -def collapse_third_parties(rootdir, filename, yamlfile=None): - p = pipeline.make_pipe() - # begin - if yamlfile is None: - p = pipeline.find(rootdir, 3)(p) - else: - p = pipeline.echo(yamlfile)(p) - # exclusions - p = pipeline.endswith('.yml')(p) - p = pipeline.grep_v('.travis.yml')(p) - p = pipeline.grep_v('shippable.yml')(p) - p = pipeline.grep_v('appveyor.yml')(p) - p = pipeline.grep_v('codecov.yml')(p) - p = pipeline.grep_v('.github')(p) - p = pipeline.grep_v('docker-compose.yml')(p) - p = pipeline.grep_v('circle.yml')(p) - p = pipeline.grep_v('_config.yml')(p) - p = pipeline.grep_v('.circleci-matrix.yml')(p) - p = pipeline.grep_v('.build_')(p) - p = pipeline.grep_v(yaml_collapsed_final)(p) - p = pipeline.grep_v(yaml_common_references)(p) - p = pipeline.grep_v(yaml_collapsed_third_parties)(p) - p = pipeline.grep_v(' - Copy.yml')(p) - p = pipeline.info('---> (yaml found.) 
')(p) - # cat - p = pipeline.cat()(p) - # p = pipeline.info('amalgamated: ')(p) - # write - p = pipeline.write_file(filename)(p) - # end - pipeline.end_pipe()(p) - -def run_purge(solutions): - - # create pipeline - with pipeline.create() as (p, finisher): - - # feed all packages - p = pipeline.feed(packages)(p) - - # clean intermediate folders - p = pipeline.do(purge, True, parameters)(p) - - # close pipe - finisher.send(p) - -def convert_priority_to_integer(priority): - if priority is not None: - error = False - if priority in alias_priority_name_inverse: - priority = alias_priority_name_inverse[priority] - else: - try: - priority_integer = int(priority) - if priority_integer in alias_priority_name: - priority = priority_integer - else: - error = True - except ValueError: - error = True - if error: - logging.error('Invalid priority name: %s' % priority) - sys.exit(1) - return priority - -def show_results(parameters, groups_ordered, rets, unittests): - # show final report - anyFail = 0 - if len(rets) > 0: - logging.info('-' * 80) - logging.info('') - for name in rets: - state = rets[name] - if state != "OK": - anyFail = 1 - - # package with unittests? - if name in unittests: - try: - result_test = unittests[name] - except KeyError: - result_test = 'No unittest found' - - if state != "OK": - logging.info("Compiled %30s - STATUS: %15s" % (name, state)) - else: - # only want know test result if is OK - logging.info("Compiled %30s - STATUS: %15s - TESTS: %s" % (name, state, result_test)) - else: - logging.info("Compiled %30s - STATUS: %15s" % (name, state)) - - logging.info('') - logging.info( '-'* 80) - else: - anyFail = 1 - logging.error('No results generated.') - - # any have exceptions ? - have_exceptions = False - for _, packages in groups_ordered: - for node in packages: - if len(node.exceptions) > 0: - have_exceptions = True - - if have_exceptions: - logging.error("---------- begin summary of exceptions ------------------------") - # show postponed exceptions - for _, packages in groups_ordered: - for node in packages: - if len(node.exceptions) > 0: - # something was wrong - anyFail = 1 - # show exceptions of this package - package = node.get_package_name() - version = node.get_version() - logging.error("package %s (%s) with exceptions" % (package, version)) - i = 0 - for exc_type, exc_value, exc_traceback in node.exceptions: - logging.error("---- Exception #%d / %d ----------" % (i+1, len(node.exceptions))) - traceback.print_exception(exc_type, exc_value, exc_traceback) - logging.error("----------------------------------") - i += 1 - logging.error("---------- end summary of exceptions ------------------------") - return anyFail - -def clean_subset(solutions): - groups = copy.deepcopy(solutions) - # 2/4: remove solutions are subset of other solution - for solution1 in solutions: - for solution2 in solutions: - if solution1 != solution2: - match = True - for node in solution1: - if node not in solution2: - match = False - break - if match and (solution1 in groups): - groups.remove(solution1) - return groups - -def init_parameter_path(value, default): - if value is None: - value = default - else: - # expand variables in no-windows - if not utils.is_windows(): - value = value.replace('~', utils.get_real_home()) - value = os.path.abspath(value) - return value - - -def parse_arguments(): - - parser = argparse.ArgumentParser(prog=""" - -cmaki_generator: - - Can build artifacts in a easy way. Each third-party need a block definition in yaml. 
This block contain all need information necessary for download, build, testing and packing. - -usage:""") - group_main = parser.add_argument_group('basic usage') - group_main.add_argument('packages', metavar='packages', type=str, nargs='*', - help='name (or list names) third party') - group_main.add_argument('--plan', '--dry-run', dest='plan', action='store_true', - help='Show packages plan (like a dry-run)', default=False) - group_main.add_argument('--server', dest='server', help='artifact server', default=None) - group_main.add_argument('--no-back-yaml', dest='no_back_yaml', action='store_true', help='no search back yaml', - default=False) - group_layer = group_main.add_mutually_exclusive_group() - group_layer.add_argument('--layer', dest='priority', - help='filter by layername. Valid values: (minimal|tools|third_party)', default=None) - group_layer.add_argument('--no-layer', dest='no_priority', - help='negation filter by layername. Valid values: (minimal|tools|third_party)', - default=None) - # group_main.add_argument('-t', '--tag', action='append', metavar='tag', type=str, help='NOT IMPLEMMENTED YET: filter tag third party') - group_padawan = parser.add_argument_group('padawan') - group_purge = group_padawan.add_mutually_exclusive_group() - group_purge.add_argument('--no-purge', dest='no_purge', action='store_true', help='remove purge from pipeline', - default=False) - group_purge.add_argument('--only-purge', dest='only_purge', action='store_true', - help='execute only purge in pipeline', default=False) - group_prepare = group_padawan.add_mutually_exclusive_group() - group_prepare.add_argument('--no-prepare', dest='no_prepare', action='store_true', - help='remove prepare from pipeline', default=False) - group_prepare.add_argument('--only-prepare', dest='only_prepare', action='store_true', - help='execute only prepare in pipeline', default=False) - group_compilation = group_padawan.add_mutually_exclusive_group() - group_compilation.add_argument('--no-compilation', dest='no_compilation', action='store_true', - help='remove compilation from pipeline', default=False) - group_compilation.add_argument('--only-compilation', dest='only_compilation', action='store_true', - help='execute only compilation in pipeline', default=False) - group_packing = group_padawan.add_mutually_exclusive_group() - group_packing.add_argument('--no-packing', dest='no_packing', action='store_true', - help='remove packing from pipeline', default=False) - group_packing.add_argument('--only-packing', dest='only_packing', action='store_true', - help='execute only packing in pipeline', default=False) - group_run_tests = group_padawan.add_mutually_exclusive_group() - group_run_tests.add_argument('--no-run-tests', dest='no_run_tests', action='store_true', - help='remove run_tests from pipeline', default=False) - group_run_tests.add_argument('--only-run-tests', dest='only_run_tests', action='store_true', - help='execute only run_tests in pipeline', default=False) - group_upload = group_padawan.add_mutually_exclusive_group() - group_upload.add_argument('--no-upload', dest='no_upload', action='store_true', help='remove upload from pipeline', - default=False) - group_upload.add_argument('--only-upload', dest='only_upload', action='store_true', - help='execute only upload in pipeline', default=False) - # creador de third parties - group_jedi = parser.add_argument_group('jedi') - group_jedi.add_argument('-o', '--only', dest='build_only', action='store_true', - help='build only explicit packages and not your depends') - 
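Each pipeline stage handled by the deleted build.py gets a paired --no-<stage>/--only-<stage> switch, and each pair lives in its own argparse mutually exclusive group so the two cannot be combined. Below is a self-contained sketch of that pattern with the stage names taken from the options above; the demo parser itself is hypothetical, not the original one.

```
import argparse

STAGES = ["purge", "prepare", "compilation", "packing", "run-tests", "upload"]

def make_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(prog="stage-toggles-demo")
    for stage in STAGES:
        dest = stage.replace("-", "_")
        group = parser.add_mutually_exclusive_group()
        # --no-<stage>: drop this stage from the pipeline
        group.add_argument(f"--no-{stage}", dest=f"no_{dest}",
                           action="store_true", default=False)
        # --only-<stage>: run this stage and skip all the others
        group.add_argument(f"--only-{stage}", dest=f"only_{dest}",
                           action="store_true", default=False)
    return parser

if __name__ == "__main__":
    args = make_parser().parse_args(["--only-packing"])
    print(args.only_packing, args.no_packing)   # True False
```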
group_jedi.add_argument('-v', '--verbose', action='count', help='verbose mode', default=0) - group_jedi.add_argument('-q', '--quiet', dest='quiet', action='store_true', help='quiet mode', default=False) - group_jedi.add_argument('-d', '--debug', action='store_true', help='Ridiculous debugging (probably not useful)') - group_jedi.add_argument('--purge-if-fail', dest='purge_if_fail', action='store_true', - help='purge even if a package finish with fail', default=False) - group_jedi.add_argument('--with-svn', dest='with_svn', help='svn executable', default=None) - group_jedi.add_argument('--fast', dest='fast', action='store_true', default=False, help=argparse.SUPPRESS) - group_jedi.add_argument('--log', dest='log', help='specified full path log (default is "gtc.log")', - default='gtc.log') - group_jedi.add_argument('--no-packing-cmakefiles', action='store_true', dest='no_packing_cmakefiles', - help='no packing cmakefiles', default=False) - group_jedi.add_argument('--blacklist', dest='blacklist', - help='third party in quarantine (default is $ROOTDIR + "blacklist.txt")', default=None) - group_jedi.add_argument('--no-blacklist', action='append', dest='no_blacklist', - help='list packages (separated with comma), for annular blacklist effect.', default=[]) - group_master_jedi = parser.add_argument_group('master jedi') - group_master_jedi.add_argument('--rootdir', dest='rootdir', - help='input folder with yamls, is recursive (default is current directory)', - default=None) - group_master_jedi.add_argument('--prefix', dest='prefix', - help='output folder where packages will be generated (default is $ROOTDIR + "artifacts")', - default=None) - group_master_jedi.add_argument('--cmakefiles', dest='cmakefiles', - help='input folder with cmake scripts (default is $PREFIX + "cmakelib")', - default=None) - group_master_jedi.add_argument('--third-party-dir', dest='third_party_dir', - help='output folder for cmakefiles (default is $CMAKEFILES + "3rdparty")', - default=None) - group_master_jedi.add_argument('--depends', dest='depends', help='json for save versions', default=None) - group_master_jedi.add_argument('--yaml', dest='yaml', help='unique file with third party to compile', default=None) - parameters = parser.parse_args() - ''' - TODO: - refactor: - prefix = DEPENDS_PATH (cmake3p) (artifacts) - cmakefiles = CMAKI_PATH, CMAKE_MODULE_PATH (cmaki, cmaki_find_package) - third-party-dir = CMAKE_PREFIX_PATH (directorio artifacts/cmaki_find_package) (3rdparty) - rootdir = ARTIFACTS_PATH, es la base de donde esta build.py (cmaki_generator) (scripts de generacion) tambien podria ser CMAKI_PWD - CMAKI_INSTALL: donde se espera tener instalado el cmaki_identifier - ''' - - cmaki_pwd = os.environ.get('CMAKI_PWD', os.getcwd()) - cmaki_install = os.environ.get('CMAKI_INSTALL', os.path.join(cmaki_pwd, 'bin')) - - ''' - axiomas: - - cmaki_pwd - - cmaki_install - - cmaki - - reglas: - - rootdir = cmaki/../cmaki_generator - - prefix = cmaki_pwd/artifacts - - third-party-dir = prefix/cmaki_find_package - - depends = cmaki_pwd/depends.json - - blacklist = rootdir/blacklist.txt - ''' - - - parameters.rootdir = init_parameter_path(parameters.rootdir, os.getcwd()) - parameters.prefix = init_parameter_path(parameters.prefix, os.path.join(cmaki_pwd, 'artifacts')) - parameters.third_party_dir = init_parameter_path(parameters.third_party_dir, os.path.join(parameters.prefix, 'cmaki_find_package')) - parameters.cmakefiles = init_parameter_path(parameters.cmakefiles, os.path.join(parameters.rootdir, '..', 'cmaki')) - 
parameters.blacklist = init_parameter_path(parameters.blacklist, os.path.join(parameters.rootdir, 'blacklist.txt')) - parameters.depends = init_parameter_path(parameters.depends, os.path.join(cmaki_pwd, 'depends.json')) - - # convert priority to int - parameters.priority = convert_priority_to_integer(parameters.priority) - parameters.no_priority = convert_priority_to_integer(parameters.no_priority) - if parameters.only_purge: - parameters.no_purge = False - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_prepare: - parameters.no_purge = True - parameters.no_prepare = False - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_compilation: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = False - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_packing: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = False - parameters.no_run_tests = True - parameters.no_upload = True - elif parameters.only_run_tests: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = False - parameters.no_upload = True - elif parameters.only_upload: - parameters.no_purge = True - parameters.no_prepare = True - parameters.no_compilation = True - parameters.no_packing = True - parameters.no_run_tests = True - parameters.no_upload = False - - if parameters.server is None: - if 'NPP_SERVER' not in os.environ: - logging.warning('Using artifacts server by default. 
If you need, can explicit define environment var NPP_SERVER') - os.environ['NPP_SERVER'] = 'http://artifacts.myftp.biz' - parameters.server = os.environ['NPP_SERVER'] - - - if 'NPP_CACHE' not in os.environ: - logging.warning('Using enablibing npm++ cache by default.') - os.environ['NPP_CACHE'] = 'TRUE' - - return parameters - - -if __name__ == '__main__': - - parameters = parse_arguments() - - # prepare logging - if parameters.debug: - utils.setup_logging(logging.DEBUG, parameters.log) - else: - utils.setup_logging(logging.INFO, parameters.log) - - if parameters.verbose: - logging.info('parameters = {}'.format(parameters)) - - if not parameters.quiet: - logging.info('---- MODE: {}'.format( os.environ['MODE'] )) - logging.info('---- CMAKI_PWD: {}'.format( os.environ['CMAKI_PWD'] )) - logging.info('---- CMAKI_INSTALL: {}'.format( os.environ['CMAKI_INSTALL'] )) - logging.info('---- rootdir: {}'.format(parameters.rootdir)) - logging.info('---- prefix: {}'.format(parameters.prefix)) - logging.info('---- cmakefiles: {}'.format(parameters.cmakefiles)) - logging.info('---- third_party_dir: {}'.format(parameters.third_party_dir)) - logging.info('---- blacklist: {}'.format(parameters.blacklist)) - logging.info('---- depends: {}'.format(parameters.depends)) - - - - # fetch remotes yaml - # i = 0 - # for package in parameters.packages: - # if package.startswith('github://'): - # repo = package[len('github://'):] - # utils.trymkdir('github') - # yml_file = os.path.join('github', '{}.yml'.format(repo.replace('/', '_'))) - # if os.path.isfile(yml_file): - # utils.tryremove(yml_file) - # try: - # download_from_url('https://raw.githubusercontent.com/{}/master/cmaki.yml'.format(repo), yml_file) - # except urllib2.HTTPError: - # logging.error('not found cmaki.yml in {}'.format(package)) - # sys.exit(1) - # parameters.packages[i] = repo.split('/')[1] - # i += 1 - - prepare_cmakefiles(parameters.cmakefiles) - - # generate amalgaimation yaml - amalgamation_yaml(parameters.rootdir, parameters.yaml) - - # load yaml to python - with open(yaml_collapsed_final, 'rt') as fy: - third_parties_data_yaml = yaml.load(fy, Loader) - - # generate list of tuples (key, parameters) - count = 0 - third_parties_data = [] - for third in third_parties_data_yaml['third_parties']: - for key in third: - parms = third[key] - third_parties_data.append( (key, parms) ) - count += 1 - - logging.info('Found {} packages.'.format(count)) - logging.info('Package requested: {}'.format(parameters.packages)) - - if count == 1 and (len(parameters.packages) == 0): - parameters.packages = [ third_parties_data[0][0] ] - - # create nodes and choose selected by filter and mask - nodes = [] - selected = [] - for key, parms in third_parties_data: - node = ThirdParty(parameters, key, parms) - # define variables for unused projects - package = node.get_package_name() - - # fill compiler_replace_maps - node.apply_replace_maps(compiler_replace_maps) - - if (node.is_valid() - and (parameters.priority is None or (parameters.priority == node.get_priority())) - and (parameters.no_priority is None or (parameters.no_priority != node.get_priority()))): - nodes.append( (key, node) ) - if (parameters.packages == ['.'] or parameters.packages == ['*']): - selected.append( (key, node) ) - elif ((parameters.packages == ['all']) and (not node.get_exclude_from_all())): - selected.append( (key, node) ) - else: - for exp in parameters.packages: - if fnmatch.fnmatch(key.lower(), exp.lower()): - selected.append( (key, node) ) - - logging.info('Selected {} 
packages.'.format(len(selected))) - - # create relations - for key, parms in third_parties_data: - try: - depends = parms['depends'] - mask = parms['mask'] - # depends valid - valid = is_valid(key, mask) - # depends blacklisted - blacklisted = is_blacklisted(parameters.blacklist, parameters.no_blacklist, key) - if (depends is not None) and valid and (not blacklisted): - for depend in depends: - nodes_key = search_nodes_by_key(nodes, key) - nodes_depend = search_nodes_by_key(nodes, depend) - for nk in nodes_key: - for nd in nodes_depend: - nk.needs(nd) - except KeyError: - # no need create relations - pass - - - # 1/7: Generate solutions in each node - solutions = [] - for key, select_node in selected: - resolved = [] - if not parameters.build_only: - select_node.resolver(resolved, []) - solutions.append( resolved ) - else: - solutions.append( [select_node] ) - - - # 2/7: clean subset - groups = clean_subset(solutions) - - - # 3/7: merge solutions with same root - sols3 = {} - for packages in groups: - first = packages[0] - if first not in sols3: - sols3[first] = [] - chunk = sols3[first] - for node in packages: - if node != first: - if node not in chunk: - chunk.append(node) - - - # 4/7: write final plan - groups = [] - for key, value in sols3.items(): - newsolution = [key] - for node in value: - newsolution.append(node) - groups.append(newsolution) - - - # 5/7: clean subset - groups = clean_subset(groups) - - # 6/7: sort groups - groups_ordered = [] - for packages in groups: - priority_total = 0 - for node in packages: - priority_total += node.get_priority() - priority_group = (priority_total / len(packages)) - groups_ordered.append( (priority_group, packages) ) - groups_ordered.sort(key=lambda tup: tup[0], reverse=False) - - # 7/7: validate groups - for priority_total, packages in groups_ordered: - if len(packages) > 0: - priority_initial = packages[0].get_priority() - for node in packages: - if priority_initial != node.get_priority(): - logging.error('[ERROR] You are mixing packages of different layers.') - logging.error('Invalid priority (%d) in package %s, expected %d:' % (node.get_priority(), node.get_package_name(), priority_initial)) - logging.error('Any in group have bad depends:') - for node in packages: - sys.stdout.write('%s, ' % node.get_package_name()) - sys.stdout.write('\n') - sys.exit(1) - - # show groups in --plan - if len(groups_ordered) > 0: - priority_prev = groups_ordered[0][0] - i = 0 - for priority_total, packages in groups_ordered: - if parameters.quiet: - j = 0 - for node in packages: - sys.stdout.write("%s" % node.get_package_name()) - if ((len(packages)-1) != j): - sys.stdout.write(";") - j += 1 - sys.stdout.write('\n') - else: - if (priority_total > priority_prev) or (i == 0): - if priority_total in alias_priority_name: - layer_name = alias_priority_name[priority_total] - else: - layer_name = '%d' % priority_total - sys.stdout.write('\nLayer: %s\n\n' % layer_name) - sys.stdout.write("\t[") - j = 0 - for node in packages: - sys.stdout.write("%s" % node.get_package_name()) - if ((len(packages)-1) != j): - sys.stdout.write(", ") - j += 1 - sys.stdout.write("]") - sys.stdout.write('\n') - - priority_prev = priority_total - i += 1 - sys.stdout.write('\n') - sys.stdout.flush() - else: - logging.warning('No results.') - # with --plan flag is like use --dry-run - if parameters.plan: - sys.exit(0) - - try: - rets = OrderedDict() - unittests = OrderedDict() - skipping_if_priority_gt = 999 - announce_once = False - # - # pipeline: prepare, compile, packing, run_tests - # 
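
[editor's note] The loop that follows composes the build as a chain of `pipeline.do(...)` stages (prepare, compile, packing, run_tests, upload) fed from `pipeline.feed(packages)` and drained by `pipeline.end_pipe()`. The pipeline module itself is not shown in this diff, so the sketch below only illustrates the generator-composition pattern being used; it is not the real `pipeline` implementation, and the stage names are hypothetical.

    # Minimal generator-pipeline sketch mirroring feed -> do(stage) -> end_pipe.
    def make_pipe():
        def generator():
            return
            yield  # makes this an (empty) generator
        return generator()

    def feed(items):
        def step(_pipe):
            for item in items:
                yield item
        return step

    def do(func):
        def step(pipe):
            for item in pipe:
                func(item)       # run this stage on each package
                yield item       # pass the package downstream
        return step

    def end_pipe():
        def step(pipe):
            for _ in pipe:       # drain the pipeline so every stage runs
                pass
        return step

    if __name__ == '__main__':
        p = make_pipe()
        p = feed(['zlib', 'boost'])(p)
        p = do(lambda pkg: print('prepare', pkg))(p)
        p = do(lambda pkg: print('compile', pkg))(p)
        end_pipe()(p)

Each stage is lazy: nothing executes until `end_pipe()` iterates the chain, which is why the real code always closes the pipe after wiring the stages.
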
- for priority_group, packages in groups_ordered: - - if priority_group > skipping_if_priority_gt: - if not announce_once: - logging.error("ignoring group because some previous group are failing:") - logging.warning('\tgroup is formed by:') - announce_once = True - else: - logging.warning('') - for node in packages: - logging.warning(' -- %s' % node.get_package_name()) - continue - - if len(packages) > 1: - logging.info('--- Start group ---') - for node in packages: - logging.info('- %s' % node.get_package_name()) - # prepare include scripts - node.generate_scripts_headers(compiler_replace_maps) - - try: - if not parameters.no_purge: - run_purge(packages) - - # create pipeline - p = pipeline.make_pipe() - - # feed third parties - p = pipeline.feed(packages)(p) - - if not parameters.no_prepare: - # download sources - p = pipeline.do(prepare, False, parameters, compiler_replace_maps)(p) - - if not parameters.no_compilation: - # ./configure && make (configuration and compilation) - p = pipeline.do(compilation, False, parameters, compiler_replace_maps)(p) - - if not parameters.no_packing: - # packing (generate .tar.gz) - p = pipeline.do(packing, False, parameters, compiler_replace_maps)(p) - - if not parameters.no_run_tests: - # execute unittests and save results in "unittests" - p = pipeline.do(run_tests, False, parameters, compiler_replace_maps, unittests)(p) - - if not parameters.no_upload: - # upload artifacts - p = pipeline.do(upload, False, parameters, compiler_replace_maps)(p) - - # save results in "rets" - p = get_return_code(parameters, rets)(p) - - # close pipe - pipeline.end_pipe()(p) - - except FailThirdParty as e: - skipping_if_priority_gt = priority_group - logging.error("stopping full group.") - - except exceptions_fail_group: - logging.warning('Fatal exception in group:') - for node in packages: - logging.warning('-- %s' % node.get_package_name()) - - finally: - # only purge when you are executing a full group - if (not parameters.build_only) and (not parameters.no_purge): - if parameters.purge_if_fail: - run_purge(packages) - else: - # purge only if all packages are ok - ret = 0 - for node in packages: - ret += node.ret - - if ret == 0: - run_purge(packages) - else: - if len(packages) > 1: - logging.warning('Any in group is failing. No purge next group:') - for node in packages: - logging.warning(' %s' % node.get_package_name()) - else: - logging.warning('No purge %s because finished with fail' % node.get_package_name()) - - except exceptions_fail_program: - logging.warning('Force explicit exit ...') - finally: - ret = show_results(parameters, groups_ordered, rets, unittests) - sys.exit(ret) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py deleted file mode 100644 index 4ab073a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/check_remote_version.py +++ /dev/null @@ -1,233 +0,0 @@ -import os -import sys -import logging -import argparse -from requests import get # to make GET request -from io import StringIO -import csv -import utils -import functools - -version_separator = '.' 
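
[editor's note] The `version_to_tuple` helper defined just below parses a dotted version string and zero-pads it to `version_count_max` components so that versions of different lengths compare numerically. A standalone illustration of that padding, assuming the same `'.'` separator and a maximum of 4 fields:

    # Standalone illustration of the padding applied by version_to_tuple below.
    VERSION_SEPARATOR = '.'
    VERSION_COUNT_MAX = 4

    def to_padded_tuple(version_str):
        parts = [int(x) for x in version_str.split(VERSION_SEPARATOR)]
        parts.extend([0] * (VERSION_COUNT_MAX - len(parts)))
        return tuple(parts)

    print(to_padded_tuple('1.2'))        # (1, 2, 0, 0)
    print(to_padded_tuple('1.2.3.4'))    # (1, 2, 3, 4)
    print(to_padded_tuple('1.2') < to_padded_tuple('1.10'))  # True: numeric, not lexicographic
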
-version_count_max = 4 - - -# def read_remote_csv(url): -# fp = urllib.request.urlopen(url) -# mybytes = fp.read() -# content = mybytes.decode("utf8") -# fp.close() -# return content - - -def read_remote_csv(url): - response = get(url) - response = response.content.decode("utf8") - return response - - -def version_to_tuple(version_str): - try: - if (version_str is not None) and (len(version_str) > 0): - count = len(version_str.split(version_separator)) - list_data = [int(x) for x in version_str.split(version_separator)] - zeros = [0 for x in range(version_count_max - count)] - list_data.extend(zeros) - return tuple(list_data) - else: - return None - except ValueError: - return None - - -class package(object): - def __init__(self, name, version, local): - self._name = name - self._version = version_to_tuple(version) - self._local = local - - def __repr__(self): - if self._version is not None: - list_version = list(self._version) - list_version = [str(x) for x in list_version] - join_version = version_separator.join(list_version) - else: - join_version = "last" - return "%s;%s" % (self._name, join_version) - - def __eq__(self, other): - return (self._name == other._name) or (self._name == '.') or (other._name == '.') - - def __ne__(self, other): - return not self.__eq__(other) - - def is_same_version(self, other): - return self._version == other._version - - def get_name(self): - return self._name - - def get_version(self): - return self._version - - def is_local(self): - return self._local - - -def sort_versions(local_swap): - if not local_swap: - one = 1 - else: - one = -1 - - def cmp(a, b): - if a.get_version() < b.get_version(): - return 1 - elif a.get_version() > b.get_version(): - return -1 - else: - if a.is_local() and not b.is_local(): - return -one - elif a.is_local() and b.is_local(): - return one - elif not a.is_local() and b.is_local(): - return one - else: - return one - return cmp - - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--artifacts', dest='artifacts', help='3rdparty path with cmakefiles', default=None) - parser.add_argument('--server', dest='server', help='artifact server', default=None) - """ - Existe un valor especial de name ".". 
Sirve para hacer un listado de todos los artefactos - """ - parser.add_argument('--name', required=True, dest='name', help='name package', default=None) - """ - La version fijada tiene la siguiente prioridad: - - Version fijada mediante parametros - - Version fijada mediante fichero de dependencias - - Version ultima - """ - parser.add_argument('--version', dest='version', help='version package fixed', default=None) - # TODO: packagename-1.0.0.0-windows_32-msvc_2015-debug - # --platform deberia filtrar artefactos compatibles con "MI PLATAFORMA" - parser.add_argument('--platform', dest='platform', help='platform specified', default=None) - # --compiler deberia filtrar artefactos compatibles con "MI COMPILADOR" - parameters = parser.parse_args() - - package_request = package(parameters.name, parameters.version, True) - packages_found = [] - - if parameters.artifacts is not None: - # local - utils.trymkdir(parameters.artifacts) - for path in os.listdir(parameters.artifacts): - full_path = os.path.join(parameters.artifacts, path) - # directorios que contengan "-" - if os.path.isdir(full_path) and (full_path.find('-') != -1): - basename = os.path.basename(full_path) - try: - separator = basename.rindex('-') - package_name = basename[:separator] - package_version = basename[separator+1:] - new_package = package(package_name, package_version, True) - if new_package == package_request: - packages_found.append(new_package) - except ValueError: - pass # happen with 3rdpartyversions - - """ - Buscar paquetes recien generados - """ - if parameters.artifacts is not None: - # local - basename = None - for path in os.listdir(parameters.artifacts): - full_path = os.path.join(parameters.artifacts, path) - terminator = '-cmake.tar.gz' - if os.path.isfile(full_path) and (full_path.endswith(terminator)): - if parameters.platform is None: - logging.error('Platform is needed!') - sys.exit(1) - terminator = '-%s-cmake.tar.gz' % parameters.platform - basename = os.path.basename(full_path) - try: - if basename is not None: - separator = basename.rindex(terminator) - basename = basename[:separator] - separator = basename.rindex('-') - package_name = basename[:separator] - package_version = basename[separator+1:] - new_package = package(package_name, package_version, True) - if new_package == package_request: - packages_found.append(new_package) - except ValueError: - # not found platform in file - pass - - - if parameters.server is not None: - try: - if not parameters.server.endswith('?quiet'): - parameters.server = parameters.server + '/' + '?quiet' - csv_content = read_remote_csv(parameters.server) - reader = csv.reader(StringIO(csv_content), delimiter=';') - i = 0 - for row in reader: - if len(row) >= 2: - if i > 0: - package_name = row[0] - package_version = row[1] - package_platform = row[2] - new_package = package(package_name, package_version, False) - if (parameters.platform is None) or (parameters.platform == package_platform): - if new_package == package_request: - packages_found.append(new_package) - i += 1 - except IOError: - logging.debug('error in cache artifacts: %s' % parameters.server) - - - if len(packages_found) > 0: - - if parameters.version is None: - """ - Cuando no hay version, ordeno de mayor a menor. - Al pasar False al comparador aparece primero local y luego remote en caso de ser la misma version. - Selecciona el primero y sale. 
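
[editor's note] The `sort_versions` comparator above is consumed through `functools.cmp_to_key`, ordering candidates newest-first and breaking ties between local and remote artifacts. A minimal sketch of that usage over simplified records; `Artifact` and `newest_first` are stand-ins that only mimic the `get_version()` / `is_local()` accessors the real comparator relies on.

    # Sketch of consuming a cmp-style comparator via functools.cmp_to_key.
    import functools

    class Artifact:
        def __init__(self, version, local):
            self._version, self._local = version, local
        def get_version(self):
            return self._version
        def is_local(self):
            return self._local
        def __repr__(self):
            return '{}{}'.format(self._version, ' (local)' if self._local else '')

    def newest_first(a, b):
        # higher version first; on ties, local artifacts before remote ones
        if a.get_version() < b.get_version():
            return 1
        if a.get_version() > b.get_version():
            return -1
        return -1 if a.is_local() and not b.is_local() else 1

    found = [Artifact((1, 2, 0, 0), False),
             Artifact((1, 3, 0, 0), True),
             Artifact((1, 3, 0, 0), False)]
    print(sorted(found, key=functools.cmp_to_key(newest_first)))
    # [(1, 3, 0, 0) (local), (1, 3, 0, 0), (1, 2, 0, 0)]
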
- """ - for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(False))): - if package_request.is_same_version(package): - print("EXACT;%s;%s" % (package, package.get_version())) - else: - print("COMPATIBLE;%s;%s" % (package, package.get_version())) - if parameters.name != '.': - sys.exit(0) - else: - """ - Cuando se especifica una version minima - Se ordena a la inversa, es decir de menor a mayor. - Se coge el primer paquete que cumple la restriccion de version. - Al pasar True al comparador hace que en caso de empate se mantenga a pesar del reverse que - aparece primero versiones locales y luego las remotas. - """ - for package in sorted(packages_found, key=functools.cmp_to_key(sort_versions(True)), reverse=True): - if package.get_version() >= package_request.get_version(): - if package_request.is_same_version(package): - print("EXACT;%s;%s" % (package, package.get_version())) - else: - print("COMPATIBLE;%s;%s" % (package, package.get_version())) - if parameters.name != '.': - sys.exit(0) - else: - print("UNSUITABLE;;") - sys.exit(1) - -# if __name__ == '__main__': -# csv_content = read_remote_csv('http://localhost:8080') -# reader = csv.reader(StringIO(csv_content), delimiter=';') -# print(list(reader)) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml deleted file mode 100644 index 11a2c76..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/common.yml +++ /dev/null @@ -1,498 +0,0 @@ -compilation_environments: &compilation_environments - windows_32-msvc_msc_ver_*-*: - generator: "Visual Studio 15 2017" - ext_dyn: dll - ext_sta: lib - windows_64-msvc_msc_ver_*-*: - generator: "Visual Studio 15 2017 Win64" - ext_dyn: dll - ext_sta: lib - windows_32-msvc_2015-*: - generator: "Visual Studio 14 2015" - ext_dyn: dll - ext_sta: lib - windows_64-msvc_2015-*: - generator: "Visual Studio 14 2015 Win64" - ext_dyn: dll - ext_sta: lib - windows_32-msvc_2017-*: - generator: "Visual Studio 15 2017" - ext_dyn: dll - ext_sta: lib - windows_64-msvc_2017-*: - generator: "Visual Studio 15 2017 Win64" - ext_dyn: dll - ext_sta: lib - windows_32-gcc_4-*: - generator: "Unix Makefiles" - ext_dyn: dll.a - ext_sta: a - windows_64-gcc_4-*: - generator: "Unix Makefiles" - ext_dyn: dll.a - ext_sta: a - linux_*_glibc_2.*-*_*-*: - generator: "Unix Makefiles" - ext_dyn: so - ext_sta: a - macos_64-clang_*-*: - generator: "Unix Makefiles" - ext_dyn: dylib - ext_sta: a - android_arm_api_*-gcc_*-*: - generator: "Unix Makefiles" - ext_dyn: so - ext_sta: a - -thirdparty_defaults: &thirdparty_defaults - platforms: - <<: *compilation_environments - version: null - version_manager: git - mask: wlmea - mode: dri - depends: null - source: skip - packing: true - build_windows: - | - @echo off - set CMAKI_INSTALL=%SELFHOME% - npm install - unittest: - | - int main() { return 0; } - priority: 30 - -library_dynamic: &library_dynamic - common: &library_dynamic_common - include: - - $PLATFORM/include - - include - windows: &library_dynamic_windows - <<: *library_dynamic_common - dynamic: - debug: - dll: - /*$TARGET*.dll/ - lib: - /*$TARGET*.lib/ - pdb: - /*$TARGET*.pdb/ - relwithdebinfo: - dll: - /*$TARGET*.dll/ - lib: - /*$TARGET*.lib/ - pdb: - /*$TARGET*.pdb/ - release: - dll: - /*$TARGET*.dll/ - lib: - /*$TARGET*.lib/ - pdb: - /*$TARGET*.pdb/ - - unix: &library_dynamic_unix - <<: *library_dynamic_common - dynamic: 
- debug: - so: - - /lib*$TARGET*.$EXT_DYN/ - - /*$TARGET*.$EXT_DYN/ - relwithdebinfo: - so: - - /lib*$TARGET*.$EXT_DYN/ - - /*$TARGET*.$EXT_DYN/ - release: - so: - - /lib*$TARGET*.$EXT_DYN/ - - /*$TARGET*.$EXT_DYN/ - windows_*-msvc_*-*: - <<: *library_dynamic_windows - default: - <<: *library_dynamic_unix - -executable: &executable - windows: &executable_windows_common - executable: - release: - bin: - /*${TARGET}.exe/ - unix: &executable_unix_common - executable: - release: - bin: - /*${TARGET}/ - windows_*-msvc_*-*: - <<: *executable_windows_common - default: - <<: *executable_unix_common - -executable_exact: &executable_exact - windows: &executable_exact_windows_common - executable: - release: - bin: - - ${TARGET}.exe - - bin/${TARGET}.exe - - dll/${TARGET}.exe - debug: - bin: - - ${TARGET}.exe - - bin/${TARGET}.exe - - dll/${TARGET}.exe - unix: &executable_exact_unix_common - executable: - release: - bin: - - $TARGET - - bin/$TARGET - - dll/$TARGET - debug: - bin: - - $TARGET - - bin/$TARGET - - dll/$TARGET - windows_*-msvc_*-*: - <<: *executable_exact_windows_common - default: - <<: *executable_exact_unix_common - -library_dynamic_exact: &library_dynamic_exact - common: &library_dynamic_exact_common - include: - - $PLATFORM/include - - include - windows: &library_dynamic_exact_windows - <<: *library_dynamic_exact_common - dynamic: - debug: - dll: - - ${TARGET}d.dll - - bin/${TARGET}d.dll - - Debug/${TARGET}d.dll - - dll/${TARGET}d.dll - - ${TARGET}_D.dll - - bin/${TARGET}_D.dll - - Debug/${TARGET}_D.dll - - dll/${TARGET}_D.dll - - $TARGET.dll - - bin/$TARGET.dll - - Debug/$TARGET.dll - - dll/$TARGET.dll - lib: - - ${TARGET}d.lib - - lib/${TARGET}d.lib - - bin/${TARGET}d.lib - - Debug/${TARGET}d.lib - - dll/${TARGET}d.lib - - ${TARGET}_D.lib - - lib/${TARGET}_D.lib - - bin/${TARGET}_D.lib - - Debug/${TARGET}_D.lib - - dll/${TARGET}_D.lib - - $TARGET.lib - - lib/$TARGET.lib - - bin/$TARGET.lib - - Debug/$TARGET.lib - - dll/$TARGET.lib - pdb: - - ${TARGET}d.pdb - - pdb/${TARGET}d.pdb - - bin/${TARGET}d.pdb - - Debug/${TARGET}d.pdb - - dll/${TARGET}d.pdb - - ${TARGET}_D.pdb - - pdb/${TARGET}_D.pdb - - bin/${TARGET}_D.pdb - - Debug/${TARGET}_D.pdb - - dll/${TARGET}_D.pdb - - $TARGET.pdb - - pdb/$TARGET.pdb - - bin/$TARGET.pdb - - Debug/$TARGET.pdb - - dll/$TARGET.pdb - relwithdebinfo: - dll: - - $TARGET.dll - - bin/$TARGET.dll - - RelWithDebInfo/$TARGET.dll - - dll/$TARGET.dll - lib: - - $TARGET.lib - - lib/$TARGET.lib - - bin/$TARGET.lib - - RelWithDebInfo/$TARGET.lib - - dll/$TARGET.lib - pdb: - - $TARGET.pdb - - pdb/$TARGET.pdb - - bin/$TARGET.pdb - - RelWithDebInfo/$TARGET.pdb - - dll/$TARGET.pdb - release: - dll: - - $TARGET.dll - - bin/$TARGET.dll - - Release/$TARGET.dll - - dll/$TARGET.dll - lib: - - $TARGET.lib - - lib/$TARGET.lib - - bin/$TARGET.lib - - Release/$TARGET.lib - - dll/$TARGET.lib - pdb: - - $TARGET.pdb - - pdb/$TARGET.pdb - - bin/$TARGET.pdb - - Release/$TARGET.pdb - - dll/$TARGET.pdb - - unix: &library_dynamic_exact_unix - <<: *library_dynamic_exact_common - dynamic: - debug: - so: - - Debug/lib${TARGET}d.$EXT_DYN - - Debug/lib${TARGET}_D.$EXT_DYN - - Debug/lib${TARGET}_debug.$EXT_DYN - - Debug/lib${TARGET}-d.$EXT_DYN - - Debug/lib${TARGET}.$EXT_DYN - ##################### - - bin/lib${TARGET}d.$EXT_DYN - - bin/lib${TARGET}_D.$EXT_DYN - - bin/lib${TARGET}_debug.$EXT_DYN - - bin/lib${TARGET}-d.$EXT_DYN - - bin/lib${TARGET}.$EXT_DYN - ##################### - - lib/lib${TARGET}d.$EXT_DYN - - lib/lib${TARGET}_D.$EXT_DYN - - lib/lib${TARGET}_debug.$EXT_DYN - - 
lib/lib${TARGET}-d.$EXT_DYN - - lib/lib${TARGET}.$EXT_DYN - ##################### - - lib${ARCH}/lib${TARGET}d.$EXT_DYN - - lib${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib${ARCH}/lib${TARGET}.$EXT_DYN - ##################### - - lib${TARGET}d.$EXT_DYN - - lib${TARGET}_D.$EXT_DYN - - lib${TARGET}_debug.$EXT_DYN - - lib${TARGET}-d.$EXT_DYN - - lib${TARGET}.$EXT_DYN - ###################### - - lib/${ARCH}/lib${TARGET}d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}.$EXT_DYN - relwithdebinfo: - so: - - RelWithDebInfo/lib${TARGET}d.$EXT_DYN - - RelWithDebInfo/lib${TARGET}_D.$EXT_DYN - - RelWithDebInfo/lib${TARGET}_debug.$EXT_DYN - - RelWithDebInfo/lib${TARGET}-d.$EXT_DYN - - RelWithDebInfo/lib${TARGET}.$EXT_DYN - ##################### - - bin/lib${TARGET}d.$EXT_DYN - - bin/lib${TARGET}_D.$EXT_DYN - - bin/lib${TARGET}_debug.$EXT_DYN - - bin/lib${TARGET}-d.$EXT_DYN - - bin/lib${TARGET}.$EXT_DYN - ##################### - - lib/lib${TARGET}d.$EXT_DYN - - lib/lib${TARGET}_D.$EXT_DYN - - lib/lib${TARGET}_debug.$EXT_DYN - - lib/lib${TARGET}-d.$EXT_DYN - - lib/lib${TARGET}.$EXT_DYN - ##################### - - lib${ARCH}/lib${TARGET}d.$EXT_DYN - - lib${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib${ARCH}/lib${TARGET}.$EXT_DYN - ##################### - - lib${TARGET}d.$EXT_DYN - - lib${TARGET}_D.$EXT_DYN - - lib${TARGET}_debug.$EXT_DYN - - lib${TARGET}-d.$EXT_DYN - - lib${TARGET}.$EXT_DYN - ###################### - - lib/${ARCH}/lib${TARGET}d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_D.$EXT_DYN - - lib/${ARCH}/lib${TARGET}_debug.$EXT_DYN - - lib/${ARCH}/lib${TARGET}-d.$EXT_DYN - - lib/${ARCH}/lib${TARGET}.$EXT_DYN - release: - so: - - Release/lib$TARGET.$EXT_DYN - - bin/lib$TARGET.$EXT_DYN - - lib/lib$TARGET.$EXT_DYN - - lib${ARCH}/lib$TARGET.$EXT_DYN - - lib$TARGET.$EXT_DYN - - lib/${ARCH}/lib$TARGET.$EXT_DYN - windows_*-msvc_*-*: - <<: *library_dynamic_exact_windows - default: - <<: *library_dynamic_exact_unix - -library_static: &library_static - common: &library_static_common - include: - - $PLATFORM/include - - include - static: - debug: - lib: - /*$TARGET*.$EXT_STA/ - relwithdebinfo: - lib: - /*$TARGET*.$EXT_STA/ - release: - lib: - /*$TARGET*.$EXT_STA/ - windows_*-msvc_*-*: - <<: *library_static_common - default: - <<: *library_static_common - -library_static_exact: &library_static_exact - common: &library_static_exact_common - include: - - $PLATFORM/include - - include - static: - debug: - lib: - - Debug/lib${TARGET}d.$EXT_STA - - Debug/lib${TARGET}-d.$EXT_STA - - Debug/lib${TARGET}.$EXT_STA - - Debug/${TARGET}d.$EXT_STA - - Debug/${TARGET}-d.$EXT_STA - - Debug/${TARGET}.$EXT_STA - ################ - - lib${TARGET}d.$EXT_STA - - lib${TARGET}-d.$EXT_STA - - lib${TARGET}.$EXT_STA - - ${TARGET}d.$EXT_STA - - ${TARGET}-d.$EXT_STA - - ${TARGET}.$EXT_STA - ################ - - lib/lib${TARGET}d.$EXT_STA - - lib/lib${TARGET}-d.$EXT_STA - - lib/lib${TARGET}.$EXT_STA - - lib/${TARGET}d.$EXT_STA - - lib/${TARGET}-d.$EXT_STA - - lib/${TARGET}.$EXT_STA - relwithdebinfo: - lib: - - RelWithDebInfo/lib${TARGET}d.$EXT_STA - - RelWithDebInfo/lib${TARGET}-d.$EXT_STA - - RelWithDebInfo/lib${TARGET}.$EXT_STA - - RelWithDebInfo/${TARGET}d.$EXT_STA - - RelWithDebInfo/${TARGET}-d.$EXT_STA - - RelWithDebInfo/${TARGET}.$EXT_STA - ################ - - 
lib${TARGET}d.$EXT_STA - - lib${TARGET}-d.$EXT_STA - - lib${TARGET}.$EXT_STA - - ${TARGET}d.$EXT_STA - - ${TARGET}-d.$EXT_STA - - ${TARGET}.$EXT_STA - ################ - - lib/lib${TARGET}d.$EXT_STA - - lib/lib${TARGET}-d.$EXT_STA - - lib/lib${TARGET}.$EXT_STA - - lib/${TARGET}d.$EXT_STA - - lib/${TARGET}-d.$EXT_STA - - lib/${TARGET}.$EXT_STA - release: - lib: - - Release/lib${TARGET}.$EXT_STA - - Release/${TARGET}.$EXT_STA - ################ - - lib${TARGET}.$EXT_STA - - ${TARGET}.$EXT_STA - ################ - - lib/lib${TARGET}.$EXT_STA - - lib/${TARGET}.$EXT_STA - windows_*-msvc_*-*: - <<: *library_static_exact_common - default: - <<: *library_static_exact_common - -# when need distribute dll (only windows) but dont need linking -library_only_dll: &library_only_dll - windows: &library_only_dll_windows - add_3rdparty_dependencies: false - lib_provided: false - dynamic: - debug: - dll: - /*$TARGET*.dll/ - pdb: - /*$TARGET*.pdb/ - relwithdebinfo: - dll: - /*$TARGET*.dll/ - pdb: - /*$TARGET*.pdb/ - release: - dll: - /*$TARGET*.dll/ - pdb: - /*$TARGET*.pdb/ - -library_dynamic_boost: &library_dynamic_boost - common: &common_boost - include: - - $PLATFORM/include - - include - definitions: - - -D${PACKAGE_UPPER}_DYN_LINK - windows: &windows_dynamic_boost - <<: *common_boost - dynamic: - debug: - dll: - /$TARGET-*-mt-*d-*_*.dll/ - lib: - /$TARGET-*-mt-*d-*_*.lib/ - pdb: - null - relwithdebinfo: - dll: - /$TARGET-*-mt-*_*.dll/ - lib: - /$TARGET-*-mt-*_*.dll/ - pdb: - null - release: - dll: - /$TARGET-*-mt-*_*.dll/ - lib: - /$TARGET-*-mt-*_*.lib/ - pdb: - null - unix: &unix_dynamic_boost - <<: *common_boost - dynamic: - debug: - so: - /lib$TARGET-*-mt-*d-*_*.$EXT_DYN/ - relwithdebinfo: - so: - /lib$TARGET-*-mt-*_*.$EXT_DYN/ - release: - so: - /lib$TARGET-*-mt-*_*.$EXT_DYN/ - windows_*-msvc_*-*: - <<: *windows_dynamic_boost - default: - <<: *unix_dynamic_boost - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py deleted file mode 100644 index b80af0f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/compilation.py +++ /dev/null @@ -1,238 +0,0 @@ -import os -import utils -import logging -import pipeline -from itertools import product -from third_party import platforms -from third_party import CMAKELIB_URL -from third_party import get_identifier - - -def search_cmakelib(): - # compilando desde cmaki_generator - cmakelib_dir = os.path.join('output', '3rdparties', 'cmaki') - if not os.path.isdir(cmakelib_dir): - # compilando una dependencia - cmakelib_dir = os.path.join('..', 'cmaki') - if not os.path.isdir(cmakelib_dir): - # compilando proeycto raiz - cmakelib_dir = os.path.join('node_modules', 'cmaki') - if not os.path.isdir(cmakelib_dir): - raise Exception("not found cmaki: {}".format(os.path.abspath(cmakelib_dir))) - return os.path.abspath(cmakelib_dir) - - -def compilation(node, parameters, compiler_replace_maps): - - package = node.get_package_name() - package_norm = node.get_package_name_norm() - version = node.get_version() - - cmake3p_dir = parameters.prefix - cmake3p_dir = utils.get_norm_path(cmake3p_dir) - cmake3p_dir = cmake3p_dir.replace('\\', '/') - - cmakefiles_dir = search_cmakelib() - - package_upper = node.get_package_name_norm_upper() - parms = node.parameters - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - install_directory = 
node.get_install_directory(plat) - utils.trymkdir(install_directory) - build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) - utils.trymkdir(build_directory) - with utils.working_directory(build_directory): - # get generator and platform info - for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): - - logging.info('-- compilation mode: %s plat: %s' % (build_mode, plat)) - - ############# 1. prepare vars - - if build_mode.lower() == 'debug': - try: - env_modified['CFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CFLAGS'] - except KeyError: - env_modified['CFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG' - try: - env_modified['CPPFLAGS'] = '%s -g -O0 -D_DEBUG -DDEBUG' % env_modified['CPPFLAGS'] - except KeyError: - env_modified['CPPFLAGS'] = '-g -O0 -D_DEBUG -DDEBUG' - elif build_mode.lower() == 'relwithdebinfo': - try: - env_modified['CFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CFLAGS'] - except KeyError: - env_modified['CFLAGS'] = '-g -O2 -DNDEBUG' - try: - env_modified['CPPFLAGS'] = '%s -g -O2 -DNDEBUG' % env_modified['CPPFLAGS'] - except KeyError: - env_modified['CPPFLAGS'] = '-g -O2 -DNDEBUG' - elif build_mode.lower() == 'release': - # default packages assume came in release - try: - env_modified['CFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CFLAGS'] - except KeyError: - env_modified['CFLAGS'] = '-O3 -DNDEBUG' - try: - env_modified['CPPFLAGS'] = '%s -O3 -DNDEBUG' % env_modified['CPPFLAGS'] - except KeyError: - env_modified['CPPFLAGS'] = '-O3 -DNDEBUG' - - cores = utils.detect_ncpus() - half_cores = cores / 2 - env_modified['CORES'] = str(cores) - env_modified['HALF_CORES'] = str(half_cores) - env_modified['GTC_PREFIX'] = parameters.prefix - env_modified['CMAKELIB_URL'] = CMAKELIB_URL - env_modified['BUILD_MODE'] = str(build_mode) - # env_modified['NPP_SERVER'] = ... 
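
[editor's note] The repeated try/except KeyError blocks above implement "append the build-mode flags to CFLAGS/CPPFLAGS if already set, otherwise create the variable". A compact equivalent is sketched below; `append_flags` and `BUILD_MODE_FLAGS` are hypothetical names, not part of compilation.py. Note also that under Python 3 the `cores / 2` above yields a float (so `HALF_CORES` becomes e.g. '4.0'); integer division `cores // 2` would keep it integral.

    # Compact equivalent of the try/except KeyError flag handling above.
    def append_flags(env, name, extra):
        current = env.get(name, '').strip()
        env[name] = '{} {}'.format(current, extra).strip()

    BUILD_MODE_FLAGS = {
        'debug':          '-g -O0 -D_DEBUG -DDEBUG',
        'relwithdebinfo': '-g -O2 -DNDEBUG',
        'release':        '-O3 -DNDEBUG',
    }

    env_modified = {'CFLAGS': '-fPIC'}
    for var in ('CFLAGS', 'CPPFLAGS'):
        append_flags(env_modified, var, BUILD_MODE_FLAGS['debug'])
    print(env_modified)
    # {'CFLAGS': '-fPIC -g -O0 -D_DEBUG -DDEBUG', 'CPPFLAGS': '-g -O0 -D_DEBUG -DDEBUG'}
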
- env_modified['SOURCES'] = os.path.abspath(os.path.join('..', node.get_download_directory())) - env_modified['CMAKI_DIR'] = cmakefiles_dir - env_modified['SELFHOME'] = install_directory - env_modified['CMAKI_PWD'] = build_directory - env_modified['CMAKI_INSTALL'] = install_directory - - ################# - # remove cmake3p of node - node.remove_cmake3p(cmake3p_dir) - - # show env vars - node.show_environment_vars(env_modified) - - # remove CMakeCache.txt for avoid problems when - # change of generator - utils.tryremove('CMakeCache.txt') - utils.tryremove('cmake_install.cmake') - utils.tryremove('install_manifest.txt') - utils.tryremove_dir('CMakeFiles') - ################# - - generator_extra = '' - if generator is not None: - generator_extra = '-G"%s"' % generator - - cmakefiles_dir = parameters.cmakefiles - cmakefiles_dir = cmakefiles_dir.replace('\\', '/') - - cmake_prefix_path = parameters.third_party_dir - cmake_prefix_path = cmake_prefix_path.replace('\\', '/') - - build_directory = build_directory.replace('\\', '/') - - # resolve replace maps - compiler_replace_resolved = {} - for var, value in compiler_replace_maps.items(): - newvalue = value - newvalue = newvalue.replace('$PLATFORM', plat) - compiler_replace_resolved[var] = newvalue - - # begin definitions cmake - try: - cmake_definitions_list_original = parms['cmake_definitions'] - cmake_definitions_list = [] - for define in cmake_definitions_list_original: - # TODO: resolver tus variables directas e indirectas (de dependencias) - define = define.replace('$%s_HOME' % package_norm, install_directory) - # apply replaces - cmake_definitions_list.append( utils.apply_replaces(define, compiler_replace_resolved) ) - except KeyError: - cmake_definitions_list = [] - - # add cflags and cppflags to cmake_definitions - try: - cmake_definitions_list.append( 'CMAKE_C_FLAGS="%s"' % env_modified['CFLAGS'] ) - except KeyError: - pass - try: - cmake_definitions_list.append( 'CMAKE_CXX_FLAGS="%s"' % env_modified['CPPFLAGS'] ) - except KeyError: - pass - - definitions_extra = '' - for definition in cmake_definitions_list: - definitions_extra += ' -D%s' % definition - # end definitions cmake - - if (not 'CMAKE_TOOLCHAIN_FILE' in env_modified) or (not env_modified['CMAKE_TOOLCHAIN_FILE']) or (env_modified['CMAKE_TOOLCHAIN_FILE'] == "no cross compile"): - cmake_toolchain_file_filepath='' - else: - cmake_toolchain_file_filepath=' -DCMAKE_TOOLCHAIN_FILE="{}"'.format(env_modified['CMAKE_TOOLCHAIN_FILE']) - - cmake_prefix = node.get_cmake_prefix() - cmake_configure = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKE_MODULE_PATH=%s -DCMAKI_PATH=%s -DCMAKE_BUILD_TYPE=%s -DCMAKE_PREFIX_PATH=%s -DPACKAGE=%s -DPACKAGE_UPPER=%s -DPACKAGE_VERSION=%s -DPACKAGE_BUILD_DIRECTORY=%s -DCMAKI_COMPILER=%s -DCMAKI_IDENTIFIER=%s -DCMAKI_PLATFORM=%s %s %s' % (generator_extra, cmake_prefix, cmake3p_dir, cmakefiles_dir, cmakefiles_dir, build_mode, cmake_prefix_path, package, package_upper, version, build_directory, get_identifier('COMPILER'), get_identifier('ALL'), get_identifier('ALL'), definitions_extra, cmake_toolchain_file_filepath) - - target = node.get_cmake_target() - if target is not None: - cmake_build = 'cmake --build . --target %s --config %s' % (target, build_mode) - else: - cmake_build = 'cmake --build . --config %s' % build_mode - - env_modified['CMAKE_CONFIGURE'] = cmake_configure.replace(r'"', r"'") - env_modified['CMAKE_BUILD'] = cmake_build.replace(r'"', r"'") - - ########## 2. 
execute - - executed_build_script = False - if utils.is_windows(): - for build_script in ['.build.cmd', 'build.cmd']: - if os.path.exists(build_script): - # execute manual build script - node.ret += abs(utils.safe_system('%s %s %s %s %s %s' % (build_script, install_directory, package, version, plat, build_mode), env=env_modified)) - executed_build_script = True - else: - for build_script in ['.build.sh', 'build.sh']: - if os.path.exists(build_script): - # show vars - node.show_environment_vars(env_modified) - - node.ret += abs(utils.safe_system('chmod +x %s && ./%s %s %s %s %s %s' % (build_script, build_script, install_directory, package, version, plat, build_mode), env=env_modified)) - executed_build_script = True - - if not executed_build_script: - logging.debug('configure command: %s' % cmake_configure) - - ret = utils.safe_system(cmake_configure, env=env_modified) - if ret == 0: - logging.debug('build command: %s' % cmake_configure) - node.ret += abs(utils.safe_system(cmake_build, env=env_modified)) - else: - logging.warning('Configuration failed. See log: %s' % parameters.log) - node.ret += abs(ret) - - ######## 3. manual install - - # post-install - logging.debug('begin post-install') - for bc in node.get_post_install(): - chunks = [x.strip() for x in bc.split(' ') if x] - if(len(chunks) != 2) and (len(chunks) != 3): - raise Exception('Invalid value in post_install: %s. Expected [source pattern destiny]' % bc) - - source_folder = os.path.join(build_directory, os.path.dirname(chunks[0])) - install_directory_chunk = os.path.join(install_directory, chunks[1]) - pattern = os.path.basename(chunks[0]) - logging.debug('copy %s/%s to %s' % (source_folder, pattern, install_directory_chunk)) - - # create directory if not exists - utils.trymkdir(install_directory_chunk) - - p = pipeline.make_pipe() - # begin - if len(chunks) == 3: - p = pipeline.find(source_folder, 99)(p) - else: - p = pipeline.find(source_folder, 0)(p) - p = pipeline.grep_basename(pattern)(p) - p = pipeline.copy(source_folder, install_directory_chunk)(p) - p = pipeline.debug('copied ')(p) - # end - pipeline.end_pipe()(p) - logging.debug('end post-install') - - if parameters.fast: - logging.debug('skipping for because is in fast mode: "compilation"') - break - - # finish well - return True diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py deleted file mode 100644 index 23fc656..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/download_package.py +++ /dev/null @@ -1,11 +0,0 @@ -from requests import get # to make GET request - -def download_from_url(url, file_name): - with open(file_name, "wb") as file: - response = get(url) - file.write(response.content) - -url = 'http://localhost:8080/cpp/download.php?file=json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz' - -print( download_from_url(url, "json-0.0.1514575489.676243933-macos_64-clang_9-debug-cmake.tar.gz") ) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py deleted file mode 100755 index e450ee0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_package.py +++ /dev/null @@ -1,26 +0,0 @@ -import os -import sys -import logging -import 
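
[editor's note] The `download_from_url` in download_package.py above writes `response.content` without checking the HTTP status and holds the whole body in memory. A slightly more defensive sketch of the same idea follows; `raise_for_status`, `stream=True` and `iter_content` are standard requests features, and the chunk size is an arbitrary illustrative choice.

    # Defensive variant of download_from_url above: check the HTTP status
    # and stream the body in chunks instead of buffering it all in memory.
    from requests import get

    def download_from_url(url, file_name, chunk_size=64 * 1024):
        response = get(url, stream=True)
        response.raise_for_status()          # fail loudly on 4xx/5xx
        with open(file_name, 'wb') as handle:
            for chunk in response.iter_content(chunk_size=chunk_size):
                handle.write(chunk)
        return file_name
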
argparse -import urllib -import csv -import utils - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--name', required=True, dest='name', help='name package', default=None) - parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) - parameters = parser.parse_args() - - depends_file = parameters.depends - if os.path.exists(depends_file): - data = utils.deserialize(depends_file) - # data = utils.deserialize_json(depends_file) - else: - data = {} - if parameters.name in data: - print (data[parameters.name]) - sys.exit(0) - else: - sys.exit(1) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py deleted file mode 100644 index c407dd7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/get_return_code.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging - - -def set_state(rets, key, value): - if not key in rets: - rets[key] = value - else: - logging.warning('Received in pipeline multiples packages with same name and version: %s' % key) - set_state(rets, key + '_', value) - - -def get_return_code(parameters, rets): - def process(packages): - for node in packages: - try: - # process package - name = node.get_package_name() - version = node.get_version() - - if len(node.exceptions) > 0: - state = "EXCEPTION in %s" % node.fail_stage - elif node.interrupted: - state = "INTERRUPTED in %s" % node.fail_stage - elif (node.ret != 0): - state = "FAILED in %s" % node.fail_stage - else: - state = "OK" - - key = '%s - %s' % (name, version) - set_state(rets, key, state) - finally: - # send to next step - yield node - return process - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt deleted file mode 100644 index 2d06137..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/gwen/CMakeLists.txt +++ /dev/null @@ -1,47 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -cmake_policy(SET CMP0011 NEW) - -include_directories(gwen/include) - -#ADD_DEFINITIONS(-DGWEN_COMPILE_STATIC -D_HAS_EXCEPTIONS=0 -D_STATIC_CPPLIB) -ADD_DEFINITIONS(-DGWEN_COMPILE_DLL) - -IF(WIN32) - -ELSE() - add_definitions(-std=c++11) -ENDIF() - -file(GLOB SOURCE_CODE1 gwen/src/*.cpp) -file(GLOB SOURCE_CODE2 gwen/src/Controls/*.cpp) -file(GLOB SOURCE_CODE3 gwen/src/Controls/Dialog/*.cpp) -file(GLOB SOURCE_CODE4 gwen/src/Platforms/*.cpp) - -add_library(${PACKAGE} SHARED ${SOURCE_CODE1} ${SOURCE_CODE2} ${SOURCE_CODE3} ${SOURCE_CODE4}) - -file(GLOB HEADER_CODE1 gwen/include/Gwen/*.h) -INSTALL( FILES ${HEADER_CODE1} - DESTINATION "include/${PACKAGE}") - -file(GLOB HEADER_CODE2 gwen/include/Gwen/Controls/*.h) -INSTALL( FILES ${HEADER_CODE2} - DESTINATION "include/${PACKAGE}/Controls") - -file(GLOB HEADER_CODE3 gwen/include/Gwen/Controls/Dialog/*.h) -INSTALL( FILES ${HEADER_CODE3} - DESTINATION "include/${PACKAGE}/Controls/Dialog") - -file(GLOB HEADER_CODE4 gwen/include/Gwen/Input/*.h) -INSTALL( FILES ${HEADER_CODE4} - DESTINATION "include/${PACKAGE}/Input") - -file(GLOB HEADER_CODE5 gwen/include/Gwen/Renderers/*.h) -INSTALL( FILES ${HEADER_CODE5} - DESTINATION "include/${PACKAGE}/Renderers") - -file(GLOB HEADER_CODE6 gwen/include/Gwen/Skins/*.h) 
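
[editor's note] `set_state` in get_return_code.py above resolves duplicate "name - version" keys by recursively appending underscores, so results from same-named packages are all kept. A tiny standalone demonstration of that behaviour (the logging call is omitted here to keep it self-contained):

    # Demo of the set_state collision handling above: a duplicate key is
    # stored under key + '_' (recursively), never overwritten.
    def set_state(rets, key, value):
        if key not in rets:
            rets[key] = value
        else:
            set_state(rets, key + '_', value)

    rets = {}
    set_state(rets, 'json - 1.0', 'OK')
    set_state(rets, 'json - 1.0', 'FAILED in compile')   # same key again
    set_state(rets, 'json - 1.0', 'OK')
    print(rets)
    # {'json - 1.0': 'OK', 'json - 1.0_': 'FAILED in compile', 'json - 1.0__': 'OK'}
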
-INSTALL( FILES ${HEADER_CODE6} - DESTINATION "include/${PACKAGE}/Skins") - -INSTALL( FILES gwen/bin/DefaultSkin.png - DESTINATION "bin") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py deleted file mode 100644 index f5e56cb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/hash_version.py +++ /dev/null @@ -1,172 +0,0 @@ -import os -import contextlib -import utils -import time -from datetime import datetime -from utils import get_stdout -from email.utils import parsedate - - -def get_revision_svn(repo): - ''' - This command need svn in PATH - ''' - cmd = "svn info %s" % repo - for line in get_stdout(cmd): - if line.startswith('Last') or (line.startswith('Revisi') and (line.find('cambio') != -1)): - pos = line.rindex(':') - return int(line[pos+2:]) - return -1 - - -def get_timestamp_from_changeset(repo, changeset_searched): - ''' - generator of commits - ''' - with utils.working_directory(repo): - lines = [] - for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): - lines.append(line) - for line in reversed(lines): - chunks = line.split(";") - assert(len(chunks) == 2) - changeset = chunks[0] - timestamp = int(time.mktime(parsedate(chunks[1]))) - if changeset_searched == changeset: - return timestamp - raise Exception('Error in get timestamp from changeset {}'.format(changeset_searched)) - - -def git_log_gen(repo, number=1, extra=''): - ''' - generator of commits - ''' - with utils.working_directory(repo): - for line in get_stdout('git log -%d %s' % (number, extra)): - if line.startswith('commit'): - parts = line.split(' ') - assert(len(parts) == 2) - commit_name = parts[1] - yield commit_name - - -def get_changeset_git_from_position(repo, position = 0): - with utils.working_directory(repo): - i = 1 - lines = [] - for line in get_stdout('git log'): - lines.append(line) - for line in reversed(lines): - if line.startswith('commit'): - parts = line.split(' ') - assert(len(parts) == 2) - commit_name = parts[1] - if i == position: - return commit_name - else: - i += 1 - raise Exception('Error in get git hash from position {}'.format(position)) - - -def get_changeset_from_timestamp(repo, timestamp_searched): - with utils.working_directory(repo): - lines = [] - for line in get_stdout(r'git log --format="%H;%cd" --date=rfc'): - lines.append(line) - for line in reversed(lines): - chunks = line.split(";") - assert(len(chunks) == 2) - changeset = chunks[0] - timestamp = int(time.mktime(parsedate(chunks[1]))) - if timestamp_searched == timestamp: - return changeset - raise Exception('Error in get git hash from timestamp {}'.format(timestamp_searched)) - - -def get_position_git_from_changeset(repo, changeset): - with working_directory(repo): - i = 1 - lines = [] - for line in get_stdout('git log'): - lines.append(line) - for line in reversed(lines): - if line.startswith('commit'): - parts = line.split(' ') - if len(parts) == 2: - commit_name = parts[1] - if commit_name == changeset: - return i - else: - i += 1 - return -1 - - -def get_last_changeset(repo, short=False): - for changeset in git_log_gen(repo, number=1): - if short: - return changeset[:7] - else: - return changeset - return "" - - -def get_last_version(repo): - return to_cmaki_version(repo, get_last_changeset(repo)) - - -def rehash_simple(commit_name, position): - separator = '000' - return 
int(separator.join(list(str(ord(character)) for character in commit_name))) % position - - -@contextlib.contextmanager -def working_directory(path): - prev_cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(prev_cwd) - - -def to_cmaki_version(repo, changeset): - ''' - git hash ----> 0.0.x.x - ''' - position = get_timestamp_from_changeset(repo, changeset) - hash_simple = rehash_simple(changeset, position) - versions = [] - versions.append('0') - versions.append('0') - versions.append(str(position)) - versions.append(str(hash_simple)) - return '.'.join(versions) - - -def to_git_version(repo, version): - ''' - 0.0.x.x ----> git hash - ''' - version = version.split('.') - assert(len(version) == 4) - position = int(version[2]) - pseudohash = int(version[3]) - changeset = get_changeset_from_timestamp(repo, position) - hash_simple = rehash_simple(changeset, position) - assert( get_timestamp_from_changeset(repo, changeset) == position ) - assert( hash_simple == pseudohash ) - return changeset - - -if __name__ == '__main__': - - local_path = r'/home/ricardo/dev/fast-event-system' - - for commit_name in git_log_gen(local_path, 10): - cmaki_version = to_cmaki_version(local_path, commit_name) - print ("%s -> %s" % (commit_name, cmaki_version)) - commit_name2 = to_git_version(local_path, cmaki_version) - print ("%s -> %s" % (cmaki_version, commit_name2)) - print () - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl deleted file mode 100644 index 8ba21f4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/CTest2JUnit.xsl +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - BuildName: - BuildStamp: - Name: - Generator: - CompilerName: - OSName: - Hostname: - OSRelease: - OSVersion: - OSPlatform: - Is64Bits: - VendorString: - VendorID: - FamilyID: - ModelID: - ProcessorCacheSize: - NumberOfLogicalCPU: - NumberOfPhysicalCPU: - TotalVirtualMemory: - TotalPhysicalMemory: - LogicalProcessorsPerPhysical: - ProcessorClockFrequency: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md deleted file mode 100644 index 4f989c6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/junit/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Source -https://bitbucket.org/shackra/ctest-jenkins/ - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt deleted file mode 100644 index bc1e512..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/Build/CMakeLists.txt +++ /dev/null @@ -1,687 +0,0 @@ -#=================================== -# Build script for libRocket ======= -#=================================== - -if(APPLE) - if(IOS_PLATFORM) - set(CMAKE_TOOLCHAIN_FILE cmake/Platform/iOS.cmake) - endif(IOS_PLATFORM) -endif(APPLE) - -# We use the new OSX_ARCHITECTURES property -# 
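
[editor's note] hash_version.py above maps a git changeset to a synthetic version of the form 0.0.&lt;commit timestamp&gt;.&lt;pseudo-hash&gt;, where the pseudo-hash is `rehash_simple(changeset, timestamp)`. A self-contained illustration of that assembly follows; the changeset and timestamp are made up for the example, so no git repository is needed. `to_git_version` reverses the mapping by looking the timestamp up in `git log` and asserting the pseudo-hash matches.

    # Illustration of the version scheme in hash_version.py above:
    # 0.0.<commit timestamp>.<rehash_simple(changeset, timestamp)>.
    def rehash_simple(commit_name, position):
        # concatenate the ordinal of every hash character, separated by '000',
        # then reduce modulo the timestamp to get a small pseudo-hash
        separator = '000'
        return int(separator.join(str(ord(c)) for c in commit_name)) % position

    changeset = 'a3f94c1'          # hypothetical (shortened) git hash
    timestamp = 1514575489         # hypothetical commit time (unix epoch)

    pseudo = rehash_simple(changeset, timestamp)
    version = '.'.join(['0', '0', str(timestamp), str(pseudo)])
    print(version)                 # e.g. 0.0.1514575489.<pseudo-hash>
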
and GNUInstallDirs module -cmake_minimum_required(VERSION 2.8.5) - -if(COMMAND cmake_policy) - cmake_policy(SET CMP0015 NEW) -endif(COMMAND cmake_policy) - -project(libRocket C CXX) - -# paths -include(GNUInstallDirs) - -set(LIBROCKET_VERSION_MAJOR 1) -set(LIBROCKET_VERSION_MINOR 3) -set(LIBROCKET_VERSION_PATCH 0) -set(LIBROCKET_VERSION_TWEAK 0) -set(PROJECT_VERSION ${LIBROCKET_VERSION_MAJOR}.${LIBROCKET_VERSION_MINOR}.${LIBROCKET_VERSION_PATCH}.${LIBROCKET_VERSION_TWEAK}) - -# Search in the 'cmake' directory for additional CMake modules. -list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) - -# Old versions of CMake need some updated Modules, but we don't want -# to override newer versions of CMake which have working versions -if(CMAKE_MAJOR_VERSION LESS 3) - list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/v2fixes) -endif() - -#=================================== -# Environment tests ================ -#=================================== - -include(TestForANSIForScope) -include(TestForANSIStreamHeaders) -include(TestForSTDNamespace) - -#=================================== -# Provide hints as to where depends= -# might be found = -#=================================== - -if(NOT DEFINED ENV{FREETYPE_DIR}) - set(ENV{FREETYPE_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{Boost_DIR}) - set(ENV{Boost_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{LUA_DIR}) - set(ENV{LUA_DIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{SDLDIR}) - set(ENV{SDLDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{SDLIMAGEDIR}) - set(ENV{SDLIMAGEDIR} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -if(NOT DEFINED ENV{SFML_ROOT}) - set(ENV{SFML_ROOT} "${PROJECT_SOURCE_DIR}/../Dependencies") -endif() - -#=================================== -# Plaform specific global hacks ==== -#=================================== - -if(APPLE) - # Disables naked builtins from AssertMacros.h which - # This prevents naming collisions such as those from the check() - # function macro with LuaType::check - add_definitions(-D__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=0) -endif(APPLE) - -#=================================== -# Build options ==================== -#=================================== - -if(NOT CMAKE_BUILD_TYPE) - set(CMAKE_BUILD_TYPE Release CACHE STRING - "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel." - FORCE) -endif() - -if(NOT IOS) - option(BUILD_SHARED_LIBS "Build shared libraries" ON) -endif(NOT IOS) - -option(BUILD_PYTHON_BINDINGS "Build python bindings" OFF) -option(BUILD_LUA_BINDINGS "Build Lua bindings" OFF) -option(BUILD_SAMPLES "Build samples" OFF) -if(WIN32) - option(SKIP_DIRECTX_SAMPLES "Skip build of all DirectX related samples. Only applies if BUILD_SAMPLES is ON" OFF) - option(SKIP_DIRECTX9_SAMPLE "Skip build of DirectX 9 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) - option(SKIP_DIRECTX10_SAMPLE "Skip build of DirectX 10 related sample. Only applies if BUILD_SAMPLES is ON and SKIP_DIRECTX_SAMPLES is OFF" OFF) -endif() - -if(IOS) - if(BUILD_SHARED_LIBS) - message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. iOS does not support shared libraries.") - endif(BUILD_SHARED_LIBS) -endif(IOS) - -if(IOS) - if(BUILD_SHARED_LIBS) - message(FATAL_ERROR "BUILD_SHARED_LIBS must be OFF for iOS builds. 
iOS does not support shared libraries.") - endif(BUILD_SHARED_LIBS) -endif(IOS) - -if(NOT BUILD_SHARED_LIBS) - add_definitions(-DSTATIC_LIB) -endif() - -#on windows, check for VC10 and fix the multiple compile target issue. -IF(WIN32) - if(MSVC) - if(${MSVC_VERSION} STREQUAL 1600 OR ${MSVC_VERSION} STRGREATER 1600) - message("Visual Studio 2010 (${MSVC_VERSION}) build fix at play (/FORCE:MULTIPLE)") - set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /FORCE:MULTIPLE") - endif() - endif() -ENDIF(WIN32) - - -#=================================== -# Find dependencies ================ -#=================================== - -cmaki_find_package(dune-freetype) -include_directories(${DUNE-FREETYPE_INCLUDE_DIRS}) -list(APPEND CORE_LINK_LIBS ${DUNE-FREETYPE_LIBRARIES}) - -# # FreeType -# if(CMAKE_MAJOR_VERSION LESS 3) -# # Freetype changed the layout of its header files, we need to use -# # the FindFreetype module from cmake v3 at least, included here -# find_package(Freetype-v2fix REQUIRED) -# else() -# find_package(Freetype REQUIRED) -# endif() -# -# if(FREETYPE_FOUND) -# include_directories(${FREETYPE_INCLUDE_DIRS}) -# link_directories(${FREETYPE_LINK_DIRS}) -# list(APPEND CORE_LINK_LIBS ${FREETYPE_LIBRARY}) -# endif() -# mark_as_advanced(FREETYPE_INCLUDE_DIRS FREETYPE_LIBRARY FREETYPE_LINK_DIRECTORIES) - -# Boost and Python -if(BUILD_PYTHON_BINDINGS) - find_package(PythonInterp 2 REQUIRED) - find_package(PythonLibs 2 REQUIRED) - execute_process( - COMMAND ${PYTHON_EXECUTABLE} -c "from distutils import sysconfig; print(sysconfig.get_python_lib(1,0,prefix=''))" - OUTPUT_VARIABLE PYTHON_INSTDIR - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - if(PYTHONLIBS_FOUND) - include_directories(${PYTHON_INCLUDE_DIR}) - endif() - - #set(Boost_USE_STATIC_LIBS OFF) - #set(Boost_USE_MULTITHREADED ON) - find_package(Boost 1.40.0 COMPONENTS python REQUIRED) - if(Boost_FOUND) - include_directories(${Boost_INCLUDE_DIR}) - list(APPEND PY_BINDINGS_LINK_LIBS ${PYTHON_LIBRARY} ${Boost_LIBRARIES}) - endif() - -endif() - -#Lua -if(BUILD_LUA_BINDINGS) - if(CMAKE_MAJOR_VERSION LESS 3) - find_package(Lua-v2fix) - else() - find_package(Lua) - endif() - if(LUA_FOUND) - include_directories(${LUA_INCLUDE_DIR}) - list(APPEND LUA_BINDINGS_LINK_LIBS ${LUA_LIBRARIES}) - endif() -endif() - - -#=================================== -# Setup paths ====================== -#=================================== - -set(PROJECT_SOURCE_DIR ${PROJECT_SOURCE_DIR}/..) 
- -include_directories( - ${PROJECT_SOURCE_DIR}/Include -) - -# Include list of source files -include(FileList) - -#=================================== -# Build libraries ================== -#=================================== - -set(LIBRARIES Core Controls Debugger) - -foreach(library ${LIBRARIES}) - set(NAME Rocket${library}) - - add_library(${NAME} ${${library}_SRC_FILES} - ${${library}_HDR_FILES} - ${${library}_PUB_HDR_FILES} - ${MASTER_${library}_PUB_HDR_FILES} - ) - - set_target_properties(${NAME} PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${LIBROCKET_VERSION_MAJOR} - ) - - if(APPLE) - if(NOT IOS) - set_target_properties(${NAME} PROPERTIES - OSX_ARCHITECTURES "i386;x86_64;" - ) - endif(NOT IOS) - endif(APPLE) - - install(TARGETS ${NAME} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - ) -endforeach(library) - -# Build python bindings -if(BUILD_PYTHON_BINDINGS) - set(LIBRARIES core controls) - - foreach(library ${LIBRARIES}) - set(NAME _rocket${library}) - - add_library(${NAME} MODULE ${Py${library}_SRC_FILES} - ${Py${library}_HDR_FILES} - ${Py${library}_PUB_HDR_FILES} - ) - - if(APPLE) - if(NOT IOS) - set_target_properties(${NAME} PROPERTIES - OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" - ) - endif(NOT IOS) - endif(APPLE) - - set_target_properties(${NAME} PROPERTIES PREFIX "") - - install(TARGETS ${NAME} - LIBRARY DESTINATION ${PYTHON_INSTDIR} - ) - endforeach(library) -endif() - -# Build Lua bindings -if(BUILD_LUA_BINDINGS) - set(LIBRARIES Core Controls) - - foreach(library ${LIBRARIES}) - set(NAME Rocket${library}Lua) - - add_library(${NAME} ${Lua${library}_SRC_FILES} - ${Lua${library}_HDR_FILES} - ${Lua${library}_PUB_HDR_FILES} - ) - - set_target_properties(${NAME} PROPERTIES - VERSION ${PROJECT_VERSION} - SOVERSION ${LIBROCKET_VERSION_MAJOR} - ) - - if(APPLE) - if(NOT IOS) - set_target_properties(${NAME} PROPERTIES - OSX_ARCHITECTURES "$(ARCHS_STANDARD_32_64_BIT)" - ) - endif(NOT IOS) - endif(APPLE) - - install(TARGETS ${NAME} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - ) - endforeach(library) -endif() - - -#=================================== -# Link libraries =================== -#=================================== - -target_link_libraries(RocketCore ${CORE_LINK_LIBS}) -target_link_libraries(RocketControls RocketCore) -target_link_libraries(RocketDebugger RocketCore) - -if(BUILD_PYTHON_BINDINGS) - target_link_libraries(_rocketcore RocketCore ${PY_BINDINGS_LINK_LIBS}) - target_link_libraries(_rocketcontrols RocketControls ${PY_BINDINGS_LINK_LIBS}) -endif() - -if(BUILD_LUA_BINDINGS) - target_link_libraries(RocketCoreLua RocketCore ${LUA_BINDINGS_LINK_LIBS}) - target_link_libraries(RocketControlsLua RocketControls RocketCoreLua ${LUA_BINDINGS_LINK_LIBS}) -endif() - - -#=================================== -# Build samples ==================== -#=================================== - -# Build and link the samples -macro(bl_sample NAME) - if (WIN32) - add_executable(${NAME} WIN32 ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) - elseif(APPLE) - add_executable(${NAME} MACOSX_BUNDLE ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) - else() - add_executable(${NAME} ${${NAME}_SRC_FILES} ${${NAME}_HDR_FILES} ) - endif() - - if (APPLE) - # We only support i386 for the samples as it still uses Carbon - set_target_properties(${NAME} PROPERTIES OSX_ARCHITECTURES "i386;" ) - endif() - - 
target_link_libraries(${NAME} ${ARGN}) -endmacro() - -if(BUILD_SAMPLES) - include(SampleFileList) - - set(samples treeview customlog drag loaddocument) - set(tutorials template datagrid datagrid_tree tutorial_drag) - - set(sample_LIBRARIES - shell - RocketCore - RocketControls - RocketDebugger - ) - - # Find OpenGL - find_package(OpenGL REQUIRED) - - if(OPENGL_FOUND) - include_directories(${OPENGL_INCLUDE_DIR}) - list(APPEND sample_LIBRARIES ${OPENGL_LIBRARIES}) - endif() - - # Set up required system libraries - if(WIN32) - if(SKIP_DIRECTX_SAMPLES) - message("-- Skipping all DirectX samples") - set(SKIP_DIRECTX9_SAMPLE ON) - set(SKIP_DIRECTX10_SAMPLE ON) - else() - message("-- Determing if DirectX samples can be built") - include(FindDirectX) - find_package(DirectX) - if(DirectX_FOUND) - set(DIRECTX_SAMPLE_LIST) - set(DIRECTX_SKIPPED_SAMPLE_LIST) - - # We should be able to build DirectX 9 sample - message("-- Determing if DirectX samples can be built - Yes") - - if(SKIP_DIRECTX9_SAMPLE) - message("-- Skipping build of DirectX 9 sample: User disabled") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") - else() - if(DirectX_LIBRARY) - if(DirectX_D3DX9_LIBRARY) - list(APPEND DIRECTX_SAMPLE_LIST "DirectX9 ") - else() - set(SKIP_DIRECTX9_SAMPLE ON) - message("-- Skipping build of DirectX 9 sample: DirectX_D3DX9_LIBRARY not found") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") - endif() - else() - set(SKIP_DIRECTX9_SAMPLE ON) - message("-- Skipping build of DirectX 9 sample: DirectX_LIBRARY not found") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX9 ") - endif() - endif() - - if(SKIP_DIRECTX10_SAMPLE) - message("-- Skipping build of DirectX 10 sample: User disabled") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") - else() - if(DirectX_D3D10_FOUND) - list(APPEND DIRECTX_SAMPLE_LIST "DirectX10 ") - else() - set(SKIP_DIRECTX10_SAMPLE ON) - message("-- Skipping build of DirectX 10 sample: Missing DirectX_D3D10_INCLUDE_DIR, DirectX_D3D10_LIBRARY or DirectX_D3DX10_LIBRARY") - list(APPEND DIRECTX_SKIPPED_SAMPLE_LIST "DirectX10 ") - endif() - endif() - - - if(DIRECTX_SAMPLE_LIST) - message("-- Enabled DirectX samples: " ${DIRECTX_SAMPLE_LIST}) - endif() - if(DIRECTX_SKIPPED_SAMPLE_LIST) - message("-- Disabled DirectX samples: " ${DIRECTX_SKIPPED_SAMPLE_LIST}) - endif() - else() - message("-- Determing if DirectX samples can be built - No") - set(SKIP_DIRECTX9_SAMPLE ON) - set(SKIP_DIRECTX10_SAMPLE ON) - endif() - endif() - elseif(APPLE) - include(FindCarbon) - find_package(Carbon REQUIRED) - - if (Carbon_FOUND) - include_directories(${Carbon_INCLUDE_DIR}) - list(APPEND sample_LIBRARIES ${Carbon_LIBRARIES}) - endif() - else() - find_package(X11 REQUIRED) - if (X11_FOUND) - list(APPEND sample_LIBRARIES ${X11_LIBRARIES}) - # shell/src/x11/InputX11.cpp:InitialiseX11Keymap uses Xkb if - # possible instead of XGetKeyboardMapping for performance - if(X11_Xkb_FOUND) - FIND_PACKAGE_MESSAGE(X11 "Found X11 KBlib: ${X11_X11_LIB}" "[${X11_X11_LIB}][${X11_XkbINCLUDE_DIR}]") - add_definitions(-DHAS_X11XKBLIB) - endif() - endif() - endif() - - set(SAMPLES_DIR opt/Rocket/Samples CACHE PATH "path to samples dir") - - # The samples and tutorials use the shell library - include_directories(${PROJECT_SOURCE_DIR}/Samples/shell/include) - - # Build and install sample shell library - add_library(shell STATIC ${shell_SRC_FILES} ${shell_HDR_FILES}) - if (APPLE) - # We only support i386 for the samples as it still uses Carbon - set_target_properties(shell PROPERTIES OSX_ARCHITECTURES "i386;") - endif() 
- - # Build and install the basic samples - foreach(sample ${samples}) - bl_sample(${sample} ${sample_LIBRARIES}) - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/${sample}) - install(TARGETS ${sample} - RUNTIME DESTINATION ${SAMPLES_DIR}/${sample} - BUNDLE DESTINATION ${SAMPLES_DIR}) - endforeach() - - if(WIN32) - if(NOT SKIP_DIRECTX9_SAMPLE) - include_directories(${DirectX_INCLUDE_DIR}) - - bl_sample(directx ${sample_LIBRARIES} ${DirectX_LIBRARY} ${DirectX_D3DX9_LIBRARY}) - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx) - install(TARGETS directx - RUNTIME DESTINATION ${SAMPLES_DIR}/directx - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - if(NOT SKIP_DIRECTX10_SAMPLE) - include_directories(${DirectX_INCLUDE_DIR} ${DirectX_D3D10_INCLUDE_DIRS}) - - bl_sample(directx10 ${sample_LIBRARIES} ${DirectX_D3D10_LIBRARIES}) - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/directx10) - install(TARGETS directx10 - RUNTIME DESTINATION ${SAMPLES_DIR}/directx10 - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - endif() - - message("-- Can SDL2 sample be built") - find_package(SDL) - if(SDL_FOUND) - find_package(SDL_image) - if(SDL_IMAGE_FOUND) - find_package(GLEW) - if(GLEW_FOUND) - message("-- Can SDL2 sample be built - yes") - include_directories(${SDL_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) - - bl_sample(sdl2 ${sample_LIBRARIES} ${SDL_LIBRARY} ${SDL_IMAGE_LIBRARY} ${GLEW_LIBRARY}) - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sdl2) - install(TARGETS sdl2 - RUNTIME DESTINATION ${SAMPLES_DIR}/sdl2 - BUNDLE DESTINATION ${SAMPLES_DIR}) - else() - message("-- Can SDL2 sample be built - GLEW not found") - endif() - else() - message("-- Can SDL2 sample be built - SDL2_image not found") - endif() - else() - message("-- Can SDL2 sample be built - SDL2 not found") - endif() - - - message("-- Can SFML 1.x sample be built") - find_package(SFML 1 COMPONENTS graphics window system) - if(NOT SFML_FOUND) - message("-- Can SFML 1.x sample be built - no") - elseif(SFML_VERSION_MAJOR GREATER 1) - message("-- Can SFML 1.x sample be built - no: Version 2 detected") - else() - message("-- Can SFML 1.x sample be built - yes") - - include_directories(${SFML_INCLUDE_DIR}) - - bl_sample(sfml ${sample_LIBRARIES} ${SFML_LIBRARIES}) - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml) - install(TARGETS sfml - RUNTIME DESTINATION ${SAMPLES_DIR}/sfml - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - message("-- Can SFML 2.x sample be built") - find_package(SFML 2 COMPONENTS graphics window system) - if(NOT SFML_FOUND) - message("-- Can SFML 2.x sample be built - no") - else() - find_package(GLEW) - if(GLEW_FOUND) - message("-- Can SFML 2.x sample be built - yes: with GLEW") - include_directories(${SFML_INCLUDE_DIR} ${GLEW_INCLUDE_DIR}) - add_definitions( -DENABLE_GLEW ) - bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES} ${GLEW_LIBRARY}) - else() - message("-- Can SFML 2.x sample be built - yes: without GLEW") - include_directories(${SFML_INCLUDE_DIR}) - bl_sample(sfml2 ${sample_LIBRARIES} ${SFML_LIBRARIES}) - endif() - - # The samples always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/basic/sfml2) - install(TARGETS 
sfml2 - RUNTIME DESTINATION ${SAMPLES_DIR}/sfml2 - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - # Build and install the tutorials - foreach(tutorial ${tutorials}) - bl_sample(${tutorial} ${sample_LIBRARIES}) - - # The tutorials always set this as their current working directory - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/tutorial/${tutorial}) - install(TARGETS ${tutorial} - RUNTIME DESTINATION ${SAMPLES_DIR}/${tutorial} - BUNDLE DESTINATION ${SAMPLES_DIR}) - endforeach() - - # Build and install invaders sample - bl_sample(invaders ${sample_LIBRARIES}) - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/invaders) - install(TARGETS invaders - RUNTIME DESTINATION ${SAMPLES_DIR}/invaders - BUNDLE DESTINATION ${SAMPLES_DIR}) - - if(BUILD_PYTHON_BINDINGS) - # Build and install pyinvaders sample - bl_sample(pyinvaders ${sample_LIBRARIES} ${PYTHON_LIBRARIES} ${PY_BINDINGS_LINK_LIBS}) - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/pyinvaders) - install(TARGETS pyinvaders - RUNTIME DESTINATION ${SAMPLES_DIR}/pyinvaders - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() - - if(BUILD_LUA_BINDINGS) - bl_sample(luainvaders RocketCoreLua RocketControlsLua ${sample_LIBRARIES} ${LUA_BINDINGS_LINK_LIBS}) - install(DIRECTORY DESTINATION ${SAMPLES_DIR}/luainvaders) - install(TARGETS luainvaders - RUNTIME DESTINATION ${SAMPLES_DIR}/luainvaders - BUNDLE DESTINATION ${SAMPLES_DIR}) - endif() -endif() - - -#=================================== -# Installation ===================== -#=================================== - -if(BUILD_LUA_BINDINGS AND BUILD_PYTHON_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - ) -else() - if(NOT BUILD_LUA_BINDINGS AND NOT BUILD_PYTHON_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - PATTERN "Python" EXCLUDE - PATTERN "Lua" EXCLUDE - ) - else() - if(BUILD_PYTHON_BINDINGS) - install(FILES ${PROJECT_SOURCE_DIR}/bin/rocket.py - DESTINATION ${PYTHON_INSTDIR} - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - PATTERN "Lua" EXCLUDE - ) - else() - if(BUILD_LUA_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Include/Rocket - DESTINATION include - PATTERN "Python" EXCLUDE - ) - else() - message(FATAL_ERROR "ASSERT: Unexpected option combination, this is a logical impossibility.") - endif() - endif() - endif() -endif() - -if(BUILD_SAMPLES) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/assets - DESTINATION ${SAMPLES_DIR} - ) - - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/template/data - DESTINATION ${SAMPLES_DIR}/tutorial/template - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid/data - DESTINATION ${SAMPLES_DIR}/tutorial/datagrid - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/datagrid_tree/data - DESTINATION ${SAMPLES_DIR}/tutorial/datagrid_tree - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/tutorial/tutorial_drag/data - DESTINATION ${SAMPLES_DIR}/tutorial/tutorial_drag - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/treeview/data - DESTINATION ${SAMPLES_DIR}/basic/treeview - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/basic/drag/data - DESTINATION ${SAMPLES_DIR}/basic/drag - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/invaders/data - DESTINATION ${SAMPLES_DIR}/invaders - ) - - if(BUILD_PYTHON_BINDINGS) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/pyinvaders/data - DESTINATION ${SAMPLES_DIR}/pyinvaders - ) - endif() - - if(BUILD_LUA_BINDINGS) - install(DIRECTORY 
${PROJECT_SOURCE_DIR}/Samples/luainvaders/data - DESTINATION ${SAMPLES_DIR}/luainvaders - ) - install(DIRECTORY ${PROJECT_SOURCE_DIR}/Samples/luainvaders/lua - DESTINATION ${SAMPLES_DIR}/luainvaders - ) - endif() -endif() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt deleted file mode 100644 index f4493c7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/librocket/CMakeLists.txt +++ /dev/null @@ -1,2 +0,0 @@ -add_subdirectory(Build) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt deleted file mode 100644 index 4ccb85d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/noise/CMakeLists.txt +++ /dev/null @@ -1,26 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -cmake_policy(SET CMP0011 NEW) -project(noise CXX) - -# http://sourceforge.net/projects/libnoise - -file(GLOB SOURCE_CODE src/*.cpp src/*.h src/model/*.cpp src/model/*.h src/module/*.cpp src/module/*.h) -include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src) -include_directories(${CMAKE_CURRENT_SOURCE_DIR}/etc) -add_library(${PACKAGE} SHARED ${SOURCE_CODE}) - -#IF(MSVC) -# add_definitions(/nologo /c /D_CRT_SECURE_NO_DEPRECATE) -#ENDIF() - -file(GLOB HEADER_CODE src/*.h ) -INSTALL( FILES ${HEADER_CODE} - DESTINATION "include/${PACKAGE}") - -file(GLOB HEADER_CODE src/model/*.h ) -INSTALL( FILES ${HEADER_CODE} - DESTINATION "include/${PACKAGE}/model") - -file(GLOB HEADER_CODE src/module/*.h ) -INSTALL( FILES ${HEADER_CODE} - DESTINATION "include/${PACKAGE}/module") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp deleted file mode 100644 index 08c2a9f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/FFConsoleDemo.cpp +++ /dev/null @@ -1,1147 +0,0 @@ -#include "OIS.h" - -#include -#include -#include -#include -#include -#include -#include - -using namespace std; - -////////////////////////////////////Needed Windows Headers//////////// -#if defined OIS_WIN32_PLATFORM -# define WIN32_LEAN_AND_MEAN -# include "windows.h" -# include "resource.h" - -////////////////////////////////////Needed Linux Headers////////////// -#elif defined OIS_LINUX_PLATFORM -# include -# include -#else -# error Sorry, not yet implemented on this platform. 
-#endif - - -using namespace OIS; - -#if defined OIS_WIN32_PLATFORM - -// The dialog proc we have to give to CreateDialog -LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) -{ - return FALSE; -} - -#endif - -//////////// Event handler class declaration //////////////////////////////////////////////// -class Application; -class JoystickManager; -class EffectManager; - -class EventHandler : public KeyListener, public JoyStickListener -{ - protected: - - Application* _pApplication; - JoystickManager* _pJoystickMgr; - EffectManager* _pEffectMgr; - - public: - - EventHandler(Application* pApp); - void initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr); - - bool keyPressed( const KeyEvent &arg ); - bool keyReleased( const KeyEvent &arg ); - - bool buttonPressed( const JoyStickEvent &arg, int button ); - bool buttonReleased( const JoyStickEvent &arg, int button ); - - bool axisMoved( const JoyStickEvent &arg, int axis ); - - bool povMoved( const JoyStickEvent &arg, int pov ); -}; - -//////////// Variable classes //////////////////////////////////////////////////////// - -class Variable -{ - protected: - - double _dInitValue; - double _dValue; - - public: - - Variable(double dInitValue) : _dInitValue(dInitValue) { reset(); } - - double getValue() const { return _dValue; } - - void reset() { _dValue = _dInitValue; } - - virtual void setValue(double dValue) { _dValue = dValue; } - - virtual string toString() const - { - ostringstream oss; - oss << _dValue; - return oss.str(); - } - - virtual void update() {}; -}; - -class Constant : public Variable -{ - public: - - Constant(double dInitValue) : Variable(dInitValue) {} - - virtual void setValue(double dValue) { } - -}; - -class LimitedVariable : public Variable -{ - protected: - - double _dMinValue; - double _dMaxValue; - - public: - - LimitedVariable(double dInitValue, double dMinValue, double dMaxValue) - : _dMinValue(dMinValue), _dMaxValue(dMaxValue), Variable(dInitValue) - {} - - virtual void setValue(double dValue) - { - _dValue = dValue; - if (_dValue > _dMaxValue) - _dValue = _dMaxValue; - else if (_dValue < _dMinValue) - _dValue = _dMinValue; - } - -/* virtual string toString() const - { - ostringstream oss; - oss << setiosflags(ios_base::right) << setw(4) - << (int)(200.0 * getValue()/(_dMaxValue - _dMinValue)); // [-100%, +100%] - return oss.str(); - }*/ -}; - -class TriangleVariable : public LimitedVariable -{ - protected: - - double _dDeltaValue; - - public: - - TriangleVariable(double dInitValue, double dDeltaValue, double dMinValue, double dMaxValue) - : LimitedVariable(dInitValue, dMinValue, dMaxValue), _dDeltaValue(dDeltaValue) {}; - - virtual void update() - { - double dValue = getValue() + _dDeltaValue; - if (dValue > _dMaxValue) - { - dValue = _dMaxValue; - _dDeltaValue = -_dDeltaValue; - //cout << "Decreasing variable towards " << _dMinValue << endl; - } - else if (dValue < _dMinValue) - { - dValue = _dMinValue; - _dDeltaValue = -_dDeltaValue; - //cout << "Increasing variable towards " << _dMaxValue << endl; - } - setValue(dValue); - //cout << "TriangleVariable::update : delta=" << _dDeltaValue << ", value=" << dValue << endl; - } -}; - -//////////// Variable effect class ////////////////////////////////////////////////////////// - -typedef map MapVariables; -typedef void (*EffectVariablesApplier)(MapVariables& mapVars, Effect* pEffect); - -class VariableEffect -{ - protected: - - // Effect description - const char* _pszDesc; - - // The associate OIS effect - Effect* _pEffect; - - // The 
effect variables. - MapVariables _mapVariables; - - // The effect variables applier function. - EffectVariablesApplier _pfApplyVariables; - - // True if the effect is currently being played. - bool _bActive; - - public: - - VariableEffect(const char* pszDesc, Effect* pEffect, - const MapVariables& mapVars, const EffectVariablesApplier pfApplyVars) - : _pszDesc(pszDesc), _pEffect(pEffect), - _mapVariables(mapVars), _pfApplyVariables(pfApplyVars), _bActive(false) - {} - - ~VariableEffect() - { - if (_pEffect) - delete _pEffect; - MapVariables::iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - if (iterVars->second) - delete iterVars->second; - - } - - void setActive(bool bActive = true) - { - reset(); - _bActive = bActive; - } - - bool isActive() - { - return _bActive; - } - - Effect* getFFEffect() - { - return _pEffect; - } - - const char* getDescription() const - { - return _pszDesc; - } - - void update() - { - if (isActive()) - { - // Update the variables. - MapVariables::iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - iterVars->second->update(); - - // Apply the updated variable values to the effect. - _pfApplyVariables(_mapVariables, _pEffect); - } - } - - void reset() - { - MapVariables::iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - iterVars->second->reset(); - _pfApplyVariables(_mapVariables, _pEffect); - } - - string toString() const - { - string str; - MapVariables::const_iterator iterVars; - for (iterVars = _mapVariables.begin(); iterVars != _mapVariables.end(); iterVars++) - str += iterVars->first + ":" + iterVars->second->toString() + " "; - return str; - } -}; - -//////////// Joystick manager class //////////////////////////////////////////////////////// - -class JoystickManager -{ - protected: - - // Input manager. - InputManager* _pInputMgr; - - // Vectors to hold joysticks and associated force feedback devices - vector _vecJoys; - vector _vecFFDev; - - // Selected joystick - int _nCurrJoyInd; - - // Force feedback detected ? - bool _bFFFound; - - // Selected joystick master gain. - float _dMasterGain; - - // Selected joystick auto-center mode. - bool _bAutoCenter; - - public: - - JoystickManager(InputManager* pInputMgr, EventHandler* pEventHdlr) - : _pInputMgr(pInputMgr), _nCurrJoyInd(-1), _dMasterGain(0.5), _bAutoCenter(true) - - { - _bFFFound = false; - for( int nJoyInd = 0; nJoyInd < pInputMgr->getNumberOfDevices(OISJoyStick); ++nJoyInd ) - { - //Create the stick - JoyStick* pJoy = (JoyStick*)pInputMgr->createInputObject( OISJoyStick, true ); - cout << endl << "Created buffered joystick #" << nJoyInd << " '" << pJoy->vendor() - << "' (Id=" << pJoy->getID() << ")"; - - // Check for FF, and if so, keep the joy and dump FF info - ForceFeedback* pFFDev = (ForceFeedback*)pJoy->queryInterface(Interface::ForceFeedback ); - if( pFFDev ) - { - _bFFFound = true; - - // Keep the joy to play with it. - pJoy->setEventCallback(pEventHdlr); - _vecJoys.push_back(pJoy); - - // Keep also the associated FF device - _vecFFDev.push_back(pFFDev); - - // Dump FF supported effects and other info. 
- cout << endl << " * Number of force feedback axes : " - << pFFDev->getFFAxesNumber() << endl; - const ForceFeedback::SupportedEffectList &lstFFEffects = - pFFDev->getSupportedEffects(); - if (lstFFEffects.size() > 0) - { - cout << " * Supported effects :"; - ForceFeedback::SupportedEffectList::const_iterator itFFEff; - for(itFFEff = lstFFEffects.begin(); itFFEff != lstFFEffects.end(); ++itFFEff) - cout << " " << Effect::getEffectTypeName(itFFEff->second); - cout << endl << endl; - } - else - cout << "Warning: no supported effect found !" << endl; - } - else - { - cout << " (no force feedback support detected) => ignored." << endl << endl; - _pInputMgr->destroyInputObject(pJoy); - } - } - } - - ~JoystickManager() - { - for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) - _pInputMgr->destroyInputObject( _vecJoys[nJoyInd] ); - } - - size_t getNumberOfJoysticks() const - { - return _vecJoys.size(); - } - - bool wasFFDetected() const - { - return _bFFFound; - } - - enum EWhichJoystick { ePrevious=-1, eNext=+1 }; - - void selectJoystick(EWhichJoystick eWhich) - { - // Note: Reset the master gain to half the maximum and autocenter mode to Off, - // when really selecting a new joystick. - if (_nCurrJoyInd < 0) - { - _nCurrJoyInd = 0; - _dMasterGain = 0.5; // Half the maximum. - changeMasterGain(0.0); - } - else - { - _nCurrJoyInd += eWhich; - if (_nCurrJoyInd < -1 || _nCurrJoyInd >= (int)_vecJoys.size()) - _nCurrJoyInd = -1; - if (_vecJoys.size() > 1 && _nCurrJoyInd >= 0) - { - _dMasterGain = 0.5; // Half the maximum. - changeMasterGain(0.0); - } - } - } - - ForceFeedback* getCurrentFFDevice() - { - return (_nCurrJoyInd >= 0) ? _vecFFDev[_nCurrJoyInd] : 0; - } - - void changeMasterGain(float dDeltaPercent) - { - if (_nCurrJoyInd >= 0) - { - _dMasterGain += dDeltaPercent / 100; - if (_dMasterGain > 1.0) - _dMasterGain = 1.0; - else if (_dMasterGain < 0.0) - _dMasterGain = 0.0; - - _vecFFDev[_nCurrJoyInd]->setMasterGain(_dMasterGain); - } - } - - enum EAutoCenterHow { eOff, eOn, eToggle }; - - void changeAutoCenter(EAutoCenterHow eHow = eToggle) - { - if (_nCurrJoyInd >= 0) - { - if (eHow == eToggle) - _bAutoCenter = !_bAutoCenter; - else - _bAutoCenter = (eHow == eOn ? true : false); - _vecFFDev[_nCurrJoyInd]->setAutoCenterMode(_bAutoCenter); - } - } - - void captureEvents() - { - // This fires off buffered events for each joystick we have - for(size_t nJoyInd = 0; nJoyInd < _vecJoys.size(); ++nJoyInd) - if( _vecJoys[nJoyInd] ) - _vecJoys[nJoyInd]->capture(); - } - - string toString() const - { - // Warning: Wrong result if more than 10 joysticks ... - ostringstream oss; - oss << "Joy:" << (_nCurrJoyInd >= 0 ? (char)('0' + _nCurrJoyInd) : '-'); - oss << " Gain:" << setiosflags(ios_base::right) << setw(3) << (int)(_dMasterGain*100); - oss << "% Center:" << (_bAutoCenter ? 
" On " : "Off"); - return oss.str(); - } -}; - -//////////// Effect variables applier functions ///////////////////////////////////////////// -// These functions apply the given Variables to the given OIS::Effect - -// Variable force "Force" + optional "AttackFactor" constant, on a OIS::ConstantEffect -void forceVariableApplier(MapVariables& mapVars, Effect* pEffect) -{ - double dForce = mapVars["Force"]->getValue(); - double dAttackFactor = 1.0; - if (mapVars.find("AttackFactor") != mapVars.end()) - dAttackFactor = mapVars["AttackFactor"]->getValue(); - - ConstantEffect* pConstForce = dynamic_cast(pEffect->getForceEffect()); - pConstForce->level = (int)dForce; - pConstForce->envelope.attackLevel = (unsigned short)fabs(dForce*dAttackFactor); - pConstForce->envelope.fadeLevel = (unsigned short)fabs(dForce); // Fade never reached, in fact. -} - -// Variable "Period" on an OIS::PeriodicEffect -void periodVariableApplier(MapVariables& mapVars, Effect* pEffect) -{ - double dPeriod = mapVars["Period"]->getValue(); - - PeriodicEffect* pPeriodForce = dynamic_cast(pEffect->getForceEffect()); - pPeriodForce->period = (unsigned int)dPeriod; -} - - -//////////// Effect manager class ////////////////////////////////////////////////////////// - -class EffectManager -{ - protected: - - // The joystick manager - JoystickManager* _pJoystickMgr; - - // Vector to hold variable effects - vector _vecEffects; - - // Selected effect - int _nCurrEffectInd; - - // Update frequency (Hz) - unsigned int _nUpdateFreq; - - // Indexes (in _vecEffects) of the variable effects that are playable by the selected joystick. - vector _vecPlayableEffectInd; - - - public: - - EffectManager(JoystickManager* pJoystickMgr, unsigned int nUpdateFreq) - : _pJoystickMgr(pJoystickMgr), _nUpdateFreq(nUpdateFreq), _nCurrEffectInd(-1) - { - Effect* pEffect; - MapVariables mapVars; - ConstantEffect* pConstForce; - PeriodicEffect* pPeriodForce; - - // Please don't modify or remove effects (unless there is some bug ...) : - // add new ones to enhance the test repository. - // And feel free to add any tested device, even when the test failed ! - // Tested devices capabilities : - // - Logitech G25 Racing wheel : - // * Only 1 axis => no directional 2D effect (only left and right) - // * Full support for constant force under WinXPSP2DX9 and Linux 2.6.22.9 - // * Full support for periodic forces under WinXPSP2DX9 - // (but poor rendering under 20ms period), and no support under Linux 2.6.22.9 - // * Full support reported (not tested) for all other forces under WinXPSP2DX9, - // and no support under Linux 2.6.22.9 - // - Logitech Rumble pad 2 : - // * Only 1 axis => no directional 2D effect (only left and right) - // * Forces amplitude is rendered through the inertia motors rotation frequency - // (stronger force => quicker rotation) - // * 2 inertia motors : 1 with small inertia, 1 with "heavy" one. - // => poor force feedback rendering ... - // * Support (poor) for all OIS forces under WinXPSP2DX9, - // and only for Triangle, Square and Sine periodic forces under Linux 2.6.22.9 - // (reported by enumeration, but does not seem to work actually) - // Master gain setting tests: - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK, Linux2.6.22.9=OK. - // Auto-center mode setting tests: - // - Logitech G25 Racing wheel : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. 
- // - Logitech Rumble pad 2 : WinXPSP2DX9=Failed (DINPUT?), Linux2.6.22.9=Reported as not supported. - - // 1) Constant force on 1 axis with 20s-period triangle oscillations in [-10K, +10K]. - // Notes: Linux: replay_length: no way to get it to work if not 0 or Effect::OIS_INFINITE - // Tested devices : - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux2.6.22.9=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (but only light motor involved), - // Linux2.6.22.9=Not supported - pEffect = new Effect(Effect::ConstantForce, Effect::Constant); - pEffect->direction = Effect::North; - pEffect->trigger_button = 0; - pEffect->trigger_interval = 0; - pEffect->replay_length = Effect::OIS_INFINITE; // Linux/Win32: Same behaviour as 0. - pEffect->replay_delay = 0; - pEffect->setNumAxes(1); - pConstForce = dynamic_cast(pEffect->getForceEffect()); - pConstForce->level = 5000; //-10K to +10k - pConstForce->envelope.attackLength = 0; - pConstForce->envelope.attackLevel = (unsigned short)pConstForce->level; - pConstForce->envelope.fadeLength = 0; - pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; - - mapVars.clear(); - mapVars["Force"] = - new TriangleVariable(0.0, // F0 - 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle - -10000.0, // Fmin - 10000.0); // Fmax - mapVars["AttackFactor"] = new Constant(1.0); - - _vecEffects.push_back - (new VariableEffect - ("Constant force on 1 axis with 20s-period triangle oscillations " - "of its signed amplitude in [-10K, +10K]", - pEffect, mapVars, forceVariableApplier)); - - // 2) Constant force on 1 axis with noticeable attack - // with 20s-period triangle oscillations in [-10K, +10K]. - // Tested devices : - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK (including attack, but only light motor involved), - // Linux2.6.22.9=Not supported. - pEffect = new Effect(Effect::ConstantForce, Effect::Constant); - pEffect->direction = Effect::North; - pEffect->trigger_button = 0; - pEffect->trigger_interval = 0; - pEffect->replay_length = Effect::OIS_INFINITE; //(unsigned int)(1000000.0/_nUpdateFreq); // Linux: Does not work. - pEffect->replay_delay = 0; - pEffect->setNumAxes(1); - pConstForce = dynamic_cast(pEffect->getForceEffect()); - pConstForce->level = 5000; //-10K to +10k - pConstForce->envelope.attackLength = (unsigned int)(1000000.0/_nUpdateFreq/2); - pConstForce->envelope.attackLevel = (unsigned short)(pConstForce->level*0.1); - pConstForce->envelope.fadeLength = 0; // Never reached, actually. - pConstForce->envelope.fadeLevel = (unsigned short)pConstForce->level; // Idem - - mapVars.clear(); - mapVars["Force"] = - new TriangleVariable(0.0, // F0 - 4*10000/_nUpdateFreq / 20.0, // dF for a 20s-period triangle - -10000.0, // Fmin - 10000.0); // Fmax - mapVars["AttackFactor"] = new Constant(0.1); - - _vecEffects.push_back - (new VariableEffect - ("Constant force on 1 axis with noticeable attack (app update period / 2)" - "and 20s-period triangle oscillations of its signed amplitude in [-10K, +10K]", - pEffect, mapVars, forceVariableApplier)); - - // 3) Triangle periodic force on 1 axis with 40s-period triangle oscillations - // of its period in [10, 400] ms, and constant amplitude - // Tested devices : - // - Logitech G25 Racing wheel : WinXPSP2DX9=OK, Linux=OK. - // - Logitech Rumble pad 2 : WinXPSP2DX9=OK but only light motor involved, - // Linux2.6.22.9=Failed. 
- pEffect = new Effect(Effect::PeriodicForce, Effect::Triangle); - pEffect->direction = Effect::North; - pEffect->trigger_button = 0; - pEffect->trigger_interval = 0; - pEffect->replay_length = Effect::OIS_INFINITE; - pEffect->replay_delay = 0; - pEffect->setNumAxes(1); - pPeriodForce = dynamic_cast<PeriodicEffect*>(pEffect->getForceEffect()); - pPeriodForce->magnitude = 10000; // 0 to +10k - pPeriodForce->offset = 0; - pPeriodForce->phase = 0; // 0 to 35599 - pPeriodForce->period = 10000; // Micro-seconds - pPeriodForce->envelope.attackLength = 0; - pPeriodForce->envelope.attackLevel = (unsigned short)pPeriodForce->magnitude; - pPeriodForce->envelope.fadeLength = 0; - pPeriodForce->envelope.fadeLevel = (unsigned short)pPeriodForce->magnitude; - - mapVars.clear(); - mapVars["Period"] = - new TriangleVariable(1*1000.0, // P0 - 4*(400-10)*1000.0/_nUpdateFreq / 40.0, // dP for a 40s-period triangle - 10*1000.0, // Pmin - 400*1000.0); // Pmax - _vecEffects.push_back - (new VariableEffect - ("Periodic force on 1 axis with 40s-period triangle oscillations " - "of its period in [10, 400] ms, and constant amplitude", - pEffect, mapVars, periodVariableApplier)); - - } - - ~EffectManager() - { - vector<VariableEffect*>::iterator iterEffs; - for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++) - delete *iterEffs; - } - - void updateActiveEffects() - { - vector<VariableEffect*>::iterator iterEffs; - for (iterEffs = _vecEffects.begin(); iterEffs != _vecEffects.end(); iterEffs++) - if ((*iterEffs)->isActive()) - { - (*iterEffs)->update(); - _pJoystickMgr->getCurrentFFDevice()->modify((*iterEffs)->getFFEffect()); - } - } - - void checkPlayableEffects() - { - // Nothing to do if no joystick currently selected - if (!_pJoystickMgr->getCurrentFFDevice()) - return; - - // Get the list of indexes of effects that the selected device can play - _vecPlayableEffectInd.clear(); - for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) - { - const Effect::EForce eForce = _vecEffects[nEffInd]->getFFEffect()->force; - const Effect::EType eType = _vecEffects[nEffInd]->getFFEffect()->type; - if (_pJoystickMgr->getCurrentFFDevice()->supportsEffect(eForce, eType)) - { - _vecPlayableEffectInd.push_back(nEffInd); - } - } - - // Print details about playable effects - if (_vecPlayableEffectInd.empty()) - { - cout << endl << endl << "The device can't play any effect of the test set" << endl; - } - else - { - cout << endl << endl << "Selected device can play the following effects :" << endl; - for (size_t nEffIndInd = 0; nEffIndInd < _vecPlayableEffectInd.size(); nEffIndInd++) - printEffect(_vecPlayableEffectInd[nEffIndInd]); - cout << endl; - } - } - - enum EWhichEffect { ePrevious=-1, eNone=0, eNext=+1 }; - - void selectEffect(EWhichEffect eWhich) - { - - // Nothing to do if no joystick currently selected - if (!_pJoystickMgr->getCurrentFFDevice()) - { - cout << "\nNo Joystick selected.\n"; - return; - } - - // Nothing to do if joystick cannot play any effect - if (_vecPlayableEffectInd.empty()) - { - cout << "\nNo playable effects.\n"; - return; - } - - // If no effect selected, and next or previous requested, select the first one. - if (eWhich != eNone && _nCurrEffectInd < 0) - _nCurrEffectInd = 0; - - // Otherwise, remove the current one from the device, - // and then select the requested one if any.
- else if (_nCurrEffectInd >= 0) - { - _pJoystickMgr->getCurrentFFDevice() - ->remove(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); - _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(false); - _nCurrEffectInd += eWhich; - if (_nCurrEffectInd < -1 || _nCurrEffectInd >= (int)_vecPlayableEffectInd.size()) - _nCurrEffectInd = -1; - } - - // If no effect must be selected, reset the selection index - if (eWhich == eNone) - { - _nCurrEffectInd = -1; - } - - // Otherwise, upload the new selected effect to the device if any. - else if (_nCurrEffectInd >= 0) - { - _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->setActive(true); - _pJoystickMgr->getCurrentFFDevice() - ->upload(_vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->getFFEffect()); - } - } - - void printEffect(size_t nEffInd) - { - cout << "* #" << nEffInd << " : " << _vecEffects[nEffInd]->getDescription() << endl; - } - - void printEffects() - { - for (size_t nEffInd = 0; nEffInd < _vecEffects.size(); nEffInd++) - printEffect(nEffInd); - } - - string toString() const - { - ostringstream oss; - oss << "DevMem: " << setiosflags(ios_base::right) << setw(3); - - //This causes constant exceptions with my device. Not needed for anything other than debugging - //if (_pJoystickMgr->getCurrentFFDevice()) - // oss << _pJoystickMgr->getCurrentFFDevice()->getFFMemoryLoad() << "%"; - //else - // oss << "----"; - - oss << " Effect:" << setw(2); - if (_nCurrEffectInd >= 0) - oss << _vecPlayableEffectInd[_nCurrEffectInd] - << " " << _vecEffects[_vecPlayableEffectInd[_nCurrEffectInd]]->toString(); - else - oss << "--"; - return oss.str(); - } -}; - -//////////// Application class //////////////////////////////////////////////////////// - -class Application -{ - protected: - InputManager* _pInputMgr; - EventHandler* _pEventHdlr; - Keyboard* _pKeyboard; - JoystickManager* _pJoystickMgr; - EffectManager* _pEffectMgr; - -#if defined OIS_WIN32_PLATFORM - HWND _hWnd; -#elif defined OIS_LINUX_PLATFORM - Display* _pXDisp; - Window _xWin; -#endif - - bool _bMustStop; - bool _bIsInitialized; - - int _nStatus; - - // App. hart beat frequency. - static const unsigned int _nHartBeatFreq = 20; // Hz - - // Effects update frequency (Hz) : Needs to be quite lower than app. hart beat frequency, - // if we want to be able to calmly study effect changes ... 
- static const unsigned int _nEffectUpdateFreq = 1; // Hz - - public: - - Application(int argc, const char* argv[]) - { - _pInputMgr = 0; - _pEventHdlr = 0; - _pKeyboard = 0; - _pJoystickMgr = 0; - _pEffectMgr = 0; - -#if defined OIS_WIN32_PLATFORM - _hWnd = 0; -#elif defined OIS_LINUX_PLATFORM - _pXDisp = 0; - _xWin = 0; -#endif - - _bMustStop = false; - - _bIsInitialized = false; - _nStatus = 0; - } - - int initialize() - { - ostringstream wnd; - -#if defined OIS_WIN32_PLATFORM - - //Create a capture window for Input Grabbing - _hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); - if( _hWnd == NULL ) - OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); - - ShowWindow(_hWnd, SW_SHOW); - - wnd << (size_t)_hWnd; - -#elif defined OIS_LINUX_PLATFORM - - //Connects to default X window - if( !(_pXDisp = XOpenDisplay(0)) ) - OIS_EXCEPT(E_General, "Error opening X!"); - - //Create a window - _xWin = XCreateSimpleWindow(_pXDisp,DefaultRootWindow(_pXDisp), 0,0, 100,100, 0, 0, 0); - - //bind our connection to that window - XMapWindow(_pXDisp, _xWin); - - //Select what events we want to listen to locally - XSelectInput(_pXDisp, _xWin, StructureNotifyMask); - - //Wait for Window to show up - XEvent event; - do { XNextEvent(_pXDisp, &event); } while(event.type != MapNotify); - - wnd << _xWin; - -#endif - - // Create OIS input manager - ParamList pl; - pl.insert(make_pair(string("WINDOW"), wnd.str())); - _pInputMgr = InputManager::createInputSystem(pl); - cout << _pInputMgr->inputSystemName() << " created." << endl; - - // Create the event handler. - _pEventHdlr = new EventHandler(this); - - // Create a simple keyboard - _pKeyboard = (Keyboard*)_pInputMgr->createInputObject( OISKeyboard, true ); - _pKeyboard->setEventCallback( _pEventHdlr ); - - // Create the joystick manager. - _pJoystickMgr = new JoystickManager(_pInputMgr, _pEventHdlr); - if( !_pJoystickMgr->wasFFDetected() ) - { - cout << "No Force Feedback device detected." << endl; - _nStatus = 1; - return _nStatus; - } - - // Create force feedback effect manager. - _pEffectMgr = new EffectManager(_pJoystickMgr, _nEffectUpdateFreq); - - // Initialize the event handler. - _pEventHdlr->initialize(_pJoystickMgr, _pEffectMgr); - - _bIsInitialized = true; - - return _nStatus; - } - -#if defined OIS_LINUX_PLATFORM - - // This is just here to show that you still receive x11 events, - // as the lib only needs mouse/key events - void checkX11Events() - { - XEvent event; - - //Poll x11 for events - while( XPending(_pXDisp) > 0 ) - { - XNextEvent(_pXDisp, &event); - } - } -#endif - - int run() - { - const unsigned int nMaxEffectUpdateCnt = _nHartBeatFreq / _nEffectUpdateFreq; - unsigned int nEffectUpdateCnt = 0; - - // Initailize app. if not already done, and exit if something went wrong. - if (!_bIsInitialized) - initialize(); - - if (!_bIsInitialized) - return _nStatus; - - try - { - //Main polling loop - while(!_bMustStop) - { - // This fires off buffered events for keyboards - _pKeyboard->capture(); - - // This fires off buffered events for each joystick we have - _pJoystickMgr->captureEvents(); - - // Update currently selected effects if time has come to. - if (!nEffectUpdateCnt) - { - _pEffectMgr->updateActiveEffects(); - nEffectUpdateCnt = nMaxEffectUpdateCnt; - } - else - nEffectUpdateCnt--; - - // Update state line. 
- cout << "\r" << _pJoystickMgr->toString() << " " << _pEffectMgr->toString() - << " "; - - //Throttle down CPU usage & handle OS events -#if defined OIS_WIN32_PLATFORM - Sleep( (DWORD)(1000.0/_nHartBeatFreq) ); - MSG msg; - while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) - { - TranslateMessage( &msg ); - DispatchMessage( &msg ); - } -#elif defined OIS_LINUX_PLATFORM - checkX11Events(); - usleep(1000000.0/_nHartBeatFreq); -#endif - } - } - catch( const Exception &ex ) - { -#if defined OIS_WIN32_PLATFORM - MessageBox(0, ex.eText, "Exception Raised!", MB_OK); -#else - cout << endl << "OIS Exception Caught!" << endl - << "\t" << ex.eText << "[Line " << ex.eLine << " in " << ex.eFile << "]" << endl; -#endif - } - - terminate(); - - return _nStatus; - } - - void stop() - { - _bMustStop = true; - } - - void terminate() - { - if (_pInputMgr) - { - _pInputMgr->destroyInputObject( _pKeyboard ); - _pKeyboard = 0; - if (_pJoystickMgr) - { - delete _pJoystickMgr; - _pJoystickMgr = 0; - } - InputManager::destroyInputSystem(_pInputMgr); - _pInputMgr = 0; - } - if (_pEffectMgr) - { - delete _pEffectMgr; - _pEffectMgr = 0; - } - if (_pEventHdlr) - { - delete _pEventHdlr; - _pEventHdlr = 0; - } - -#if defined OIS_LINUX_PLATFORM - // Be nice to X and clean up the x window - XDestroyWindow(_pXDisp, _xWin); - XCloseDisplay(_pXDisp); -#endif - } - - JoystickManager* getJoystickManager() - { - return _pJoystickMgr; - } - - EffectManager* getEffectManager() - { - return _pEffectMgr; - } - - void printHelp() - { - cout << endl - << "Keyboard actions :" << endl - << "* Escape : Exit App" << endl - << "* H : This help menu" << endl - << "* Right/Left : Select next/previous joystick among the FF capable detected ones" << endl - << "* Up/Down : Select next/previous effect for the selected joystick" << endl - << "* PgUp/PgDn : Increase/decrease from 5% the master gain " - << "for all the joysticks" << endl - << "* Space : Toggle auto-centering on all the joysticks" << endl; - if (_bIsInitialized) - { - cout << endl << "Implemented effects :" << endl << endl; - _pEffectMgr->printEffects(); - cout << endl; - } - } -}; - -//////////// Event handler class definition //////////////////////////////////////////////// - -EventHandler::EventHandler(Application* pApp) -: _pApplication(pApp) -{} - -void EventHandler::initialize(JoystickManager* pJoystickMgr, EffectManager* pEffectMgr) -{ - _pJoystickMgr = pJoystickMgr; - _pEffectMgr = pEffectMgr; -} - -bool EventHandler::keyPressed( const KeyEvent &arg ) -{ - switch (arg.key) - { - // Quit. - case KC_ESCAPE: - _pApplication->stop(); - break; - - // Help. - case KC_H: - _pApplication->printHelp(); - break; - - // Change current joystick. - case KC_RIGHT: - _pEffectMgr->selectEffect(EffectManager::eNone); - _pJoystickMgr->selectJoystick(JoystickManager::eNext); - _pEffectMgr->checkPlayableEffects(); - break; - case KC_LEFT: - _pEffectMgr->selectEffect(EffectManager::eNone); - _pJoystickMgr->selectJoystick(JoystickManager::ePrevious); - _pEffectMgr->checkPlayableEffects(); - break; - - // Change current effect. - case KC_UP: - _pEffectMgr->selectEffect(EffectManager::eNext); - break; - case KC_DOWN: - _pEffectMgr->selectEffect(EffectManager::ePrevious); - break; - - // Change current master gain. - case KC_PGUP: - _pJoystickMgr->changeMasterGain(5.0); // Percent - break; - case KC_PGDOWN: - _pJoystickMgr->changeMasterGain(-5.0); // Percent - break; - - // Toggle auto-center mode. 
- case KC_SPACE: - _pJoystickMgr->changeAutoCenter(); - break; - - default: - cout << "Non mapped key: " << arg.key << endl; - } - return true; -} - -bool EventHandler::keyReleased( const KeyEvent &arg ) -{ - return true; -} - -bool EventHandler::buttonPressed( const JoyStickEvent &arg, int button ) -{ - return true; -} -bool EventHandler::buttonReleased( const JoyStickEvent &arg, int button ) -{ - return true; -} -bool EventHandler::axisMoved( const JoyStickEvent &arg, int axis ) -{ - return true; -} -bool EventHandler::povMoved( const JoyStickEvent &arg, int pov ) -{ - return true; -} - -//========================================================================================== -int main(int argc, const char* argv[]) -{ - - cout << endl - << "This is a simple command line Force Feedback testing demo ..." << endl - << "All connected joystick devices will be created and if FF Support is found," << endl - << "you'll be able to play some predefined variable effects on them." << endl << endl - << "Note: 1 effect can be played on 1 joystick at a time for the moment." << endl << endl; - - Application app(argc, argv); - - int status = app.initialize(); - - if (!status) - { - app.printHelp(); - - status = app.run(); - } - - cout << endl << endl << "Exiting ..." << endl << endl; - -#if defined OIS_WIN32_PLATFORM && _DEBUG - cout << "Click on this window and ..." << endl; - system("pause"); -#endif - - exit(status); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am deleted file mode 100644 index 926f7f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/Makefile.am +++ /dev/null @@ -1,11 +0,0 @@ -INCLUDES = $(STLPORT_CFLAGS) -I$(top_srcdir)/includes $(CFLAGS) -I/usr/X11R6/include - -noinst_PROGRAMS = ConsoleApp FFConsoleTest - -ConsoleApp_SOURCES = OISConsole.cpp -ConsoleApp_LDFLAGS = -L$(top_builddir)/src -ConsoleApp_LDADD = -lOIS -lX11 -lXext - -FFConsoleTest_SOURCES = FFConsoleDemo.cpp -FFConsoleTest_LDFLAGS = -L$(top_builddir)/src -FFConsoleTest_LDADD = -lOIS -lX11 -lXext diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp deleted file mode 100644 index 0850004..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/demos/OISConsole.cpp +++ /dev/null @@ -1,459 +0,0 @@ -//////////////////////////////// OS Nuetral Headers //////////////// -#include "OISInputManager.h" -#include "OISException.h" -#include "OISKeyboard.h" -#include "OISMouse.h" -#include "OISJoyStick.h" -#include "OISEvents.h" - -//Advanced Usage -#include "OISForceFeedback.h" - -#include -#include -#include - -////////////////////////////////////Needed Windows Headers//////////// -#if defined OIS_WIN32_PLATFORM -# define WIN32_LEAN_AND_MEAN -# include "windows.h" -# ifdef min -# undef min -# endif -# include "resource.h" - LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ); -////////////////////////////////////////////////////////////////////// -////////////////////////////////////Needed Linux Headers////////////// -#elif defined OIS_LINUX_PLATFORM -# include -# include - void checkX11Events(); 
-////////////////////////////////////////////////////////////////////// -////////////////////////////////////Needed Mac Headers////////////// -#elif defined OIS_APPLE_PLATFORM -# include - void checkMacEvents(); -#endif -////////////////////////////////////////////////////////////////////// -using namespace OIS; - -//-- Some local prototypes --// -void doStartup(); -void handleNonBufferedKeys(); -void handleNonBufferedMouse(); -void handleNonBufferedJoy( JoyStick* js ); - -//-- Easy access globals --// -bool appRunning = true; //Global Exit Flag - -const char *g_DeviceType[6] = {"OISUnknown", "OISKeyboard", "OISMouse", "OISJoyStick", - "OISTablet", "OISOther"}; - -InputManager *g_InputManager = 0; //Our Input System -Keyboard *g_kb = 0; //Keyboard Device -Mouse *g_m = 0; //Mouse Device -JoyStick* g_joys[4] = {0,0,0,0}; //This demo supports up to 4 controllers - -//-- OS Specific Globals --// -#if defined OIS_WIN32_PLATFORM - HWND hWnd = 0; -#elif defined OIS_LINUX_PLATFORM - Display *xDisp = 0; - Window xWin = 0; -#elif defined OIS_APPLE_PLATFORM - WindowRef mWin = 0; -#endif - -//////////// Common Event handler class //////// -class EventHandler : public KeyListener, public MouseListener, public JoyStickListener -{ -public: - EventHandler() {} - ~EventHandler() {} - bool keyPressed( const KeyEvent &arg ) { - std::cout << " KeyPressed {" << arg.key - << ", " << ((Keyboard*)(arg.device))->getAsString(arg.key) - << "} || Character (" << (char)arg.text << ")" << std::endl; - return true; - } - bool keyReleased( const KeyEvent &arg ) { - if( arg.key == KC_ESCAPE || arg.key == KC_Q ) - appRunning = false; - std::cout << "KeyReleased {" << ((Keyboard*)(arg.device))->getAsString(arg.key) << "}\n"; - return true; - } - bool mouseMoved( const MouseEvent &arg ) { - const OIS::MouseState& s = arg.state; - std::cout << "\nMouseMoved: Abs(" - << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" - << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; - return true; - } - bool mousePressed( const MouseEvent &arg, MouseButtonID id ) { - const OIS::MouseState& s = arg.state; - std::cout << "\nMouse button #" << id << " pressed. Abs(" - << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" - << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; - return true; - } - bool mouseReleased( const MouseEvent &arg, MouseButtonID id ) { - const OIS::MouseState& s = arg.state; - std::cout << "\nMouse button #" << id << " released. Abs(" - << s.X.abs << ", " << s.Y.abs << ", " << s.Z.abs << ") Rel(" - << s.X.rel << ", " << s.Y.rel << ", " << s.Z.rel << ")"; - return true; - } - bool buttonPressed( const JoyStickEvent &arg, int button ) { - std::cout << std::endl << arg.device->vendor() << ". Button Pressed # " << button; - return true; - } - bool buttonReleased( const JoyStickEvent &arg, int button ) { - std::cout << std::endl << arg.device->vendor() << ". Button Released # " << button; - return true; - } - bool axisMoved( const JoyStickEvent &arg, int axis ) - { - //Provide a little dead zone - if( arg.state.mAxes[axis].abs > 2500 || arg.state.mAxes[axis].abs < -2500 ) - std::cout << std::endl << arg.device->vendor() << ". Axis # " << axis << " Value: " << arg.state.mAxes[axis].abs; - return true; - } - bool povMoved( const JoyStickEvent &arg, int pov ) - { - std::cout << std::endl << arg.device->vendor() << ". 
POV" << pov << " "; - - if( arg.state.mPOV[pov].direction & Pov::North ) //Going up - std::cout << "North"; - else if( arg.state.mPOV[pov].direction & Pov::South ) //Going down - std::cout << "South"; - - if( arg.state.mPOV[pov].direction & Pov::East ) //Going right - std::cout << "East"; - else if( arg.state.mPOV[pov].direction & Pov::West ) //Going left - std::cout << "West"; - - if( arg.state.mPOV[pov].direction == Pov::Centered ) //stopped/centered out - std::cout << "Centered"; - return true; - } - - bool vector3Moved( const JoyStickEvent &arg, int index) - { - std::cout.precision(2); - std::cout.flags(std::ios::fixed | std::ios::right); - std::cout << std::endl << arg.device->vendor() << ". Orientation # " << index - << " X Value: " << arg.state.mVectors[index].x - << " Y Value: " << arg.state.mVectors[index].y - << " Z Value: " << arg.state.mVectors[index].z; - std::cout.precision(); - std::cout.flags(); - return true; - } -}; - -//Create a global instance -EventHandler handler; - -int main() -{ - std::cout << "\n\n*** OIS Console Demo App is starting up... *** \n"; - try - { - doStartup(); - std::cout << "\nStartup done... Hit 'q' or ESC to exit.\n\n"; - - while(appRunning) - { - //Throttle down CPU usage - #if defined OIS_WIN32_PLATFORM - Sleep(90); - MSG msg; - while( PeekMessage( &msg, NULL, 0U, 0U, PM_REMOVE ) ) - { - TranslateMessage( &msg ); - DispatchMessage( &msg ); - } - #elif defined OIS_LINUX_PLATFORM - checkX11Events(); - usleep( 500 ); - #elif defined OIS_APPLE_PLATFORM - checkMacEvents(); - usleep( 500 ); - #endif - - if( g_kb ) - { - g_kb->capture(); - if( !g_kb->buffered() ) - handleNonBufferedKeys(); - } - - if( g_m ) - { - g_m->capture(); - if( !g_m->buffered() ) - handleNonBufferedMouse(); - } - - for( int i = 0; i < 4 ; ++i ) - { - if( g_joys[i] ) - { - g_joys[i]->capture(); - if( !g_joys[i]->buffered() ) - handleNonBufferedJoy( g_joys[i] ); - } - } - } - } - catch( const Exception &ex ) - { - #if defined OIS_WIN32_PLATFORM - MessageBox( NULL, ex.eText, "An exception has occurred!", MB_OK | - MB_ICONERROR | MB_TASKMODAL); - #else - std::cout << "\nOIS Exception Caught!\n" << "\t" << ex.eText << "[Line " - << ex.eLine << " in " << ex.eFile << "]\nExiting App"; - #endif - } - catch(std::exception &ex) - { - std::cout << "Caught std::exception: what = " << ex.what() << std::endl; - } - - //Destroying the manager will cleanup unfreed devices - if( g_InputManager ) - InputManager::destroyInputSystem(g_InputManager); - -#if defined OIS_LINUX_PLATFORM - // Be nice to X and clean up the x window - XDestroyWindow(xDisp, xWin); - XCloseDisplay(xDisp); -#endif - - std::cout << "\n\nGoodbye\n\n"; - return 0; -} - -void doStartup() -{ - ParamList pl; - -#if defined OIS_WIN32_PLATFORM - //Create a capture window for Input Grabbing - hWnd = CreateDialog( 0, MAKEINTRESOURCE(IDD_DIALOG1), 0,(DLGPROC)DlgProc); - if( hWnd == NULL ) - OIS_EXCEPT(E_General, "Failed to create Win32 Window Dialog!"); - - ShowWindow(hWnd, SW_SHOW); - - std::ostringstream wnd; - wnd << (size_t)hWnd; - - pl.insert(std::make_pair( std::string("WINDOW"), wnd.str() )); - - //Default mode is foreground exclusive..but, we want to show mouse - so nonexclusive -// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_FOREGROUND" ))); -// pl.insert(std::make_pair(std::string("w32_mouse"), std::string("DISCL_NONEXCLUSIVE"))); -#elif defined OIS_LINUX_PLATFORM - //Connects to default X window - if( !(xDisp = XOpenDisplay(0)) ) - OIS_EXCEPT(E_General, "Error opening X!"); - //Create a window - xWin 
= XCreateSimpleWindow(xDisp,DefaultRootWindow(xDisp), 0,0, 100,100, 0, 0, 0); - //bind our connection to that window - XMapWindow(xDisp, xWin); - //Select what events we want to listen to locally - XSelectInput(xDisp, xWin, StructureNotifyMask); - XEvent evtent; - do - { - XNextEvent(xDisp, &evtent); - } while(evtent.type != MapNotify); - - std::ostringstream wnd; - wnd << xWin; - - pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); - - //For this demo, show mouse and do not grab (confine to window) -// pl.insert(std::make_pair(std::string("x11_mouse_grab"), std::string("false"))); -// pl.insert(std::make_pair(std::string("x11_mouse_hide"), std::string("false"))); -#elif defined OIS_APPLE_PLATFORM - // create the window rect in global coords - ::Rect windowRect; - windowRect.left = 0; - windowRect.top = 0; - windowRect.right = 300; - windowRect.bottom = 300; - - // set the default attributes for the window - WindowAttributes windowAttrs = kWindowStandardDocumentAttributes - | kWindowStandardHandlerAttribute - | kWindowInWindowMenuAttribute - | kWindowHideOnFullScreenAttribute; - - // Create the window - CreateNewWindow(kDocumentWindowClass, windowAttrs, &windowRect, &mWin); - - // Color the window background black - SetThemeWindowBackground (mWin, kThemeBrushBlack, true); - - // Set the title of our window - CFStringRef titleRef = CFStringCreateWithCString( kCFAllocatorDefault, "OIS Input", kCFStringEncodingASCII ); - SetWindowTitleWithCFString( mWin, titleRef ); - - // Center our window on the screen - RepositionWindow( mWin, NULL, kWindowCenterOnMainScreen ); - - // Install the event handler for the window - InstallStandardEventHandler(GetWindowEventTarget(mWin)); - - // This will give our window focus, and not lock it to the terminal - ProcessSerialNumber psn = { 0, kCurrentProcess }; - TransformProcessType( &psn, kProcessTransformToForegroundApplication ); - SetFrontProcess(&psn); - - // Display and select our window - ShowWindow(mWin); - SelectWindow(mWin); - - std::ostringstream wnd; - wnd << (unsigned int)mWin; //cast to int so it gets encoded correctly (else it gets stored as a hex string) - std::cout << "WindowRef: " << mWin << " WindowRef as int: " << wnd.str() << "\n"; - pl.insert(std::make_pair(std::string("WINDOW"), wnd.str())); -#endif - - //This never returns null.. it will raise an exception on errors - g_InputManager = InputManager::createInputSystem(pl); - - //Lets enable all addons that were compiled in: - g_InputManager->enableAddOnFactory(InputManager::AddOn_All); - - //Print debugging information - unsigned int v = g_InputManager->getVersionNumber(); - std::cout << "OIS Version: " << (v>>16 ) << "." << ((v>>8) & 0x000000FF) << "." 
<< (v & 0x000000FF) - << "\nRelease Name: " << g_InputManager->getVersionName() - << "\nManager: " << g_InputManager->inputSystemName() - << "\nTotal Keyboards: " << g_InputManager->getNumberOfDevices(OISKeyboard) - << "\nTotal Mice: " << g_InputManager->getNumberOfDevices(OISMouse) - << "\nTotal JoySticks: " << g_InputManager->getNumberOfDevices(OISJoyStick); - - //List all devices - DeviceList list = g_InputManager->listFreeDevices(); - for( DeviceList::iterator i = list.begin(); i != list.end(); ++i ) - std::cout << "\n\tDevice: " << g_DeviceType[i->first] << " Vendor: " << i->second; - - g_kb = (Keyboard*)g_InputManager->createInputObject( OISKeyboard, true ); - g_kb->setEventCallback( &handler ); - - g_m = (Mouse*)g_InputManager->createInputObject( OISMouse, true ); - g_m->setEventCallback( &handler ); - const MouseState &ms = g_m->getMouseState(); - ms.width = 100; - ms.height = 100; - - try - { - //This demo uses at most 4 joysticks - use old way to create (i.e. disregard vendor) - int numSticks = std::min(g_InputManager->getNumberOfDevices(OISJoyStick), 4); - for( int i = 0; i < numSticks; ++i ) - { - g_joys[i] = (JoyStick*)g_InputManager->createInputObject( OISJoyStick, true ); - g_joys[i]->setEventCallback( &handler ); - std::cout << "\n\nCreating Joystick " << (i + 1) - << "\n\tAxes: " << g_joys[i]->getNumberOfComponents(OIS_Axis) - << "\n\tSliders: " << g_joys[i]->getNumberOfComponents(OIS_Slider) - << "\n\tPOV/HATs: " << g_joys[i]->getNumberOfComponents(OIS_POV) - << "\n\tButtons: " << g_joys[i]->getNumberOfComponents(OIS_Button) - << "\n\tVector3: " << g_joys[i]->getNumberOfComponents(OIS_Vector3); - } - } - catch(OIS::Exception &ex) - { - std::cout << "\nException raised on joystick creation: " << ex.eText << std::endl; - } -} - -void handleNonBufferedKeys() -{ - if( g_kb->isKeyDown( KC_ESCAPE ) || g_kb->isKeyDown( KC_Q ) ) - appRunning = false; - - if( g_kb->isModifierDown(Keyboard::Shift) ) - std::cout << "Shift is down..\n"; - if( g_kb->isModifierDown(Keyboard::Alt) ) - std::cout << "Alt is down..\n"; - if( g_kb->isModifierDown(Keyboard::Ctrl) ) - std::cout << "Ctrl is down..\n"; -} - -void handleNonBufferedMouse() -{ - //Just dump the current mouse state - const MouseState &ms = g_m->getMouseState(); - std::cout << "\nMouse: Abs(" << ms.X.abs << " " << ms.Y.abs << " " << ms.Z.abs - << ") B: " << ms.buttons << " Rel(" << ms.X.rel << " " << ms.Y.rel << " " << ms.Z.rel << ")"; -} - -void handleNonBufferedJoy( JoyStick* js ) -{ - //Just dump the current joy state - const JoyStickState &joy = js->getJoyStickState(); - for( unsigned int i = 0; i < joy.mAxes.size(); ++i ) - std::cout << "\nAxis " << i << " X: " << joy.mAxes[i].abs; -} - -#if defined OIS_WIN32_PLATFORM -LRESULT DlgProc( HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam ) -{ - return FALSE; -} -#endif - -#if defined OIS_LINUX_PLATFORM -//This is just here to show that you still recieve x11 events, as the lib only needs mouse/key events -void checkX11Events() -{ - XEvent event; - - //Poll x11 for events (keyboard and mouse events are caught here) - while( XPending(xDisp) > 0 ) - { - XNextEvent(xDisp, &event); - //Handle Resize events - if( event.type == ConfigureNotify ) - { - if( g_m ) - { - const MouseState &ms = g_m->getMouseState(); - ms.width = event.xconfigure.width; - ms.height = event.xconfigure.height; - } - } - else if( event.type == DestroyNotify ) - { - std::cout << "Exiting...\n"; - appRunning = false; - } - else - std::cout << "\nUnknown X Event: " << event.type << std::endl; - } -} -#endif - -#if 
defined OIS_APPLE_PLATFORM -void checkMacEvents() -{ - //TODO - Check for window resize events, and then adjust the members of mousestate - EventRef event = NULL; - EventTargetRef targetWindow = GetEventDispatcherTarget(); - - if( ReceiveNextEvent( 0, NULL, kEventDurationNoWait, true, &event ) == noErr ) - { - SendEventToEventTarget(event, targetWindow); - std::cout << "Event : " << GetEventKind(event) << "\n"; - ReleaseEvent(event); - } -} -#endif diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp deleted file mode 100644 index 6e70213..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxForceFeedback.cpp +++ /dev/null @@ -1,563 +0,0 @@ -/* -The zlib/libpng License - -Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) - -This software is provided 'as-is', without any express or implied warranty. In no event will -the authors be held liable for any damages arising from the use of this software. - -Permission is granted to anyone to use this software for any purpose, including commercial -applications, and to alter it and redistribute it freely, subject to the following -restrictions: - - 1. The origin of this software must not be misrepresented; you must not claim that - you wrote the original software. If you use this software in a product, - an acknowledgment in the product documentation would be appreciated but is - not required. - - 2. Altered source versions must be plainly marked as such, and must not be - misrepresented as being the original software. - - 3. This notice may not be removed or altered from any source distribution. -*/ -#include "linux/LinuxForceFeedback.h" -#include "OISException.h" - -#include -#include -#include - -#ifdef HAVE_UNISTD_H -#include -#endif - -using namespace OIS; - -// 0 = No trace; 1 = Important traces; 2 = Debug traces -#define OIS_LINUX_JOYFF_DEBUG 1 - -#ifdef OIS_LINUX_JOYFF_DEBUG -# include - using namespace std; -#endif - -//--------------------------------------------------------------// -LinuxForceFeedback::LinuxForceFeedback(int deviceID) : - ForceFeedback(), mJoyStick(deviceID) -{ -} - -//--------------------------------------------------------------// -LinuxForceFeedback::~LinuxForceFeedback() -{ - // Unload all effects. - for(EffectList::iterator i = mEffectList.begin(); i != mEffectList.end(); ++i ) - { - struct ff_effect *linEffect = i->second; - if( linEffect ) - _unload(linEffect->id); - } - - mEffectList.clear(); -} - -//--------------------------------------------------------------// -unsigned short LinuxForceFeedback::getFFMemoryLoad() -{ - int nEffects = -1; - if (ioctl(mJoyStick, EVIOCGEFFECTS, &nEffects) == -1) - OIS_EXCEPT(E_General, "Unknown error reading max number of uploaded effects."); -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << "LinuxForceFeedback("<< mJoyStick - << ") : Read device max number of uploaded effects : " << nEffects << endl; -#endif - - return (unsigned short int)(nEffects > 0 ? 
100.0*mEffectList.size()/nEffects : 100); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::setMasterGain(float value) -{ - if (!mSetGainSupport) - { -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain " - << "is not supported by the device" << endl; -#endif - return; - } - - struct input_event event; - - memset(&event, 0, sizeof(event)); - event.type = EV_FF; - event.code = FF_GAIN; - if (value < 0.0) - value = 0.0; - else if (value > 1.0) - value = 1.0; - event.value = (__s32)(value * 0xFFFFUL); - -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting master gain to " - << value << " => " << event.value << endl; -#endif - - if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { - OIS_EXCEPT(E_General, "Unknown error changing master gain."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::setAutoCenterMode(bool enabled) -{ - if (!mSetAutoCenterSupport) - { -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Setting auto-center mode " - << "is not supported by the device" << endl; -#endif - return; - } - - struct input_event event; - - memset(&event, 0, sizeof(event)); - event.type = EV_FF; - event.code = FF_AUTOCENTER; - event.value = (__s32)(enabled*0xFFFFFFFFUL); - -#if (OIS_LINUX_JOYFF_DEBUG > 0) - cout << "LinuxForceFeedback("<< mJoyStick << ") : Toggling auto-center to " - << enabled << " => 0x" << hex << event.value << dec << endl; -#endif - - if (write(mJoyStick, &event, sizeof(event)) != sizeof(event)) { - OIS_EXCEPT(E_General, "Unknown error toggling auto-center."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::upload( const Effect* effect ) -{ - switch( effect->force ) - { - case OIS::Effect::ConstantForce: - _updateConstantEffect(effect); - break; - case OIS::Effect::ConditionalForce: - _updateConditionalEffect(effect); - break; - case OIS::Effect::PeriodicForce: - _updatePeriodicEffect(effect); - break; - case OIS::Effect::RampForce: - _updateRampEffect(effect); - break; - case OIS::Effect::CustomForce: - //_updateCustomEffect(effect); - //break; - default: - OIS_EXCEPT(E_NotImplemented, "Requested force not implemented yet, sorry!"); - break; - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::modify( const Effect* effect ) -{ - upload(effect); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::remove( const Effect* effect ) -{ - //Get the effect - if it exists - EffectList::iterator i = mEffectList.find(effect->_handle); - if( i != mEffectList.end() ) - { - struct ff_effect *linEffect = i->second; - if( linEffect ) - { - _stop(effect->_handle); - - _unload(effect->_handle); - - free(linEffect); - - mEffectList.erase(i); - } - else - mEffectList.erase(i); - } -} - -//--------------------------------------------------------------// -// To Signed16/Unsigned15 safe conversions -#define MaxUnsigned15Value 0x7FFF -#define toUnsigned15(value) \ - (__u16)((value) < 0 ? 0 : ((value) > MaxUnsigned15Value ? MaxUnsigned15Value : (value))) - -#define MaxSigned16Value 0x7FFF -#define MinSigned16Value -0x7FFF -#define toSigned16(value) \ - (__s16)((value) < MinSigned16Value ? MinSigned16Value : ((value) > MaxSigned16Value ? 
MaxSigned16Value : (value))) - -// OIS to Linux duration -#define LinuxInfiniteDuration 0xFFFF -#define OISDurationUnitMS 1000 // OIS duration unit (microseconds), expressed in milliseconds (theLinux duration unit) - -// linux/input.h : All duration values are expressed in ms. Values above 32767 ms (0x7fff) -// should not be used and have unspecified results. -#define LinuxDuration(oisDuration) ((oisDuration) == Effect::OIS_INFINITE ? LinuxInfiniteDuration \ - : toUnsigned15((oisDuration)/OISDurationUnitMS)) - - -// OIS to Linux levels -#define OISMaxLevel 10000 -#define LinuxMaxLevel 0x7FFF - -// linux/input.h : Valid range for the attack and fade levels is 0x0000 - 0x7fff -#define LinuxPositiveLevel(oisLevel) toUnsigned15(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) - -#define LinuxSignedLevel(oisLevel) toSigned16(LinuxMaxLevel*(long)(oisLevel)/OISMaxLevel) - - -//--------------------------------------------------------------// -void LinuxForceFeedback::_setCommonProperties(struct ff_effect *event, - struct ff_envelope *ffenvelope, - const Effect* effect, const Envelope *envelope ) -{ - memset(event, 0, sizeof(struct ff_effect)); - - if (envelope && ffenvelope && envelope->isUsed()) { - ffenvelope->attack_length = LinuxDuration(envelope->attackLength); - ffenvelope->attack_level = LinuxPositiveLevel(envelope->attackLevel); - ffenvelope->fade_length = LinuxDuration(envelope->fadeLength); - ffenvelope->fade_level = LinuxPositiveLevel(envelope->fadeLevel); - } - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl; - if (envelope && ffenvelope) - { - cout << " Enveloppe :" << endl - << " AttackLen : " << envelope->attackLength - << " => " << ffenvelope->attack_length << endl - << " AttackLvl : " << envelope->attackLevel - << " => " << ffenvelope->attack_level << endl - << " FadeLen : " << envelope->fadeLength - << " => " << ffenvelope->fade_length << endl - << " FadeLvl : " << envelope->fadeLevel - << " => " << ffenvelope->fade_level << endl; - } -#endif - - event->direction = (__u16)(1 + (effect->direction*45.0+135.0)*0xFFFFUL/360.0); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Direction : " << Effect::getDirectionName(effect->direction) - << " => 0x" << hex << event->direction << dec << endl; -#endif - - // TODO trigger_button 0 vs. -1 - event->trigger.button = effect->trigger_button; // < 0 ? 
0 : effect->trigger_button; - event->trigger.interval = LinuxDuration(effect->trigger_interval); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Trigger :" << endl - << " Button : " << effect->trigger_button - << " => " << event->trigger.button << endl - << " Interval : " << effect->trigger_interval - << " => " << event->trigger.interval << endl; -#endif - - event->replay.length = LinuxDuration(effect->replay_length); - event->replay.delay = LinuxDuration(effect->replay_delay); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Replay :" << endl - << " Length : " << effect->replay_length - << " => " << event->replay.length << endl - << " Delay : " << effect->replay_delay - << " => " << event->replay.delay << endl; -#endif -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updateConstantEffect( const Effect* eff ) -{ - struct ff_effect event; - - ConstantEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); - - event.type = FF_CONSTANT; - event.id = -1; - - event.u.constant.level = LinuxSignedLevel(effect->level); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Level : " << effect->level - << " => " << event.u.constant.level << endl; -#endif - - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updateRampEffect( const Effect* eff ) -{ - struct ff_effect event; - - RampEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, &event.u.constant.envelope, eff, &effect->envelope); - - event.type = FF_RAMP; - event.id = -1; - - event.u.ramp.start_level = LinuxSignedLevel(effect->startLevel); - event.u.ramp.end_level = LinuxSignedLevel(effect->endLevel); - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " StartLevel : " << effect->startLevel - << " => " << event.u.ramp.start_level << endl - << " EndLevel : " << effect->endLevel - << " => " << event.u.ramp.end_level << endl; -#endif - - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updatePeriodicEffect( const Effect* eff ) -{ - struct ff_effect event; - - PeriodicEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, &event.u.periodic.envelope, eff, &effect->envelope); - - event.type = FF_PERIODIC; - event.id = -1; - - switch( eff->type ) - { - case OIS::Effect::Square: - event.u.periodic.waveform = FF_SQUARE; - break; - case OIS::Effect::Triangle: - event.u.periodic.waveform = FF_TRIANGLE; - break; - case OIS::Effect::Sine: - event.u.periodic.waveform = FF_SINE; - break; - case OIS::Effect::SawToothUp: - event.u.periodic.waveform = FF_SAW_UP; - break; - case OIS::Effect::SawToothDown: - event.u.periodic.waveform = FF_SAW_DOWN; - break; - // Note: No support for Custom periodic force effect for the moment - //case OIS::Effect::Custom: - //event.u.periodic.waveform = FF_CUSTOM; - //break; - default: - OIS_EXCEPT(E_General, "No such available effect for Periodic force!"); - break; - } - - event.u.periodic.period = LinuxDuration(effect->period); - event.u.periodic.magnitude = LinuxPositiveLevel(effect->magnitude); - event.u.periodic.offset = LinuxPositiveLevel(effect->offset); - event.u.periodic.phase = (__u16)(effect->phase*event.u.periodic.period/36000.0); // ????? 
- - // Note: No support for Custom periodic force effect for the moment - event.u.periodic.custom_len = 0; - event.u.periodic.custom_data = 0; - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Magnitude : " << effect->magnitude - << " => " << event.u.periodic.magnitude << endl - << " Period : " << effect->period - << " => " << event.u.periodic.period << endl - << " Offset : " << effect->offset - << " => " << event.u.periodic.offset << endl - << " Phase : " << effect->phase - << " => " << event.u.periodic.phase << endl; -#endif - - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_updateConditionalEffect( const Effect* eff ) -{ - struct ff_effect event; - - ConditionalEffect *effect = static_cast(eff->getForceEffect()); - - _setCommonProperties(&event, NULL, eff, NULL); - - switch( eff->type ) - { - case OIS::Effect::Friction: - event.type = FF_FRICTION; - break; - case OIS::Effect::Damper: - event.type = FF_DAMPER; - break; - case OIS::Effect::Inertia: - event.type = FF_INERTIA; - break; - case OIS::Effect::Spring: - event.type = FF_SPRING; - break; - default: - OIS_EXCEPT(E_General, "No such available effect for Conditional force!"); - break; - } - - event.id = -1; - - event.u.condition[0].right_saturation = LinuxSignedLevel(effect->rightSaturation); - event.u.condition[0].left_saturation = LinuxSignedLevel(effect->leftSaturation); - event.u.condition[0].right_coeff = LinuxSignedLevel(effect->rightCoeff); - event.u.condition[0].left_coeff = LinuxSignedLevel(effect->leftCoeff); - event.u.condition[0].deadband = LinuxPositiveLevel(effect->deadband);// Unit ?? - event.u.condition[0].center = LinuxSignedLevel(effect->center); // Unit ?? TODO ? - - // TODO support for second condition - event.u.condition[1] = event.u.condition[0]; - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << " Condition[0] : " << endl - << " RightSaturation : " << effect->rightSaturation - << " => " << event.u.condition[0].right_saturation << endl - << " LeftSaturation : " << effect->leftSaturation - << " => " << event.u.condition[0]. 
left_saturation << endl - << " RightCoefficient : " << effect->rightCoeff - << " => " << event.u.condition[0].right_coeff << endl - << " LeftCoefficient : " << effect->leftCoeff - << " => " << event.u.condition[0].left_coeff << endl - << " DeadBand : " << effect->deadband - << " => " << event.u.condition[0].deadband << endl - << " Center : " << effect->center - << " => " << event.u.condition[0].center << endl; - cout << " Condition[1] : Not implemented" << endl; -#endif - _upload(&event, eff); -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_upload( struct ff_effect* ffeffect, const Effect* effect) -{ - struct ff_effect *linEffect = 0; - - //Get the effect - if it exists - EffectList::iterator i = mEffectList.find(effect->_handle); - //It has been created already - if( i != mEffectList.end() ) - linEffect = i->second; - - if( linEffect == 0 ) - { -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Adding new effect : " - << Effect::getEffectTypeName(effect->type) << endl; -#endif - - //This effect has not yet been created, so create it in the device - if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { - // TODO device full check - // OIS_EXCEPT(E_DeviceFull, "Remove an effect before adding more!"); - OIS_EXCEPT(E_General, "Unknown error creating effect (may be the device is full)->.."); - } - - // Save returned effect handle - effect->_handle = ffeffect->id; - - // Save a copy of the uploaded effect for later simple modifications - linEffect = (struct ff_effect *)calloc(1, sizeof(struct ff_effect)); - memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); - - mEffectList[effect->_handle] = linEffect; - - // Start playing the effect. - _start(effect->_handle); - } - else - { -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick << ") : Replacing effect : " - << Effect::getEffectTypeName(effect->type) << endl; -#endif - - // Keep same id/handle, as this is just an update in the device. - ffeffect->id = effect->_handle; - - // Update effect in the device. - if (ioctl(mJoyStick, EVIOCSFF, ffeffect) == -1) { - OIS_EXCEPT(E_General, "Unknown error updating an effect->.."); - } - - // Update local linEffect for next time. - memcpy(linEffect, ffeffect, sizeof(struct ff_effect)); - } - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << "LinuxForceFeedback("<< mJoyStick - << ") : Effect handle : " << effect->_handle << endl; -#endif -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_stop( int handle) { - struct input_event stop; - - stop.type = EV_FF; - stop.code = handle; - stop.value = 0; - -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick - << ") : Stopping effect with handle " << handle << endl; -#endif - - if (write(mJoyStick, &stop, sizeof(stop)) != sizeof(stop)) { - OIS_EXCEPT(E_General, "Unknown error stopping effect->.."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_start( int handle) { - struct input_event play; - - play.type = EV_FF; - play.code = handle; - play.value = 1; // Play once. 
- -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick - << ") : Starting effect with handle " << handle << endl; -#endif - - if (write(mJoyStick, &play, sizeof(play)) != sizeof(play)) { - OIS_EXCEPT(E_General, "Unknown error playing effect->.."); - } -} - -//--------------------------------------------------------------// -void LinuxForceFeedback::_unload( int handle) -{ -#if (OIS_LINUX_JOYFF_DEBUG > 1) - cout << endl << "LinuxForceFeedback("<< mJoyStick - << ") : Removing effect with handle " << handle << endl; -#endif - - if (ioctl(mJoyStick, EVIOCRMFF, handle) == -1) { - OIS_EXCEPT(E_General, "Unknown error removing effect->.."); - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp deleted file mode 100644 index 87dd977..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/ois/src/linux/LinuxJoyStickEvents.cpp +++ /dev/null @@ -1,308 +0,0 @@ -/* -The zlib/libpng License - -Copyright (c) 2005-2007 Phillip Castaneda (pjcast -- www.wreckedgames.com) - -This software is provided 'as-is', without any express or implied warranty. In no event will -the authors be held liable for any damages arising from the use of this software. - -Permission is granted to anyone to use this software for any purpose, including commercial -applications, and to alter it and redistribute it freely, subject to the following -restrictions: - - 1. The origin of this software must not be misrepresented; you must not claim that - you wrote the original software. If you use this software in a product, - an acknowledgment in the product documentation would be appreciated but is - not required. - - 2. Altered source versions must be plainly marked as such, and must not be - misrepresented as being the original software. - - 3. This notice may not be removed or altered from any source distribution. 
-*/ -#include "OISConfig.h" - -#include "linux/LinuxJoyStickEvents.h" -#include "linux/LinuxInputManager.h" -#include "linux/LinuxForceFeedback.h" -#include "linux/EventHelpers.h" - -#include "OISEvents.h" -#include "OISException.h" - -#include //Needed to Open a file descriptor -#ifdef HAVE_UNISTD_H -#include -#endif -#include -#include - - -#include -# include -using namespace std; - -using namespace OIS; - -//#define OIS_LINUX_JOY_DEBUG - -//-------------------------------------------------------------------// -LinuxJoyStick::LinuxJoyStick(InputManager* creator, bool buffered, const JoyStickInfo& js) - : JoyStick(js.vendor, buffered, js.devId, creator) -{ - mJoyStick = js.joyFileD; - - mState.mAxes.clear(); - mState.mAxes.resize(js.axes); - mState.mButtons.clear(); - mState.mButtons.resize(js.buttons); - - mPOVs = js.hats; - - mButtonMap = js.button_map; - mAxisMap = js.axis_map; - mRanges = js.axis_range; - - ff_effect = 0; -} - -//-------------------------------------------------------------------// -LinuxJoyStick::~LinuxJoyStick() -{ - EventUtils::removeForceFeedback( &ff_effect ); -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::_initialize() -{ - //Clear old joy state - mState.mAxes.resize(mAxisMap.size()); - mState.clear(); - - //This will create and new us a force feedback structure if it exists - EventUtils::enumerateForceFeedback( mJoyStick, &ff_effect ); - - if( mJoyStick == -1 ) - OIS_EXCEPT(E_InputDeviceNonExistant, "LinuxJoyStick::_initialize() >> JoyStick Not Found!"); -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::capture() -{ - static const short POV_MASK[8] = {0,0,1,1,2,2,3,3}; - - //Used to determine if an axis has been changed and needs an event - bool axisMoved[32] = {false, false, false, false, false, false, false, false, false, false, false, false, false, - false, false, false, false, false, false, false, false, false, false, false, false, false, - false, false, false, false, false, false}; - - //We are in non blocking mode - we just read once, and try to fill up buffer - input_event js[JOY_BUFFERSIZE]; - while(true) - { - int ret = read(mJoyStick, &js, sizeof(struct input_event) * JOY_BUFFERSIZE); - if( ret < 0 ) - break; - - //Determine how many whole events re read up - ret /= sizeof(struct input_event); - for(int i = 0; i < ret; ++i) - { - switch(js[i].type) - { - case EV_KEY: //Button - { - int button = mButtonMap[js[i].code]; - - #ifdef OIS_LINUX_JOY_DEBUG - cout << "\nButton Code: " << js[i].code << ", OIS Value: " << button << endl; - #endif - - //Check to see whether push or released event... - if(js[i].value) - { - mState.mButtons[button] = true; - if( mBuffered && mListener ) - if(!mListener->buttonPressed(JoyStickEvent(this,mState), button)) return; - } - else - { - mState.mButtons[button] = false; - if( mBuffered && mListener ) - if(!mListener->buttonReleased(JoyStickEvent(this,mState), button)) return; - } - break; - } - - case EV_ABS: //Absolute Axis - { - //A Stick (BrakeDefine is the highest possible Axis) - if( js[i].code <= ABS_BRAKE ) - { - int axis = mAxisMap[js[i].code]; - assert( axis < 32 && "Too many axes (Max supported is 32). Report this to OIS forums!" 
); - - axisMoved[axis] = true; - - //check for rescaling: - if( mRanges[axis].min == JoyStick::MIN_AXIS && mRanges[axis].max != JoyStick::MAX_AXIS ) - { //Scale is perfect - mState.mAxes[axis].abs = js[i].value; - } - else - { //Rescale - float proportion = (float)(js[i].value-mRanges[axis].max)/(float)(mRanges[axis].min-mRanges[axis].max); - mState.mAxes[axis].abs = (int)(32767.0f - (65535.0f * proportion)); - } - } - else if( js[i].code <= ABS_HAT3Y ) //A POV - Max four POVs allowed - { - //Normalise the POV to between 0-7 - //Even is X Axis, Odd is Y Axis - unsigned char LinuxPovNumber = js[i].code - 16; - short OIS_POVIndex = POV_MASK[LinuxPovNumber]; - - //Handle X Axis first (Even) (left right) - if((LinuxPovNumber & 0x0001) == 0) - { - //Why do this? Because, we use a bit field, and when this axis is east, - //it can't possibly be west too. So clear out the two X axes, then refil - //it in with the new direction bit. - //Clear the East/West Bit Flags first - mState.mPOV[OIS_POVIndex].direction &= 0x11110011; - if( js[i].value == -1 ) //Left - mState.mPOV[OIS_POVIndex].direction |= Pov::West; - else if( js[i].value == 1 ) //Right - mState.mPOV[OIS_POVIndex].direction |= Pov::East; - } - //Handle Y Axis (Odd) (up down) - else - { - //Clear the North/South Bit Flags first - mState.mPOV[OIS_POVIndex].direction &= 0x11111100; - if( js[i].value == -1 ) //Up - mState.mPOV[OIS_POVIndex].direction |= Pov::North; - else if( js[i].value == 1 ) //Down - mState.mPOV[OIS_POVIndex].direction |= Pov::South; - } - - if( mBuffered && mListener ) - if( mListener->povMoved( JoyStickEvent(this,mState), OIS_POVIndex) == false ) - return; - } - break; - } - - - case EV_REL: //Relative Axes (Do any joystick actually have a relative axis?) - #ifdef OIS_LINUX_JOY_DEBUG - cout << "\nWarning: Relatives axes not supported yet" << endl; - #endif - break; - default: break; - } - } - } - - //All axes and POVs are combined into one movement per pair per captured frame - if( mBuffered && mListener ) - { - for( int i = 0; i < 32; ++i ) - if( axisMoved[i] ) - if( mListener->axisMoved( JoyStickEvent(this,mState), i) == false ) - return; - } -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::setBuffered(bool buffered) -{ - if( buffered != mBuffered ) - { - mBuffered = buffered; - _initialize(); - } -} - -//-------------------------------------------------------------------// -JoyStickInfo LinuxJoyStick::_getJoyInfo() -{ - JoyStickInfo js; - - js.devId = mDevID; - js.joyFileD = mJoyStick; - js.vendor = mVendor; - js.axes = (int)mState.mAxes.size(); - js.buttons = (int)mState.mButtons.size(); - js.hats = mPOVs; - js.button_map = mButtonMap; - js.axis_map = mAxisMap; - js.axis_range = mRanges; - - return js; -} - -//-------------------------------------------------------------------// -JoyStickInfoList LinuxJoyStick::_scanJoys() -{ - JoyStickInfoList joys; - - //Search through all of the event devices.. and identify which ones are joysticks - //xxx move this to InputManager, as it can also scan all other events - for(int i = 0; i < 64; ++i ) - { - stringstream s; - s << "/dev/input/event" << i; - int fd = open( s.str().c_str(), O_RDWR |O_NONBLOCK ); - if(fd == -1) - continue; - - #ifdef OIS_LINUX_JOY_DEBUG - cout << "Opening " << s.str() << "..." << endl; - #endif - try - { - JoyStickInfo js; - if( EventUtils::isJoyStick(fd, js) ) - { - joys.push_back(js); - #ifdef OIS_LINUX_JOY_DEBUG - cout << "=> Joystick added to list." 
<< endl; - #endif - } - else - { - #ifdef OIS_LINUX_JOY_DEBUG - cout << "=> Not a joystick." << endl; - #endif - close(fd); - } - } - catch(...) - { - #ifdef OIS_LINUX_JOY_DEBUG - cout << "Exception caught!!" << endl; - #endif - close(fd); - } - } - - return joys; -} - -//-------------------------------------------------------------------// -void LinuxJoyStick::_clearJoys(JoyStickInfoList &joys) -{ - for(JoyStickInfoList::iterator i = joys.begin(); i != joys.end(); ++i) - close(i->joyFileD); - joys.clear(); -} - -//-------------------------------------------------------------------// -Interface* LinuxJoyStick::queryInterface(Interface::IType type) -{ - if( ff_effect && type == Interface::ForceFeedback ) - return ff_effect; - - return 0; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt deleted file mode 100644 index 65ae11f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/oxygine/CMakeLists.txt +++ /dev/null @@ -1,546 +0,0 @@ -# cmake_minimum_required (VERSION 2.6) -# project (OXYGINE) -# -# include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") -# cmaki_find_package(sdl2 REQUIRED) -# cmaki_find_package(freeimage REQUIRED) -# cmaki_find_package(dune-zlib REQUIRED) -# cmaki_find_package(haxx-libcurl REQUIRED) -# -# include_directories(${CMAKI_INCLUDE_DIRS}) -# set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) -# -# if (EMSCRIPTEN) -# #don't need SDL2 -# elseif (WIN32) -# #hardcoded path to SDL2 on windows -# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# else(WIN32) -# # find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) -# # message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) -# # -# # if (SDL2_INCLUDE_DIRS) -# # set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) -# # message(STATUS "found") -# # else() -# # message(STATUS "SDL notfound") -# # set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# # endif() -# -# find_package(CURL) -# endif(EMSCRIPTEN) -# -# -# set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) -# set(OXYGINE_SRC ${OXYGINE_ROOT}/src) -# -# set(FOLDERS src src/closure src/minizip src/core -# src/core/gl src/dev_tools src/minizip -# src/math src/pugixml src/json src/res -# src/text_utils src/utils src/winnie_alloc) -# -# -# if (EMSCRIPTEN) -# set(PLATFORM emscripten) -# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") -# set(PLATFORM linux) -# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") -# set(PLATFORM ios) -# elseif(MSVC) -# set(PLATFORM win32) -# elseif(MINGW) -# set(PLATFORM win32_mingw) -# endif() -# -# -# set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) -# -# -# -# if (EMSCRIPTEN) -# set(OX_HAVE_LIBPNG 1) -# set(OX_HAVE_HTTP 1) -# set(OX_USE_SDL2 0) -# -# set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) -# -# -# file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) -# -# elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") -# -# set(OX_HAVE_LIBJPEG 1) -# set(OX_HAVE_LIBPNG 1) -# -# elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") -# elseif(MSVC) -# -# set(OX_HAVE_LIBJPEG 1) -# set(OX_HAVE_LIBPNG 1) -# set(OX_HAVE_LIBCURL 1) -# set(OX_HAVE_HTTP 1) -# -# set(libprefix lib) -# -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) -# -# elseif(MINGW) -# -# set(libprefix lib) -# -# set(OX_HAVE_LIBPNG 1) -# set(OX_HAVE_LIBCURL 1) -# set(OX_HAVE_HTTP 1) -# -# endif() -# -# if 
(OX_HAVE_LIBCURL) -# set(FOLDERS ${FOLDERS} src/core/curl) -# include_directories(${THIRD_PARTY}/curl/) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) -# endif(OX_HAVE_LIBCURL) -# -# -# if (NOT OX_HAVE_HTTP) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) -# endif(NOT OX_HAVE_HTTP) -# -# -# -# foreach(ITEM ${FOLDERS}) -# file(GLOB FLS -# ${OXYGINE_ROOT}/${ITEM}/*.cpp -# ${OXYGINE_ROOT}/${ITEM}/*.c -# ${OXYGINE_ROOT}/${ITEM}/*.h) -# set(SOURCES ${SOURCES} ${FLS}) -# string(REPLACE / \\ SGROUP ${ITEM}) -# source_group(${SGROUP} FILES ${FLS}) -# endforeach(ITEM) -# -# -# set(OXYGINE_INCLUDE_DIRS -# ${OXYGINE_SRC} -# ${THIRD_PARTY}/pthreads/include/ -# ${THIRD_PARTY}/zlib) -# -# -# set(OXYGINE_LIBRARY_DIRS -# ${OXYGINE_LIBRARY_DIRS} -# ${OXYGINE_SOURCE_DIR}/libs -# ${THIRD_PARTY}/libraries) -# -# -# if (FORCE_GLES) -# set(OPENGL_LIBRARIES libGLESv2.lib) -# endif(FORCE_GLES) -# -# -# if (MINGW) -# set(CORE_LIBS ${CORE_LIBS} mingw32) -# endif(MINGW) -# -# -# set(CORE_LIBS -# ${CORE_LIBS} -# oxygine-framework -# ${OPENGL_LIBRARIES} -# ) -# -# -# if (OX_USE_SDL2) -# set(CORE_LIBS ${CORE_LIBS} -# SDL2main SDL2) -# set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) -# endif(OX_USE_SDL2) -# -# -# if (WIN32) -# set(CORE_LIBS ${CORE_LIBS} -# pthreadVCE2 -# libcurl_imp -# ws2_32) -# elseif(EMSCRIPTEN) -# else(WIN32) -# set(CORE_LIBS ${CORE_LIBS} pthread) -# endif(WIN32) -# -# -# -# if (OX_HAVE_LIBPNG) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) -# include_directories(${THIRD_PARTY}/libpng) -# set(LIBPNG ${libprefix}png) -# -# if (MSVC) -# if (MSVC_VERSION EQUAL "1900") -# set(LIBPNG ${LIBPNG}-2015) -# endif() -# elseif(EMSCRIPTEN) -# set(LIBPNG libz libpng16) -# endif() -# -# set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) -# endif(OX_HAVE_LIBPNG) -# -# -# if (OX_HAVE_LIBJPEG) -# set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) -# include_directories(${THIRD_PARTY}/libjpeg) -# set(LIBJPEG ${libprefix}jpeg) -# -# if (MSVC) -# if (MSVC_VERSION EQUAL "1900") -# set(LIBJPEG ${LIBJPEG}-2015) -# endif() -# endif() -# -# set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) -# endif(OX_HAVE_LIBJPEG) -# -# -# if (NOT EMSCRIPTEN) -# set(CORE_LIBS ${CORE_LIBS} -# ${libprefix}z${libprefix}) -# endif(NOT EMSCRIPTEN) -# -# -# if (NOT MSVC) -# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ") -# endif(NOT MSVC) -# -# -# add_definitions(${OX_DEFINITIONS}) -# include_directories(${OXYGINE_INCLUDE_DIRS}) -# add_library(oxygine-framework STATIC ${SOURCES}) -# -# -# set(OXYGINE_LIBRARY_DIRS -# ${OXYGINE_LIBRARY_DIRS} -# PARENT_SCOPE) -# -# set(OXYGINE_CORE_LIBS -# ${CORE_LIBS} -# PARENT_SCOPE) -# -# set(OXYGINE_DEFINITIONS -# ${OX_DEFINITIONS} -# PARENT_SCOPE) -# -# set(OXYGINE_INCLUDE_DIRS -# ${OXYGINE_INCLUDE_DIRS} -# PARENT_SCOPE) -# -# set(OXYGINE_JS_LIBRARIES -# ${OXYGINE_JS_LIBRARIES} -# PARENT_SCOPE) -# -# message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") -# message(STATUS "Libs: ${CORE_LIBS}") -# message(STATUS "Platform: ${PLATFORM}") -# -# set(CMAKE_INSTALL_PREFIX ../libs) -# install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) -# install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) - - - - - - - - - - - - - - - - - - - - - - - - - - -cmake_minimum_required (VERSION 2.6) -project (OXYGINE) - -include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake") -cmaki_find_package(sdl2 REQUIRED) -cmaki_find_package(freeimage REQUIRED) -cmaki_find_package(dune-zlib REQUIRED) -cmaki_find_package(haxx-libcurl REQUIRED) - 
-include_directories(${CMAKI_INCLUDE_DIRS}) -set(CORE_LIBS ${CORE_LIBS} ${CMAKI_LIBRARIES}) - -# find_package(OpenGL) -# -# if (EMSCRIPTEN) -# #don't need SDL2 -# elseif (WIN32) -# #hardcoded path to SDL2 on windows -# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# else(WIN32) -# find_path(SDL2_INCLUDE_DIRS NAMES SDL2/SDL.h) -# message(STATUS ${SDL2_INCLUDE_DIRS_FOUND}) -# -# if (SDL2_INCLUDE_DIRS) -# set(SDL2_INCLUDE_DIRS ${SDL2_INCLUDE_DIRS}/SDL2) -# message(STATUS "found") -# else() -# message(STATUS "SDL not found") -# set(SDL2_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../SDL/include) -# endif() -# -# find_package(CURL) -# endif(EMSCRIPTEN) - - -set(OXYGINE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/oxygine) -set(OXYGINE_SRC ${OXYGINE_ROOT}/src) - -set(FOLDERS src src/closure src/minizip src/core - src/core/gl src/dev_tools src/minizip - src/math src/pugixml src/json src/res - src/text_utils src/utils src/winnie_alloc) - - -if (EMSCRIPTEN) - set(PLATFORM emscripten) -elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - set(PLATFORM linux) -elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") - set(PLATFORM ios) -elseif(MSVC) - set(PLATFORM win32) -elseif(MINGW) - set(PLATFORM win32_mingw) -endif() - - -set(THIRD_PARTY ${OXYGINE_ROOT}/third_party/${PLATFORM}) - - - -if (EMSCRIPTEN) - set(OX_HAVE_LIBPNG 1) - set(OX_HAVE_HTTP 1) - set(OX_USE_SDL2 1) - - set(SOURCES ${OXYGINE_SRC}/core/emscripten/HttpRequestEmscriptenTask.cpp) - - - file(GLOB OXYGINE_JS_LIBRARIES ${OXYGINE_SRC}/core/emscripten/*.js) - - set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -s USE_SDL=2 -s USE_LIBPNG=1 -s USE_ZLIB=1 -s FULL_ES2=1 ") - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -s USE_ZLIB=1")#for minizip.c - -elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux") - - set(OX_HAVE_LIBJPEG 1) - set(OX_HAVE_LIBPNG 1) - -elseif(${CMAKE_SYSTEM_NAME} MATCHES "Darwin") -elseif(MSVC) - - set(OX_HAVE_LIBJPEG 1) - set(OX_HAVE_LIBPNG 1) - set(OX_HAVE_LIBCURL 1) - set(OX_HAVE_HTTP 1) - - set(libprefix lib) - - set(OX_DEFINITIONS ${OX_DEFINITIONS} -D_CRT_SECURE_NO_WARNINGS) - -elseif(MINGW) - - set(libprefix lib) - - set(OX_HAVE_LIBPNG 1) - set(OX_HAVE_LIBCURL 1) - set(OX_HAVE_HTTP 1) - -endif() - -if (OX_HAVE_LIBCURL) - set(FOLDERS ${FOLDERS} src/core/curl) - include_directories(${THIRD_PARTY}/curl/) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBCURL) -endif(OX_HAVE_LIBCURL) - - - -if (NOT OX_HAVE_HTTP) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_HTTP) -endif(NOT OX_HAVE_HTTP) - -if (EMSCRIPTEN) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_NO_MT) -endif(EMSCRIPTEN) - -foreach(ITEM ${FOLDERS}) - file(GLOB FLS - ${OXYGINE_ROOT}/${ITEM}/*.cpp - ${OXYGINE_ROOT}/${ITEM}/*.c - ${OXYGINE_ROOT}/${ITEM}/*.h) - set(SOURCES ${SOURCES} ${FLS}) - string(REPLACE / \\ SGROUP ${ITEM}) - source_group(${SGROUP} FILES ${FLS}) -endforeach(ITEM) - - -set(OXYGINE_INCLUDE_DIRS - ${OXYGINE_SRC} - ${THIRD_PARTY}/pthreads/include/ - ${THIRD_PARTY}/zlib) - - -set(OXYGINE_LIBRARY_DIRS - ${OXYGINE_LIBRARY_DIRS} - ${OXYGINE_SOURCE_DIR}/libs - ${THIRD_PARTY}/libraries) - - -if (FORCE_GLES) - set(OPENGL_LIBRARIES libGLESv2.lib) -endif(FORCE_GLES) - - -if (MINGW) - set(CORE_LIBS ${CORE_LIBS} mingw32) -endif(MINGW) - - -set(CORE_LIBS - ${CORE_LIBS} - oxygine-framework - ${OPENGL_LIBRARIES} -) - - -if (OX_USE_SDL2) - set(CORE_LIBS ${CORE_LIBS} - SDL2main SDL2) - set(OXYGINE_INCLUDE_DIRS ${OXYGINE_INCLUDE_DIRS} ${SDL2_INCLUDE_DIRS}) -endif(OX_USE_SDL2) - - -if (WIN32) - set(CORE_LIBS ${CORE_LIBS} - pthreadVCE2 - libcurl_imp - ws2_32) -elseif(EMSCRIPTEN) -else(WIN32) - 
set(CORE_LIBS ${CORE_LIBS} pthread) -endif(WIN32) - - - -if (OX_HAVE_LIBPNG) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBPNG) - - if (EMSCRIPTEN) - - else(EMSCRIPTEN) - - include_directories(${THIRD_PARTY}/libpng) - set(LIBPNG ${libprefix}png) - - if (MSVC) - if(NOT (MSVC_VERSION LESS 1900)) - set(LIBPNG ${LIBPNG}-2015) - endif() - endif() - - set(CORE_LIBS ${CORE_LIBS} ${LIBPNG}) - - endif(EMSCRIPTEN) - -endif(OX_HAVE_LIBPNG) - - -if (OX_HAVE_LIBJPEG) - set(OX_DEFINITIONS ${OX_DEFINITIONS} -DOX_HAVE_LIBJPEG) - include_directories(${THIRD_PARTY}/libjpeg) - set(LIBJPEG ${libprefix}jpeg) - - if (MSVC) - if(NOT (MSVC_VERSION LESS 1900)) - set(LIBJPEG ${LIBJPEG}-2015) - endif() - endif() - - set(CORE_LIBS ${CORE_LIBS} ${LIBJPEG}) -endif(OX_HAVE_LIBJPEG) - - -if (NOT EMSCRIPTEN) - set(CORE_LIBS ${CORE_LIBS} - ${libprefix}z${libprefix}) -endif(NOT EMSCRIPTEN) - - -if (NOT MSVC) - set(OXYGINE_CXX_FLAGS "${OXYGINE_CXX_FLAGS} -std=c++11 ") -endif(NOT MSVC) - -set(CMAKE_CXX_FLAGS ${OXYGINE_CXX_FLAGS}) - -add_definitions(${OX_DEFINITIONS}) -include_directories(${OXYGINE_INCLUDE_DIRS}) -add_library(oxygine-framework STATIC ${SOURCES}) - - -set(OXYGINE_LIBRARY_DIRS - ${OXYGINE_LIBRARY_DIRS} - PARENT_SCOPE) - -set(OXYGINE_CORE_LIBS - ${CORE_LIBS} - PARENT_SCOPE) - -set(OXYGINE_DEFINITIONS - ${OX_DEFINITIONS} - PARENT_SCOPE) - -set(OXYGINE_INCLUDE_DIRS - ${OXYGINE_INCLUDE_DIRS} - PARENT_SCOPE) - -set(OXYGINE_JS_LIBRARIES - ${OXYGINE_JS_LIBRARIES} - PARENT_SCOPE) - -set(OXYGINE_CXX_FLAGS - ${OXYGINE_CXX_FLAGS} - PARENT_SCOPE) - - - -message(STATUS "SDL includes: ${SDL2_INCLUDE_DIRS}") -message(STATUS "Libs: ${CORE_LIBS}") -message(STATUS "Platform: ${PLATFORM}") - -set(CMAKE_INSTALL_PREFIX ../libs) -install(TARGETS oxygine-framework CONFIGURATIONS Debug DESTINATION ./debug) -install(TARGETS oxygine-framework CONFIGURATIONS Release DESTINATION ./release) - - - - - - - - - - - - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml deleted file mode 100644 index bbdc966..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/assimp.yml +++ /dev/null @@ -1,13 +0,0 @@ -- assimp: - <<: *thirdparty_defaults - version: 3.1.1.0 - mode: dr - source: http://downloads.sourceforge.net/project/assimp/assimp-3.1/assimp-3.1.1.zip - uncompress_strip: assimp-3.1.1 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - targets: - - assimp: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml deleted file mode 100644 index e2fe3a4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/box2d.yml +++ /dev/null @@ -1,23 +0,0 @@ -- box2d: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - cmake_target: null - cmake_prefix: ./Box2D/CMakeLists.txt - cmake_definitions: - - BOX2D_BUILD_EXAMPLES=OFF - - BUILD_SHARED_LIBS=ON - - BOX2D_BUILD_SHARED=ON - - BOX2D_BUILD_STATIC=OFF - # - CMAKE_POSITION_INDEPENDENT_CODE=ON - post_install: - - ./Box2D/Box2D/*.h include/Box2D/ RECURSIVE - - ./Box2D/libBox2D.a lib/ - mode: dr - source: https://github.com/erincatto/Box2D.git - branch: -b v2.3.1 - targets: - - Box2D: - info: - <<: *library_dynamic_exact - 
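The package descriptors in this directory (assimp.yml, box2d.yml, and the files that follow) are consumed through the cmaki CMake entry point already visible in the oxygine CMakeLists.txt earlier in this diff. As a minimal sketch only, not taken from this repository (the `demo` project, target name, and `main.cpp` source are hypothetical), a downstream CMakeLists.txt that pulls in the box2d descriptor deleted above would look roughly like:

    cmake_minimum_required(VERSION 2.8)
    project(demo)

    # Load the cmaki helpers bundled under node_modules (same include line used
    # by the oxygine CMakeLists.txt shown earlier in this patch).
    include("$ENV{CMAKI_PWD}/node_modules/cmaki/cmaki.cmake")

    # Resolve a third-party package declared in cmaki_generator/packages/*.yml,
    # here the box2d package, following the cmaki_find_package(<name> REQUIRED)
    # pattern used for sdl2, freeimage, dune-zlib and haxx-libcurl above.
    cmaki_find_package(box2d REQUIRED)

    # cmaki populates these variables with the resolved include dirs and libraries.
    include_directories(${CMAKI_INCLUDE_DIRS})

    add_executable(demo main.cpp)                  # placeholder source file
    target_link_libraries(demo ${CMAKI_LIBRARIES})

The variable names (`CMAKI_INCLUDE_DIRS`, `CMAKI_LIBRARIES`) and the include path mirror the usage in the oxygine CMakeLists.txt above; everything else in the sketch is illustrative.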
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml deleted file mode 100644 index a33a569..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/bullet2.yml +++ /dev/null @@ -1,54 +0,0 @@ -- bullet2: - <<: *thirdparty_defaults - version: 2.83.6.0 - source: https://github.com/bulletphysics/bullet3/archive/2.83.6.tar.gz - uncompress_strip: bullet3-2.83.6 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - references: &bullet2_common_extra - default: - include: - - include/bullet - targets: - - LinearMath: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - BulletCollision: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - BulletDynamics: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - BulletSoftBody: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - # optional targets - - BulletFileLoader: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - ConvexDecomposition: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - GIMPACTUtils: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - - HACD: - info: - <<: *library_dynamic_exact - extra: - <<: *bullet2_common_extra - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml deleted file mode 100644 index 59a451e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/cryptopp.yml +++ /dev/null @@ -1,70 +0,0 @@ -- cryptopp: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - version_manager: git - post_install: - - ./*.h include - - ./*.a lib - cmake_target: null - mode: dr - source: https://github.com/weidai11/cryptopp.git - branch: -b CRYPTOPP_5_6_5 - unittest: - | - // https://www.cryptopp.com/wiki/ChannelSwitch - #include - #include - #include - #include - #include - #include - - int main(int argc, char *argv[]) - { - std::string message = "Now is the time for all good men to come to the aide of their country"; - - // Allow user to override default message from command line arg. 
- if(argc == 2 && argv[1] != NULL) - message = std::string(argv[1]); - - // Set hash variables - std::string s1, s2, s3, s4; - CryptoPP::SHA1 sha1; CryptoPP::SHA224 sha224; CryptoPP::SHA256 sha256; CryptoPP::SHA512 sha512; - - // Run hash functions - CryptoPP::HashFilter f1(sha1, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s1))); - CryptoPP::HashFilter f2(sha224, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s2))); - CryptoPP::HashFilter f3(sha256, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s3))); - CryptoPP::HashFilter f4(sha512, new CryptoPP::HexEncoder(new CryptoPP::StringSink(s4))); - - // Set route to default - CryptoPP::ChannelSwitch cs; - cs.AddDefaultRoute(f1); - cs.AddDefaultRoute(f2); - cs.AddDefaultRoute(f3); - cs.AddDefaultRoute(f4); - - CryptoPP::StringSource ss(message, true /*pumpAll*/, new CryptoPP::Redirector(cs)); - - std::cout << "Message: " << message << std::endl; - std::cout << "SHA-1: " << s1 << std::endl; - std::cout << "SHA-224: " << s2 << std::endl; - std::cout << "SHA-256: " << s3 << std::endl; - std::cout << "SHA-512: " << s4 << std::endl; - } - cmake_definitions: - - BUILD_SHARED=OFF - - BUILD_SHARED_LIBS=OFF - - BUILD_STATIC=ON - - BUILD_TESTING=OFF - targets: - - cryptopp: - info: - <<: *library_static_exact - extra: - default: - definitions: - - -DCRYPTOPP_INIT_PRIORITY=1 - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml deleted file mode 100644 index 9ebf7cf..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-freetype.yml +++ /dev/null @@ -1,28 +0,0 @@ -- dune-freetype: - <<: *thirdparty_defaults - version: 1.0.0.0 - mode: dr - source: http://download.savannah.gnu.org/releases/freetype/freetype-2.6.tar.bz2 - uncompress_strip: freetype-2.6 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - unittest: - | - #include - #include FT_FREETYPE_H - int main() - { - FT_Library library; - FT_Init_FreeType( &library ); - return 0; - } - targets: - - freetype: - info: - <<: *library_dynamic_exact - extra: - default: - include: - - include/freetype2 - - $PLATFORM/include/freetype2 - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml deleted file mode 100644 index ccb589b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-glew.yml +++ /dev/null @@ -1,29 +0,0 @@ -- dune-glew: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - cmake_target: null - mode: dr - source: https://github.com/nigels-com/glew.git - cmake_definitions: - - BUILD_SHARED_LIBS=ON - post_install: - - ./lib/* lib/ RECURSIVE - - ./include/* include/ RECURSIVE - build: - | - #!/bin/bash - pushd auto - make - popd - make -j $CORES - targets: - - GLEW: - info: - <<: *library_dynamic_exact - extra: - macos_64-clang_*-*: null - default: - system_depends: - - GL - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml deleted file mode 100644 index 04246cb..0000000 --- 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/dune-zlib.yml +++ /dev/null @@ -1,38 +0,0 @@ -- dune-zlib: - <<: *thirdparty_defaults - version: 1.2.11.0 - mask: w - source: https://zlib.net/zlib-1.2.11.tar.gz - uncompress_strip: zlib-1.2.11 - unittest: - | - #include - int main() - { - z_stream infstream; - return 0; - } - targets: - - zlib: - info: - <<: *library_dynamic_exact - -- dune-zlib: - <<: *thirdparty_defaults - version: 1.2.11.0 - mask: mls - source: https://zlib.net/zlib-1.2.11.tar.gz - uncompress_strip: zlib-1.2.11 - unittest: - | - #include - int main() - { - z_stream infstream; - return 0; - } - targets: - - z: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml deleted file mode 100644 index 1dc4f97..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/fmod.yml +++ /dev/null @@ -1,20 +0,0 @@ -- fmod: - <<: *thirdparty_defaults - version: 1.0.1.0 - source: $NPP_SERVER/sources/fmodstudioapi11000linux.tar.gz - uncompress_strip: fmodstudioapi11000linux/api/lowlevel - post_install: - - ./lib/x86_64/* lib/ - - ./inc/*.h* include/ - build: - | - #!/bin/bash - echo installing fmod - targets: - - fmod: - info: - <<: *library_dynamic_exact - - fmodL: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml deleted file mode 100644 index 856f116..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage.yml +++ /dev/null @@ -1,36 +0,0 @@ -- freeimage: - <<: *thirdparty_defaults - version: 3.1.7.0 - source: https://github.com/Kanma/FreeImage - cmake_target: null - post_install: - - ./lib/*.a lib/ - targets: - - freeimage: - info: - <<: *library_static_exact - - jpeg: - info: - <<: *library_static_exact - - mng: - info: - <<: *library_static_exact - - openexr: - info: - <<: *library_static_exact - - openjpeg: - info: - <<: *library_static_exact - - png: - info: - <<: *library_static_exact - - rawlite: - info: - <<: *library_static_exact - - tiff: - info: - <<: *library_static_exact - - zlib: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml deleted file mode 100644 index c9352be..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/freeimage_cmake.yml +++ /dev/null @@ -1,40 +0,0 @@ -- freeimage: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - version_manager: git - cmake_target: null - post_install: - - ./*.h include RECURSIVE - - ./lib/*.a lib - mode: dr - source: https://github.com/Kanma/FreeImage.git - targets: - - freeimage: - info: - <<: *library_static_exact - # - zlib: - # info: - # <<: *library_static_exact - - tiff: - info: - <<: *library_static_exact - - rawlite: - info: - <<: *library_static_exact - - png: - info: - <<: *library_static_exact - - openjpeg: - info: - <<: *library_static_exact - - openexr: - info: - <<: 
*library_static_exact - - mng: - info: - <<: *library_static_exact - - jpeg: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml deleted file mode 100644 index cf94535..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/google-gmock.yml +++ /dev/null @@ -1,61 +0,0 @@ -- google-gmock: - <<: *thirdparty_defaults - mask: w - source: https://github.com/google/googletest.git - branch: -b release-1.8.0 - post_install: - - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE - - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE - cmake_definitions: - - GTEST_LINKED_AS_SHARED_LIBRARY=1 - - BUILD_SHARED_LIBS=ON - - BUILD_GTEST=ON - - BUILD_GMOCK=ON - - gtest_build_samples=OFF - - gtest_build_tests=OFF - - gtest_disable_pthreads=OFF - - gmock_build_tests=OFF - - INSTALL_GTEST=ON - - INSTALL_GMOCK=ON - targets: - - gtest: - info: - <<: *library_dynamic_exact - - gmock: - info: - <<: *library_dynamic_exact - - gmock_main: - info: - <<: *library_dynamic_exact - - -- google-gmock: - <<: *thirdparty_defaults - mask: mls - source: https://github.com/google/googletest.git - branch: -b release-1.8.0 - post_install: - - ./googlemock/include/gmock/*.h* include/gmock/ RECURSIVE - - ./googletest/include/gtest/*.h* include/gtest/ RECURSIVE - cmake_definitions: - - BUILD_SHARED_LIBS=OFF - - BUILD_GTEST=ON - - BUILD_GMOCK=ON - - gtest_build_samples=OFF - - gtest_build_tests=OFF - - gtest_disable_pthreads=OFF - - gmock_build_tests=OFF - - INSTALL_GTEST=ON - - INSTALL_GMOCK=ON - targets: - - gtest: - info: - <<: *library_static_exact - - gmock: - info: - <<: *library_static_exact - - gmock_main: - info: - <<: *library_static_exact - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml deleted file mode 100644 index ffd8870..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/gwen.yml +++ /dev/null @@ -1,11 +0,0 @@ -- gwen: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - mode: dr - source: https://github.com/garrynewman/GWEN.git - targets: - - gwen: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml deleted file mode 100644 index 8c14ec5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/haxx-libcurl.yml +++ /dev/null @@ -1,71 +0,0 @@ -- haxx-libcurl: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - source: https://github.com/curl/curl.git - branch: -b curl-7_59_0 - depends: - - openssl - cmake_definitions: - - CMAKE_USE_OPENSSL=ON - unittest: - | - #include - #include - int main() - { - CURL* curl = curl_easy_init(); - return 0; - } - references: - library_dynamic: &library_dynamic_curl - common: &library_dynamic_common_curl - include: - - $PLATFORM/include - - include - windows: &library_dynamic_windows_curl - <<: *library_dynamic_common_curl - dynamic: - debug: - dll: - lib$TARGET.dll - lib: - 
lib$TARGET_imp.lib - pdb: - lib$TARGET.pdb - relwithdebinfo: - dll: - lib$TARGET.dll - lib: - lib$TARGET_imp.lib - pdb: - lib$TARGET.pdb - release: - dll: - lib$TARGET.dll - lib: - lib$TARGET_imp.lib - pdb: - null - - unix: &library_dynamic_unix_curl - <<: *library_dynamic_common_curl - dynamic: - debug: - so: - lib/lib$TARGET-d.so - relwithdebinfo: - so: - lib/lib$TARGET.so - release: - so: - lib/lib$TARGET.so - windows_*-msvc_*-*: - <<: *library_dynamic_windows_curl - default: - <<: *library_dynamic_unix_curl - targets: - - curl: - info: - <<: *library_dynamic_curl - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml deleted file mode 100644 index e8920b1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/json.yml +++ /dev/null @@ -1,26 +0,0 @@ -- json: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - version_manager: git - post_install: - - ./src/*.h* include - cmake_target: null - source: https://github.com/nlohmann/json.git - branch: -b v3.0.1 - cmake_definitions: - - JSON_BuildTests=OFF - unittest: - | - #include - using json = nlohmann::json; - int main() - { - json j1; - return 0; - } - targets: - - dummy: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml deleted file mode 100644 index 05d54dd..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/librocket.yml +++ /dev/null @@ -1,24 +0,0 @@ -- librocket: - <<: *thirdparty_defaults - version: 0.0.0.0 - mode: dr - cmake_target: null - post_install: - - ./Include/Rocket/*.h include/Rocket/ recursive - - ./Include/Rocket/*.inl include/Rocket/ recursive - version_manager: git - source: https://github.com/libRocket/libRocket.git - branch: -b stable - depends: - - dune-freetype - targets: - - RocketCore: - info: - <<: *library_dynamic_exact - - RocketDebugger: - info: - <<: *library_dynamic_exact - - RocketControls: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml deleted file mode 100644 index 7d76144..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/msgpack.yml +++ /dev/null @@ -1,10 +0,0 @@ -- msgpack: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - source: https://github.com/msgpack/msgpack-c.git - targets: - - msgpackc: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml deleted file mode 100644 index 4cbfa70..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/noise.yml +++ /dev/null @@ -1,11 +0,0 @@ -- noise: - <<: *thirdparty_defaults - version: 1.0.0.0 - mode: dr - source: http://downloads.sourceforge.net/project/libnoise/libnoise%20sources/1.0.0/libnoisesrc-1.0.0.zip - uncompress_strip: noise - 
targets: - - noise: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml deleted file mode 100644 index 06bada0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/ois.yml +++ /dev/null @@ -1,19 +0,0 @@ -- ois: - <<: *thirdparty_defaults - version: 1.3.0.0 - mode: dr - source: http://downloads.sourceforge.net/project/wgois/Source%20Release/1.3/ois_v1-3.tar.gz - uncompress_strip: ois-v1-3 - build: - | - #!/bin/bash - # depends: libxaw7-dev - source find.script - chmod +x bootstrap - ./bootstrap - ./configure --prefix=$ois_HOME && make -j $CORES && make -j $CORES install - exit $? - targets: - - OIS: - info: - <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml deleted file mode 100644 index 4011d09..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/openssl.yml +++ /dev/null @@ -1,24 +0,0 @@ -- openssl: - <<: *thirdparty_defaults - source: https://github.com/pol51/OpenSSL-CMake.git - branch: -b OpenSSL_1_1_0 - build: - | - #!/bin/bash - # if [[ $BUILD_MODE == 'Debug' ]] - # then - # ./Configure --openssldir=$SELFHOME debug-linux-x86_64 - # else - # ./Configure --openssldir=$SELFHOME linux-x86_64 - # fi - ./config --prefix=$SELFHOME - make - make install - targets: - - ssl: - info: - <<: *library_static_exact - - crypto: - info: - <<: *library_static_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml deleted file mode 100644 index eb53ab4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/oxygine.yml +++ /dev/null @@ -1,25 +0,0 @@ -- oxygine: - <<: *thirdparty_defaults - cmake_target: null - cmake_definitions: - - BUILD_SHARED_LIBS=OFF - - CMAKE_POSITION_INDEPENDENT_CODE=ON - - OX_HAVE_LIBJPEG=1 - - OX_HAVE_LIBPNG=1 - - OX_HAVE_LIBCURL=1 - - OX_HAVE_HTTP=0 - - OX_USE_SDL2=1 - depends: - - sdl2 - - freeimage - - haxx-libcurl - source: https://github.com/oxygine/oxygine-framework.git - targets: - - oxygine-framework: - info: - <<: *library_static_exact - extra: - default: - definitions: - - -DOXYGINE_SDL=1 - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml deleted file mode 100644 index 0d9c5f9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqtt3.yml +++ /dev/null @@ -1,22 +0,0 @@ -- paho-mqtt3: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - mode: dr - post_install: - - ./src/*.h include - source: https://github.com/eclipse/paho.mqtt.c.git - branch: -b develop - cmake_definitions: - - BUILD_SHARED_LIBS=ON - - BUILD_TESTING=OFF - - BUILD_STATIC=OFF - - BUILD_SHARED=ON - targets: - - paho-mqtt3c: - info: - <<: *library_dynamic_exact - - paho-mqtt3a: - info: - <<: *library_dynamic_exact - diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml deleted file mode 100644 index 5d52565..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/paho-mqttpp3.yml +++ /dev/null @@ -1,21 +0,0 @@ -- paho-mqttpp3: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - mode: dr - depends: - - paho-mqtt3 - post_install: - - ./src/mqtt/*.h include/mqtt - mode: dr - source: https://github.com/eclipse/paho.mqtt.cpp.git - cmake_definitions: - - BUILD_SHARED_LIBS=ON - - BUILD_TESTING=OFF - - BUILD_STATIC=OFF - - BUILD_SHARED=ON - - PAHO_WITH_SSL=OFF - targets: - - paho-mqttpp3: - info: - <<: *library_dynamic_exact diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml deleted file mode 100644 index df8c388..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/pugixml.yml +++ /dev/null @@ -1,11 +0,0 @@ -- pugixml: - <<: *thirdparty_defaults - source: http://github.com/zeux/pugixml/releases/download/v1.8/pugixml-1.8.tar.gz - uncompress_strip: pugixml-1.8 - cmake_definitions: - - BUILD_SHARED_LIBS=ON - targets: - - pugixml: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml deleted file mode 100644 index bc7cb10..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/python.yml +++ /dev/null @@ -1,21 +0,0 @@ -- python: - <<: *thirdparty_defaults - source: https://github.com/python-cmake-buildsystem/python-cmake-buildsystem.git - cmake_definitions: - - BUILD_SHARED=FALSE - - BUILD_STATIC=TRUE - targets: - - python3.5m: - info: - <<: *library_static_exact - extra: - default: - include: - - include/python3.5m - system_depends: - - dl - - util - - python: - info: - <<: *executable_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml deleted file mode 100644 index 643b0c7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/raknet.yml +++ /dev/null @@ -1,11 +0,0 @@ -- raknet: - <<: *thirdparty_defaults - cmake_target: null - source: https://github.com/facebookarchive/RakNet.git - post_install: - - ./Source/*.h* include/raknet/ - targets: - - RakNetDLL: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml deleted file mode 100644 index 5707070..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/restclient-cpp.yml +++ /dev/null @@ -1,17 +0,0 @@ -- restclient-cpp: - <<: *thirdparty_defaults - source: https://github.com/mrtazz/restclient-cpp - depends: - - haxx-libcurl - build: - | - #!/bin/bash - source 
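Several of the recipes above (paho-mqtt3, paho-mqttpp3, raknet, librocket) declare post_install rules such as "./Source/*.h* include/raknet/" that copy matching headers into the install tree after the build. The sketch below shows the assumed semantics of such a rule: glob the pattern and copy the hits into the destination directory. The function name, the recursive-flag handling and the sample arguments are illustrative assumptions, not the generator's actual implementation.

    # Assumed behaviour of a post_install rule: copy glob matches into the
    # destination directory under the install prefix. Arguments are illustrative.
    import glob, os, shutil

    def run_post_install(pattern, dest, recursive=False):
        os.makedirs(dest, exist_ok=True)
        for path in glob.glob(pattern, recursive=recursive):
            shutil.copy2(path, os.path.join(dest, os.path.basename(path)))

    run_post_install('./Source/*.h', 'include/raknet/')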
$(pwd)/../haxx-libcurl/find.script - ./autogen.sh - CXXFLAGS=-I$haxx_libcurl_HOME/include ./configure --prefix=$SELFHOME - make install - targets: - - restclient-cpp: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml deleted file mode 100644 index 13d07b4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/sdl2.yml +++ /dev/null @@ -1,38 +0,0 @@ -- sdl2: - <<: *thirdparty_defaults - mask: wl - version: 2.0.8.0 - source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz - uncompress_strip: SDL2-2.0.8 - depends: - - dune-glew - mode: dr - targets: - - SDL2-2.0: - info: - <<: *library_dynamic_exact - extra: - default: - include: - - include/SDL2 - - -- sdl2: - <<: *thirdparty_defaults - mask: m - version: 2.0.8.0 - source: https://www.libsdl.org/release/SDL2-2.0.8.tar.gz - uncompress_strip: SDL2-2.0.8 - depends: - - dune-glew - mode: dr - targets: - - SDL2: - info: - <<: *library_dynamic_exact - extra: - default: - include: - - include/SDL2 - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml deleted file mode 100644 index 29c143d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/spdlog.yml +++ /dev/null @@ -1,14 +0,0 @@ -- spdlog: - <<: *thirdparty_defaults - version: 0.0.0.0 - version_manager: git - source: https://github.com/gabime/spdlog.git - branch: -b v0.16.3 - post_install: - - ./include/*.h* include/ RECURSIVE - - ./include/*.cc* include/ RECURSIVE - targets: - - dummy: - info: - <<: *library_dynamic - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml deleted file mode 100644 index d01d5e7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/tbb.yml +++ /dev/null @@ -1,49 +0,0 @@ -- intel-tbb: - <<: *thirdparty_defaults - version: 4.4.0.0 - source: https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20150728oss_src.tgz - uncompress_strip: tbb44_20150728oss - build: - | - #!/bin/bash - source find.script - make info > info_.txt - tail -n +2 info_.txt > info.txt - source info.txt - make - code=$? 
- # install - cp -Rf include/ $intel_tbb_HOME - if [[ $BUILD_MODE == 'Debug' ]] - then - cp -Rf build/${tbb_build_prefix}_debug/*.so* $intel_tbb_HOME - else - cp -Rf build/${tbb_build_prefix}_release/*.so* $intel_tbb_HOME - fi - for i in $(find $intel_tbb_HOME -name "*.so"); do - name=$(basename $i) - echo rm $i - echo ln -sf $name.2 $i - rm $i - ln -sf $name.2 $i - done - exit $code - - targets: - - tbb: - info: - <<: *library_dynamic_exact - extra: - \*-debug: - definitions: - - -DTBB_USE_DEBUG=1 - default: - definitions: - - -DTBB_USE_DEBUG=0 - - tbbmalloc: - info: - <<: *library_dynamic_exact - - tbbmalloc_proxy: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml deleted file mode 100644 index 34d5cc9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packages/yamlcpp.yml +++ /dev/null @@ -1,16 +0,0 @@ -- yamlcpp: - <<: *thirdparty_defaults - mode: dr - version: 0.0.0.0 - version_manager: git - cmake_target: null - cmake_definitions: - - BUILD_SHARED_LIBS=ON - post_install: - - ./include/yaml-cpp/*.h include/yaml-cpp RECURSIVE - source: https://github.com/jbeder/yaml-cpp.git - targets: - - yaml-cpp: - info: - <<: *library_dynamic_exact - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py deleted file mode 100644 index fcb2872..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/packing.py +++ /dev/null @@ -1,139 +0,0 @@ -import os -import sys -import utils -import logging -import hash_version -from itertools import product -from third_party import platforms -from third_party import get_identifier - - -def print_folder(folder): - for root, dirs, files in os.walk(folder): - path = root.split(os.sep) - logging.info((len(path) - 1) * '... ' + '%s/' % os.path.basename(root)) - for file in files: - logging.info(len(path) * '... 
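The intel-tbb build script above ends by replacing every bare lib*.so in the install folder with a symlink to its versioned file (rm $i; ln -sf $name.2 $i). A small Python equivalent of that fix-up is sketched below; the ".2" suffix is taken from the script, and the directory argument is illustrative.

    # Python equivalent of the symlink fix-up in the intel-tbb build script:
    # drop each bare lib*.so and point it at its versioned file.
    import pathlib

    def relink_sonames(install_dir):
        for so in pathlib.Path(install_dir).rglob('*.so'):
            versioned = so.name + '.2'   # e.g. libtbb.so -> libtbb.so.2
            so.unlink()                  # rm $i
            so.symlink_to(versioned)     # ln -sf $name.2 $i (relative link)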
' + '%s' % file) - - -def packing(node, parameters, compiler_replace_maps): - - package = node.get_package_name() - version_git = node.get_version() - packing = node.is_packing() - if not packing: - logging.warning('Skiping package: %s' % package) - return 0 - - manager = node.get_version_manager() - if manager == "git": - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) - revision_git = hash_version.get_last_changeset(build_directory, short=False) - version_old = node.get_version() - version_git = hash_version.to_cmaki_version(build_directory, revision_git) - logging.info('[git] Renamed version from %s to %s' % (version_old, version_git)) - - current_workspace = node.get_binary_workspace(plat) - current_base = node.get_base_folder() - oldversion = node.get_version() - try: - node.set_version(version_git) - updated_workspace = node.get_binary_workspace(plat) - updated_base = node.get_base_folder() - - current_base2 = os.path.join(current_workspace, current_base) - updated_base2 = os.path.join(current_workspace, updated_base) - logging.debug("from: %s" % current_base2) - logging.debug("to: %s" % updated_base2) - if current_base != updated_base: - utils.move_folder_recursive(current_base2, updated_base2) - logging.debug('-- copy from: {}, {}'.format(current_workspace, os.path.exists(current_workspace))) - logging.debug('-- copy to: {}, {}'.format(updated_workspace, os.path.exists(updated_workspace))) - utils.move_folder_recursive(current_workspace, updated_workspace) - finally: - node.set_version(oldversion) - - node.set_version(version_git) - version = node.get_version() - - # regenerate autoscripts with new version - node.generate_scripts_headers(compiler_replace_maps) - - # # generate versions.cmake - node.generate_3rdpartyversion(parameters.prefix) - - precmd = '' - if utils.is_windows(): - precmd = 'cmake -E ' - - folder_3rdparty = parameters.third_party_dir - output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) - utils.trymkdir(output_3rdparty) - - folder_mark = os.path.join(parameters.prefix, node.get_base_folder()) - utils.trymkdir(folder_mark) - - utils.superverbose(parameters, '*** [%s] Generation cmakefiles *** %s' % (package, output_3rdparty)) - errors = node.generate_cmakefiles(platforms, output_3rdparty, compiler_replace_maps) - logging.debug('errors generating cmakefiles: %d' % errors) - node.ret += abs(errors) - - for plat in platforms: - utils.superverbose(parameters, '*** [%s (%s)] Generating package .tar.gz (%s) ***' % (package, version, plat)) - workspace = node.get_workspace(plat) - current_workspace = node.get_binary_workspace(plat) - utils.trymkdir(current_workspace) - with utils.working_directory(current_workspace): - - logging.info('working directory: {}'.format(current_workspace)) - - if utils.is_windows(): - utils.safe_system('del /s *.ilk') - utils.safe_system('del /s *.exp') - - current_base = node.get_base_folder() - prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) - prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) - - logging.info('generating package %s from source %s' % (prefix_package, os.path.join(os.getcwd(), current_base))) - logging.info('generating md5file %s' % prefix_package_md5) - print_folder(current_base) - - # packing install - gen_targz = "%star zcvf %s %s" % (precmd, prefix_package, current_base) - - node.ret += abs( node.safe_system(gen_targz, 
compiler_replace_maps) ) - if not os.path.exists(prefix_package): - logging.error('No such file: {}'.format(prefix_package)) - return False - - # calculate md5 file - package_md5 = utils.md5sum(prefix_package) - logging.debug("new package {}, with md5sum {}".format(prefix_package, package_md5)) - with open(prefix_package_md5, 'wt') as f: - f.write('%s\n' % package_md5) - - # packing cmakefiles (more easy distribution) - if not parameters.no_packing_cmakefiles: - for plat in platforms: - current_base = node.get_base_folder() - prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (current_base, plat)) - with utils.working_directory(folder_3rdparty): - - logging.info('working directory: {}'.format(folder_3rdparty)) - - logging.debug('working dir: %s' % folder_3rdparty) - logging.info('generating package cmake %s' % prefix_package_cmake) - print_folder(current_base) - - gen_targz_cmake = '{}tar zcvf {} {}'.format(precmd, prefix_package_cmake, current_base) - node.ret += abs( node.safe_system(gen_targz_cmake, compiler_replace_maps) ) - if not os.path.exists(prefix_package_cmake): - logging.error('No such file: {}'.format(prefix_package_cmake)) - return False - - # finish well - return True - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt deleted file mode 100644 index dcb2251..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/CMakeLists.txt +++ /dev/null @@ -1,75 +0,0 @@ - -#******************************************************************************* -# Copyright (c) 2016 -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Eclipse Public License v1.0 -# and Eclipse Distribution License v1.0 which accompany this distribution. -# -# The Eclipse Public License is available at -# http://www.eclipse.org/legal/epl-v10.html -# and the Eclipse Distribution License is available at -# http://www.eclipse.org/org/documents/edl-v10.php. -# -# Contributors: -# Guilherme Maciel Ferreira - initial version -#*******************************************************************************/ - -## Note: on OS X you should install XCode and the associated command-line tools - -## cmake flags -cmake_minimum_required(VERSION 3.1 FATAL_ERROR) - -## project name -project("paho-mqtt-cpp" LANGUAGES CXX) - -include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) -set(PAHO_MQTT_C_PATH "${paho_mqtt3_LIBDIR}" CACHE PATH "Add a path to paho.mqtt.c library and headers") - -## library name -set(PAHO_MQTT_CPP paho-mqttpp3) - -## build settings -set(PAHO_VERSION_MAJOR 0) -set(PAHO_VERSION_MINOR 9) -set(PAHO_VERSION_PATCH 0) - -set(CLIENT_VERSION ${PAHO_VERSION_MAJOR}.${PAHO_VERSION_MINOR}.${PAHO_VERSION_PATCH}) -set(CPACK_PACKAGE_VERSION_MAJOR ${PAHO_VERSION_MAJOR}) -set(CPACK_PACKAGE_VERSION_MINOR ${PAHO_VERSION_MINOR}) -set(CPACK_PACKAGE_VERSION_PATCH ${PAHO_VERSION_PATCH}) - -## build options -set(PAHO_BUILD_STATIC FALSE CACHE BOOL "Build static library") -set(PAHO_BUILD_SAMPLES FALSE CACHE BOOL "Build sample programs") -set(PAHO_BUILD_DOCUMENTATION FALSE CACHE BOOL "Create and install the HTML based API documentation (requires Doxygen)") -set(PAHO_MQTT_C paho-mqtt3a) -SET(PAHO_WITH_SSL TRUE CACHE BOOL "Flag that defines whether to build ssl-enabled binaries too. 
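packing() above shells out to "tar zcvf" and an md5 helper, producing one <workspace>.tar.gz per platform plus a sibling .md5 file next to the generated cmakefiles. The sketch below reproduces just that packaging step with the Python standard library (tarfile and hashlib) instead of external tools; the function and parameter names are illustrative, not part of packing.py.

    # Standard-library sketch of the packaging step in packing(): archive the
    # base folder and write its md5 digest. Names are illustrative.
    import hashlib, tarfile

    def pack_and_checksum(base_folder, archive_path, md5_path):
        with tarfile.open(archive_path, 'w:gz') as tar:
            tar.add(base_folder)                 # like "tar zcvf <archive> <base>"
        md5 = hashlib.md5()
        with open(archive_path, 'rb') as f:
            for chunk in iter(lambda: f.read(1 << 20), b''):
                md5.update(chunk)
        with open(md5_path, 'wt') as f:
            f.write(md5.hexdigest() + '\n')      # same one-line format packing() writes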
") - -## build flags -set(CMAKE_CXX_STANDARD 11) -set(CMAKE_CXX_STANDARD_REQUIRED ON) -set(CMAKE_CXX_EXTENSIONS OFF) - -## build directories - -add_subdirectory(src) -add_subdirectory(src/mqtt) - -if(PAHO_BUILD_SAMPLES) - add_subdirectory(src/samples) -endif() - -if(PAHO_BUILD_DOCUMENTATION) - add_subdirectory(doc) -endif() - -## packaging settings -if(WIN32) - set(CPACK_GENERATOR "ZIP") -elseif(UNIX) - set(CPACK_GENERATOR "TGZ") -endif() - -include(CPack) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt deleted file mode 100644 index d35ab8b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/paho-mqttpp3/src/CMakeLists.txt +++ /dev/null @@ -1,161 +0,0 @@ -#******************************************************************************* -# Copyright (c) 2016 -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Eclipse Public License v1.0 -# and Eclipse Distribution License v1.0 which accompany this distribution. -# -# The Eclipse Public License is available at -# http://www.eclipse.org/legal/epl-v10.html -# and the Eclipse Distribution License is available at -# http://www.eclipse.org/org/documents/edl-v10.php. -# -# Contributors: -# Guilherme Maciel Ferreira - initial version -#*******************************************************************************/ - -## Note: on OS X you should install XCode and the associated command-line tools - -include(${PACKAGE_BUILD_DIRECTORY}/../paho-mqtt3/find.cmake) -set(paho_mqtt3_LIBRARIES paho-mqtt3c paho-mqtt3a) -link_directories("${paho_mqtt3_LIBDIR}") -include_directories("${paho_mqtt3_INCLUDE}") -# TODO: use find_package -# find_package(paho-mqtt3 REQUIRED) - -## include directories -include_directories(${CMAKE_CURRENT_SOURCE_DIR}) - - -## libraries -if(WIN32) - set(LIBS_SYSTEM - ws2_32) -elseif(UNIX) - if(CMAKE_SYSTEM_NAME MATCHES "Linux") - set(LIB_DL dl) - endif() - set(LIBS_SYSTEM - ${LIB_DL} - c - stdc++ - pthread) -endif() - -## use Object Library to optimize compilation -set(COMMON_SRC - async_client.cpp - client.cpp - disconnect_options.cpp - iclient_persistence.cpp - message.cpp - response_options.cpp - ssl_options.cpp - string_collection.cpp - token.cpp - topic.cpp - connect_options.cpp - will_options.cpp) - -if(PAHO_WITH_SSL) - add_definitions(-DOPENSSL) -endif() - -add_library(common_obj OBJECT - ${COMMON_SRC}) - -## set position independent flag (-fPIC on Unix) -set_property(TARGET common_obj - PROPERTY POSITION_INDEPENDENT_CODE ON) - -## create the shared library -add_library(${PAHO_MQTT_CPP} SHARED - $) - -## add dependencies to the shared library -target_link_libraries(${PAHO_MQTT_CPP} - ${LIBS_SYSTEM}) - -## set the shared library soname -set_target_properties(${PAHO_MQTT_CPP} PROPERTIES - VERSION ${CLIENT_VERSION} - SOVERSION ${PAHO_VERSION_MAJOR}) - -## install the shared library -install(TARGETS ${PAHO_MQTT_CPP} - ARCHIVE DESTINATION lib - LIBRARY DESTINATION lib - RUNTIME DESTINATION bin) - -## build static version of the Paho MQTT C++ library -if(PAHO_BUILD_STATIC) - ## create the static library - add_library(${PAHO_MQTT_CPP}-static STATIC - $) - - ## add dependencies to the static library - target_link_libraries(${PAHO_MQTT_CPP}-static - ${LIBS_SYSTEM}) - - ## install the static library - install(TARGETS 
${PAHO_MQTT_CPP}-static - ARCHIVE DESTINATION lib - LIBRARY DESTINATION lib) -endif() - -## extract Paho MQTT C include directory -get_filename_component(PAHO_MQTT_C_DEV_INC_DIR ${PAHO_MQTT_C_PATH}/src ABSOLUTE) -get_filename_component(PAHO_MQTT_C_STD_INC_DIR ${PAHO_MQTT_C_PATH}/include ABSOLUTE) -set(PAHO_MQTT_C_INC_DIR - ${PAHO_MQTT_C_DEV_INC_DIR} - ${PAHO_MQTT_C_STD_INC_DIR}) - -## extract Paho MQTT C library directory -get_filename_component(PAHO_MQTT_C_DEV_LIB_DIR ${PAHO_MQTT_C_PATH}/build/output ABSOLUTE) -get_filename_component(PAHO_MQTT_C_STD_LIB_DIR ${PAHO_MQTT_C_PATH}/lib ABSOLUTE) -get_filename_component(PAHO_MQTT_C_STD64_LIB_DIR ${PAHO_MQTT_C_PATH}/lib64 ABSOLUTE) -set(PAHO_MQTT_C_LIB_DIR - ${PAHO_MQTT_C_DEV_LIB_DIR} - ${PAHO_MQTT_C_STD_LIB_DIR} - ${PAHO_MQTT_C_STD64_LIB_DIR}) - -## extract Paho MQTT C binary directory (Windows may place libraries there) -get_filename_component(PAHO_MQTT_C_BIN_DIR ${PAHO_MQTT_C_PATH}/bin ABSOLUTE) - -## add library suffixes so Windows can find Paho DLLs -set(CMAKE_FIND_LIBRARY_PREFIXES ${CMAKE_FIND_LIBRARY_PREFIXES} "") -set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES} ".dll" ".lib") - -if(PAHO_WITH_SSL) - ## find the Paho MQTT C SSL library - find_library(PAHO_MQTT_C_LIB - NAMES paho-mqtt3as - mqtt3as - PATHS ${PAHO_MQTT_C_LIB_DIR} - ${PAHO_MQTT_C_BIN_DIR}) - - find_package(OpenSSL REQUIRED) -else() - ## find the Paho MQTT C library - find_library(PAHO_MQTT_C_LIB - NAMES paho-mqtt3a - mqtt - paho-mqtt - mqtt3 - paho-mqtt3 - mqtt3a - PATHS ${PAHO_MQTT_C_LIB_DIR} - ${PAHO_MQTT_C_BIN_DIR}) -endif() - -## use the Paho MQTT C library if found. Otherwise terminate the compilation -if(${PAHO_MQTT_C_LIB} STREQUAL "PAHO_MQTT_C_LIB-NOTFOUND") - message(FATAL_ERROR "Could not find Paho MQTT C library") -else() - include_directories(${PAHO_MQTT_C_INC_DIR}) - link_directories(${PAHO_MQTT_C_LIB_DIR}) - target_link_libraries(${PAHO_MQTT_CPP} - ${PAHO_MQTT_C_LIB} - ${paho_mqtt3_LIBRARIES}) -endif() - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py deleted file mode 100644 index d0c44ed..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/pipeline.py +++ /dev/null @@ -1,287 +0,0 @@ -import os -import sys -import logging -import contextlib -import utils -import shutil -from third_party import exceptions_fail_group -from third_party import exceptions_fail_program -from third_party import FailThirdParty - - -def make_pipe(): - def process(): - pass - return process - - -def end_pipe(): - def process(p): - _ = list(p) - return process - - -def _create(): - b = make_pipe() - e = yield b - end_pipe()(e) - yield - - -@contextlib.contextmanager -def create(): - c = _create() - p = next(c) - yield (p, c) - - -def feed(packages): - def process(_): - for node in packages: - yield node - return process - - -def do(function, force, *args, **kwargs): - ''' - skeleton gtc stage - ''' - def process(packages): - def _process(): - for node in packages: - try: - package = node.get_package_name() - version = node.get_version() - - if not force: - # skip process if package came with error - if node.ret != 0: - logging.info('%s %s error detected: skiping' % (function.__name__, package)) - continue - - # skip process if package came interrupted - if node.interrupted: - logging.info('%s %s error detected: skiping' % (function.__name__, package)) - continue - - if 
function.__name__ != 'purge': - logger_function = logging.info - else: - logger_function = logging.debug - - logger_function('--------- begin@%s: %s (%s) --------' % (function.__name__, package, version)) - - # process package - ret = function(node, *args, **kwargs) - logging.debug('%s: return %s' % (function.__name__, ret)) - if isinstance(ret, bool): - if not ret: - node.ret += 1 - elif isinstance(ret, int): - # aggregation result - node.ret += abs(ret) - else: - logging.error('%s %s error invalid return: %s' % (function.__name__, package, ret)) - node.ret += 1 - - logger_function('--------- end@%s: %s (%s) --------' % (function.__name__, package, version)) - - if node.ret != 0: - node.fail_stage = function.__name__ - raise FailThirdParty('[exception] %s fail in stage: %s' % (package, function.__name__)) - - except FailThirdParty: - logging.error('fatal exception in package %s (%s)' % (package, version)) - node.ret += 1 - node.fail_stage = function.__name__ - raise - except exceptions_fail_group: - logging.error('fatal exception in package %s (%s)' % (package, version)) - node.ret += 1 - # add exception for show postponed - node.exceptions.append(sys.exc_info()) - node.fail_stage = function.__name__ - raise - except exceptions_fail_program: - logging.error('interruption in package %s (%s)' % (package, version)) - node.ret += 1 - node.fail_stage = function.__name__ - node.interrupted = True - raise - except: - # excepciones por fallos de programacion - logging.error('Postponed exception in package %s (%s)' % (package, version)) - node.ret += 1 - node.exceptions.append(sys.exc_info()) - node.fail_stage = function.__name__ - finally: - # send to next step - yield node - - for node in _process(): - yield node - return process - -####################### PIPELINE PROOF CONCEPT (UNDER CODE IS NOT USED) ############### - - -def echo(line): - def process(_): - yield line - return process - - -def cat(): - def process(p): - for line in p: - if(os.path.exists(line)): - with open(line, 'rt') as f: - for line2 in f: - yield line2 - else: - logging.warning(' filename %s not exists' % line) - return process - - -def find(folder, level=999): - def process(_): - for root, dirs, files in utils.walklevel(folder, level): - for name in files: - yield os.path.join(root, name) - return process - - -def grep(pattern): - def process(p): - for line in p: - if line.find(pattern) != -1: - yield line - return process - - -def grep_basename(pattern): - def process(p): - p0 = pattern[:1] - pL = pattern[-1:] - fixed_pattern = pattern.replace('*', '') - for line in p: - if(p0 == '*' and pL != '*'): - if os.path.basename(line).endswith(fixed_pattern): - yield line.replace('\\', '/') - elif(p0 != '*' and pL == '*'): - if os.path.basename(line).startswith(fixed_pattern): - yield line.replace('\\', '/') - else: - if os.path.basename(line).find(fixed_pattern) != -1: - yield line.replace('\\', '/') - return process - - -def grep_v(pattern): - def process(p): - for line in p: - if line.find(pattern) == -1: - yield line - return process - - -def endswith(pattern): - def process(p): - for line in p: - if line.endswith(pattern): - yield line - return process - - -def copy(rootdir, folder): - def process(p): - for line in p: - relfilename = os.path.relpath(line, rootdir) - destiny = os.path.join(folder, relfilename) - destiny_dir = os.path.dirname(destiny) - utils.trymkdir(destiny_dir) - shutil.copyfile(line, destiny) - if not os.path.exists(destiny): - raise Exception("Not exists %s" % destiny) - yield destiny - return process - 
- -def startswith(pattern): - def process(p): - for line in p: - if line.startswith(pattern): - yield line - return process - - -def printf(prefix = ''): - def process(p): - for line in p: - print("%s%s" % (prefix, line.rstrip())) - yield line - return process - - -def info(prefix = ''): - def process(p): - for line in p: - logging.info("%s%s" % (prefix, line.rstrip())) - yield line - return process - - -def debug(prefix = ''): - def process(p): - for line in p: - logging.debug("%s%s" % (prefix, line.rstrip())) - yield line - return process - - -def write_file(filename, mode='wt'): - def process(p): - content = [] - for line in p: - content.append(line) - with open(filename, mode) as f: - for line in content: - f.write('%s\n' % line.rstrip()) - for line in content: - yield line - return process - - -def tee(filename): - def process(p): - p = printf()(p) - p = write_file(filename)(p) - for line in p: - yield line - return process - - -def example_context(): - # using context - with create() as (p, finisher): - p = find('.')(p) - p = endswith('.cpp')(p) - p = cat()(p) - p = tee('result.txt')(p) - # send last part - finisher.send(p) - - -def example_simple(): - # not using context - p = make_pipe() - # begin - p = find('.', 2)(p) - p = endswith('.yml')(p) - p = grep_v('.build_')(p) - p = tee('result.txt')(p) - # end - end_pipe()(p) - -if __name__ == '__main__': - example_simple() diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py deleted file mode 100644 index d15de46..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/prepare.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -import sys -import utils -import logging -import shutil -from third_party import platforms -from third_party import build_unittests_foldername -from itertools import product -from third_party import prefered - - -def prepare(node, parameters, compiler_replace_maps): - - package = node.get_package_name() - - # source folder - source_dir = os.path.join(os.getcwd(), package) - utils.trymkdir(source_dir) - - # generate .build.sh / .build.cmd if is defined in yaml - node.get_generate_custom_script(source_dir) - - # generate find.script / find.cmd - node.generate_scripts_headers(compiler_replace_maps) - - # read root CMakeLists.txt - with open('CMakeLists.txt', 'rt') as f: - content_cmakelists = f.read() - - # OJO: dejar de borrar cuando reciclemos binarios - node.remove_packages() - - # run_tests or packing - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - logging.info('Preparing mode %s - %s' % (plat, build_mode)) - build_directory = os.path.join(os.getcwd(), node.get_build_directory(plat, build_mode)) - utils.trymkdir(build_directory) - - # download source and prepare in build_directory - node.prepare_third_party(build_directory, compiler_replace_maps) - - # copy source files to build - logging.debug('Copy sources to build: %s -> %s' % (source_dir, build_directory)) - utils.copy_folder_recursive(source_dir, build_directory) - - # before copy files - with utils.working_directory(build_directory): - for bc in node.get_before_copy(): - chunks = [x.strip() for x in bc.split(' ') if x] - if len(chunks) != 2: - raise Exception('Invalid value in before_copy: %s' % bc) - logging.debug('Copy "%s" to "%s"' % (chunks[0], chunks[1])) - shutil.copy2(chunks[0], chunks[1]) - - # if have cmakelists, 
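pipeline.py above composes its stages as closures over generators: each stage factory returns a process(iterable) function that yields filtered or transformed lines, so stages chain exactly the way example_simple() chains find/endswith/grep_v/tee. A self-contained sketch of the same pattern, with in-memory input instead of the filesystem, is shown below.

    # Self-contained sketch of the generator pipeline pattern: each factory
    # returns a process(iterable) that yields lines, so stages compose by
    # repeated application. Input data here is in-memory and illustrative.
    def feed(items):
        def process(_):
            for item in items:
                yield item
        return process

    def endswith(suffix):
        def process(p):
            for line in p:
                if line.endswith(suffix):
                    yield line
        return process

    p = feed(['a.yml', 'b.cpp', 'c.yml'])(None)
    p = endswith('.yml')(p)
    print(list(p))   # ['a.yml', 'c.yml']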
insert root cmakelists header - cmake_prefix = node.get_cmake_prefix() - build_cmakelist = os.path.join(build_directory, cmake_prefix, 'CMakeLists.txt') - if os.path.exists(build_cmakelist) and (not node.has_custom_script(source_dir)): - with open(build_cmakelist, 'rt') as f: - content_cmakelists_package = f.read() - with open(build_cmakelist, 'wt') as f: - f.write('%s\n' % content_cmakelists) - f.write('%s\n' % content_cmakelists_package) - - if parameters.fast: - logging.debug('skipping for because is in fast mode: "prepare"') - break - - # finish well - return True - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py deleted file mode 100644 index 2349465..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/purge.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import utils -import logging -from third_party import platforms - -def purge(node, parameters): - - package = node.get_package_name() - - logging.debug("Cleaning headers and cmakefiles %s" % package) - node.remove_scripts_headers() - node.remove_cmakefiles() - - logging.debug("Cleaning download %s" % package) - uncompress_directory = node.get_download_directory() - utils.tryremove_dir(uncompress_directory) - - original_directory = node.get_original_directory() - utils.tryremove_dir(original_directory) - - for plat in platforms: - - if not node.get_exclude_from_clean(): - logging.debug("Cleaning install %s" % package) - utils.tryremove_dir(node.get_install_directory(plat)) - - build_modes = node.get_build_modes() - for build_mode in build_modes: - - logging.debug("Cleaning build %s" % package) - build_directory = node.get_build_directory(plat, build_mode) - utils.tryremove_dir(build_directory) - - # finish well - return True - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt deleted file mode 100644 index 618b3f8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Lib/LibStatic/CMakeLists.txt +++ /dev/null @@ -1,34 +0,0 @@ -cmake_minimum_required(VERSION 2.6) -project(RakNetLibStatic) - -FILE(GLOB ALL_HEADER_SRCS ${RakNet_SOURCE_DIR}/Source/*.h) -FILE(GLOB ALL_CPP_SRCS ${RakNet_SOURCE_DIR}/Source/*.cpp) - -include_directories( ${RAKNET_INTERNAL_INCLUDE_DIRS} ) - -add_library(RakNetLibStatic STATIC ${ALL_CPP_SRCS} ${ALL_HEADER_SRCS} readme.txt) - -IF(WIN32 AND NOT UNIX) - SET( CMAKE_CXX_FLAGS "/D WIN32 /D _RAKNET_LIB /D _CRT_NONSTDC_NO_DEPRECATE /D _CRT_SECURE_NO_DEPRECATE /GS- /GR- ") -ENDIF(WIN32 AND NOT UNIX) - -IF(WIN32 AND NOT UNIX) - target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) - - IF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") - - IF( MSVC10 OR MSVC11 OR MSVC12 OR MSVC14 ) - set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:\"LIBCD.lib LIBCMTD.lib MSVCRT.lib\"" ) - ELSE() - set_target_properties(RakNetLibStatic PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB:"LIBCD.lib LIBCMTD.lib MSVCRT.lib"" ) - ENDIF() - - ENDIF(NOT ${CMAKE_GENERATOR} STREQUAL "MSYS Makefiles") - -ELSE(WIN32 AND NOT UNIX) - target_link_libraries (RakNetLibStatic ${RAKNET_LIBRARY_LIBS}) - INSTALL(TARGETS RakNetLibStatic DESTINATION 
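prepare() above injects the root CMakeLists.txt into each package by reading both files and rewriting the package's CMakeLists.txt with the root content prepended. A minimal sketch of that concatenation is below; the paths are illustrative.

    # Sketch of the CMakeLists injection done in prepare(): write the root
    # CMakeLists.txt content in front of the package's own file.
    def prepend_root_cmakelists(root_cmakelists, package_cmakelists):
        with open(root_cmakelists, 'rt') as f:
            root_content = f.read()
        with open(package_cmakelists, 'rt') as f:
            package_content = f.read()
        with open(package_cmakelists, 'wt') as f:
            f.write('%s\n%s\n' % (root_content, package_content))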
${RakNet_SOURCE_DIR}/Lib/RakNetLibStatic) - INSTALL(FILES ${ALL_HEADER_SRCS} DESTINATION ${RakNet_SOURCE_DIR}/include/raknet) -ENDIF(WIN32 AND NOT UNIX) - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp deleted file mode 100644 index 8f20dfa..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/raknet/Source/CCRakNetSlidingWindow.cpp +++ /dev/null @@ -1,372 +0,0 @@ -/* - * Copyright (c) 2014, Oculus VR, Inc. - * All rights reserved. - * - * This source code is licensed under the BSD-style license found in the - * LICENSE file in the root directory of this source tree. An additional grant - * of patent rights can be found in the PATENTS file in the same directory. - * - */ - -#include "CCRakNetSlidingWindow.h" - -#if USE_SLIDING_WINDOW_CONGESTION_CONTROL==1 - -static const double UNSET_TIME_US=-1; - -#if CC_TIME_TYPE_BYTES==4 -static const CCTimeType SYN=10; -#else -static const CCTimeType SYN=10000; -#endif - -#include "MTUSize.h" -#include -#include -#include -#include "RakAssert.h" -#include "RakAlloca.h" - -using namespace RakNet; - -// ****************************************************** PUBLIC METHODS ****************************************************** - -CCRakNetSlidingWindow::CCRakNetSlidingWindow() -{ -} -// ---------------------------------------------------------------------------------------------------------------------------- -CCRakNetSlidingWindow::~CCRakNetSlidingWindow() -{ - -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::Init(CCTimeType curTime, uint32_t maxDatagramPayload) -{ - (void) curTime; - - lastRtt=estimatedRTT=deviationRtt=UNSET_TIME_US; - RakAssert(maxDatagramPayload <= MAXIMUM_MTU_SIZE); - MAXIMUM_MTU_INCLUDING_UDP_HEADER=maxDatagramPayload; - cwnd=maxDatagramPayload; - ssThresh=0.0; - oldestUnsentAck=0; - nextDatagramSequenceNumber=0; - nextCongestionControlBlock=0; - backoffThisBlock=speedUpThisBlock=false; - expectedNextSequenceNumber=0; - _isContinuousSend=false; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::Update(CCTimeType curTime, bool hasDataToSendOrResend) -{ - (void) curTime; - (void) hasDataToSendOrResend; -} -// ---------------------------------------------------------------------------------------------------------------------------- -int CCRakNetSlidingWindow::GetRetransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) -{ - (void) curTime; - (void) isContinuousSend; - (void) timeSinceLastTick; - - return unacknowledgedBytes; -} -// ---------------------------------------------------------------------------------------------------------------------------- -int CCRakNetSlidingWindow::GetTransmissionBandwidth(CCTimeType curTime, CCTimeType timeSinceLastTick, uint32_t unacknowledgedBytes, bool isContinuousSend) -{ - (void) curTime; - (void) timeSinceLastTick; - - _isContinuousSend=isContinuousSend; - - if (unacknowledgedBytes<=cwnd) - return (int) (cwnd-unacknowledgedBytes); - else - return 0; -} -// 
---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::ShouldSendACKs(CCTimeType curTime, CCTimeType estimatedTimeToNextTick) -{ - CCTimeType rto = GetSenderRTOForACK(); - (void) estimatedTimeToNextTick; - - // iphone crashes on comparison between double and int64 http://www.jenkinssoftware.com/forum/index.php?topic=2717.0 - if (rto==(CCTimeType) UNSET_TIME_US) - { - // Unknown how long until the remote system will retransmit, so better send right away - return true; - } - - return curTime >= oldestUnsentAck + SYN; -} -// ---------------------------------------------------------------------------------------------------------------------------- -DatagramSequenceNumberType CCRakNetSlidingWindow::GetNextDatagramSequenceNumber(void) -{ - return nextDatagramSequenceNumber; -} -// ---------------------------------------------------------------------------------------------------------------------------- -DatagramSequenceNumberType CCRakNetSlidingWindow::GetAndIncrementNextDatagramSequenceNumber(void) -{ - DatagramSequenceNumberType dsnt=nextDatagramSequenceNumber; - nextDatagramSequenceNumber++; - return dsnt; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendBytes(CCTimeType curTime, uint32_t numBytes) -{ - (void) curTime; - (void) numBytes; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnGotPacketPair(DatagramSequenceNumberType datagramSequenceNumber, uint32_t sizeInBytes, CCTimeType curTime) -{ - (void) curTime; - (void) sizeInBytes; - (void) datagramSequenceNumber; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::OnGotPacket(DatagramSequenceNumberType datagramSequenceNumber, bool isContinuousSend, CCTimeType curTime, uint32_t sizeInBytes, uint32_t *skippedMessageCount) -{ - (void) curTime; - (void) sizeInBytes; - (void) isContinuousSend; - - if (oldestUnsentAck==0) - oldestUnsentAck=curTime; - - if (datagramSequenceNumber==expectedNextSequenceNumber) - { - *skippedMessageCount=0; - expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; - } - else if (GreaterThan(datagramSequenceNumber, expectedNextSequenceNumber)) - { - *skippedMessageCount=datagramSequenceNumber-expectedNextSequenceNumber; - // Sanity check, just use timeout resend if this was really valid - if (*skippedMessageCount>1000) - { - // During testing, the nat punchthrough server got 51200 on the first packet. 
I have no idea where this comes from, but has happened twice - if (*skippedMessageCount>(uint32_t)50000) - return false; - *skippedMessageCount=1000; - } - expectedNextSequenceNumber=datagramSequenceNumber+(DatagramSequenceNumberType)1; - } - else - { - *skippedMessageCount=0; - } - - return true; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnResend(CCTimeType curTime, RakNet::TimeUS nextActionTime) -{ - (void) curTime; - (void) nextActionTime; - - if (_isContinuousSend && backoffThisBlock==false && cwnd>MAXIMUM_MTU_INCLUDING_UDP_HEADER*2) - { - // Spec says 1/2 cwnd, but it never recovers because cwnd increases too slowly - //ssThresh=cwnd-8.0 * (MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd); - ssThresh=cwnd/2; - if (ssThresh ssThresh && ssThresh!=0) - cwnd = ssThresh + MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; - - // CC PRINTF - // printf("++ %.0f Slow start increase.\n", cwnd); - - } - else if (isNewCongestionControlPeriod) - { - cwnd+=MAXIMUM_MTU_INCLUDING_UDP_HEADER*MAXIMUM_MTU_INCLUDING_UDP_HEADER/cwnd; - - // CC PRINTF - // printf("+ %.0f Congestion avoidance increase.\n", cwnd); - } -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnDuplicateAck( CCTimeType curTime, DatagramSequenceNumberType sequenceNumber ) -{ - (void) curTime; - (void) sequenceNumber; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendAckGetBAndAS(CCTimeType curTime, bool *hasBAndAS, BytesPerMicrosecond *_B, BytesPerMicrosecond *_AS) -{ - (void) curTime; - (void) _B; - (void) _AS; - - *hasBAndAS=false; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendAck(CCTimeType curTime, uint32_t numBytes) -{ - (void) curTime; - (void) numBytes; - - oldestUnsentAck=0; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::OnSendNACK(CCTimeType curTime, uint32_t numBytes) -{ - (void) curTime; - (void) numBytes; - -} -// ---------------------------------------------------------------------------------------------------------------------------- -CCTimeType CCRakNetSlidingWindow::GetRTOForRetransmission(unsigned char timesSent) const -{ - (void) timesSent; - -#if CC_TIME_TYPE_BYTES==4 - const CCTimeType maxThreshold=2000; - //const CCTimeType minThreshold=100; - const CCTimeType additionalVariance=30; -#else - const CCTimeType maxThreshold=2000000; - //const CCTimeType minThreshold=100000; - const CCTimeType additionalVariance=30000; -#endif - - - if (estimatedRTT==UNSET_TIME_US) - return maxThreshold; - - //double u=1.0f; - double u=2.0f; - double q=4.0f; - - CCTimeType threshhold = (CCTimeType) (u * estimatedRTT + q * deviationRtt) + additionalVariance; - if (threshhold > maxThreshold) - return maxThreshold; - return threshhold; -} -// ---------------------------------------------------------------------------------------------------------------------------- -void CCRakNetSlidingWindow::SetMTU(uint32_t bytes) -{ - RakAssert(bytes < MAXIMUM_MTU_SIZE); - MAXIMUM_MTU_INCLUDING_UDP_HEADER=bytes; -} -// 
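GetRTOForRetransmission() above computes the retransmission timeout as u * estimatedRTT + q * deviationRtt plus a fixed additional variance, clamped to a maximum; with CC_TIME_TYPE_BYTES != 4 the constants are in microseconds. The sketch below restates that formula, with None standing in for UNSET_TIME_US.

    # Restatement of GetRTOForRetransmission() for the microsecond branch:
    # RTO = u * estimatedRTT + q * deviationRtt + additionalVariance, clamped.
    def rto_for_retransmission(estimated_rtt, deviation_rtt,
                               u=2.0, q=4.0,
                               additional_variance=30000, max_threshold=2000000):
        if estimated_rtt is None:        # UNSET_TIME_US in the C++ code
            return max_threshold
        threshold = u * estimated_rtt + q * deviation_rtt + additional_variance
        return min(threshold, max_threshold)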
---------------------------------------------------------------------------------------------------------------------------- -uint32_t CCRakNetSlidingWindow::GetMTU(void) const -{ - return MAXIMUM_MTU_INCLUDING_UDP_HEADER; -} -// ---------------------------------------------------------------------------------------------------------------------------- -BytesPerMicrosecond CCRakNetSlidingWindow::GetLocalReceiveRate(CCTimeType currentTime) const -{ - (void) currentTime; - - return 0; // TODO -} -// ---------------------------------------------------------------------------------------------------------------------------- -double CCRakNetSlidingWindow::GetRTT(void) const -{ - if (lastRtt==UNSET_TIME_US) - return 0.0; - return lastRtt; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::GreaterThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) -{ - // a > b? - const DatagramSequenceNumberType halfSpan =(DatagramSequenceNumberType) (((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2); - return b!=a && b-a>halfSpan; -} -// ---------------------------------------------------------------------------------------------------------------------------- -bool CCRakNetSlidingWindow::LessThan(DatagramSequenceNumberType a, DatagramSequenceNumberType b) -{ - // a < b? - const DatagramSequenceNumberType halfSpan = ((DatagramSequenceNumberType)(const uint32_t)-1)/(DatagramSequenceNumberType)2; - return b!=a && b-aGetNetworkID() < data->replica->GetNetworkID()) - return -1; - if (replica3->GetNetworkID() > data->replica->GetNetworkID()) - return 1; - */ - - // 7/28/2013 - If GetNetworkID chagned during runtime, the list would be out of order and lookup would always fail or go out of bounds - // I remember before that I could not directly compare - if (replica3->referenceIndex < data->replica->referenceIndex) - return -1; - if (replica3->referenceIndex > data->replica->referenceIndex) - return 1; - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -LastSerializationResult::LastSerializationResult() -{ - replica=0; - lastSerializationResultBS=0; - whenLastSerialized = RakNet::GetTime(); -} -LastSerializationResult::~LastSerializationResult() -{ - if (lastSerializationResultBS) - RakNet::OP_DELETE(lastSerializationResultBS,_FILE_AND_LINE_); -} -void LastSerializationResult::AllocBS(void) -{ - if (lastSerializationResultBS==0) - { - lastSerializationResultBS=RakNet::OP_NEW(_FILE_AND_LINE_); - } -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -ReplicaManager3::ReplicaManager3() -{ - defaultSendParameters.orderingChannel=0; - defaultSendParameters.priority=HIGH_PRIORITY; - defaultSendParameters.reliability=RELIABLE_ORDERED; - defaultSendParameters.sendReceipt=0; - autoSerializeInterval=30; - lastAutoSerializeOccurance=0; - autoCreateConnections=true; - autoDestroyConnections=true; - currentlyDeallocatingReplica=0; - - for (unsigned int i=0; i < 255; i++) - worldsArray[i]=0; - - AddWorld(0); -} - -// 
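GreaterThan() above compares datagram sequence numbers with wrap-around: a is considered ahead of b when the two differ and (b - a) in unsigned arithmetic exceeds half the sequence space, so a number that has wrapped past another still orders correctly. A Python restatement of that half-span test is sketched below, assuming the 32-bit space of DatagramSequenceNumberType; LessThan is the mirror image with the opposite comparison.

    # Restatement of the wrap-around comparison in GreaterThan(): a > b when
    # they differ and (b - a) mod 2^32 exceeds half the sequence space.
    HALF_SPAN = (2**32 - 1) // 2     # ((uint32_t)-1) / 2 in the C++ code

    def seq_greater_than(a, b, mod=2**32):
        return a != b and ((b - a) % mod) > HALF_SPAN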
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -ReplicaManager3::~ReplicaManager3() -{ - if (autoDestroyConnections) - { - for (unsigned int i=0; i < worldsList.Size(); i++) - { - RakAssert(worldsList[i]->connectionList.Size()==0); - } - } - Clear(true); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetAutoManageConnections(bool autoCreate, bool autoDestroy) -{ - autoCreateConnections=autoCreate; - autoDestroyConnections=autoDestroy; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::GetAutoCreateConnections(void) const -{ - return autoCreateConnections; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::GetAutoDestroyConnections(void) const -{ - return autoDestroyConnections; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::AutoCreateConnectionList( - DataStructures::List &participantListIn, - DataStructures::List &participantListOut, - WorldId worldId) -{ - for (unsigned int index=0; index < participantListIn.Size(); index++) - { - if (GetConnectionByGUID(participantListIn[index], worldId)) - { - Connection_RM3 *connection = AllocConnection(rakPeerInterface->GetSystemAddressFromGuid(participantListIn[index]), participantListIn[index]); - if (connection) - { - PushConnection(connection); - participantListOut.Push(connection, _FILE_AND_LINE_); - } - } - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::PushConnection(RakNet::Connection_RM3 *newConnection, WorldId worldId) -{ - if (newConnection==0) - return false; - if (GetConnectionByGUID(newConnection->GetRakNetGUID(), worldId)) - return false; - // Was this intended? 
- RakAssert(newConnection->GetRakNetGUID()!=rakPeerInterface->GetMyGUID()); - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index = world->connectionList.GetIndexOf(newConnection); - if (index==(unsigned int)-1) - { - world->connectionList.Push(newConnection,_FILE_AND_LINE_); - - // Send message to validate the connection - newConnection->SendValidation(rakPeerInterface, worldId); - - Connection_RM3::ConstructionMode constructionMode = newConnection->QueryConstructionMode(); - if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int pushIdx; - for (pushIdx=0; pushIdx < world->userReplicaList.Size(); pushIdx++) - newConnection->OnLocalReference(world->userReplicaList[pushIdx], this); - } - } - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::DeallocReplicaNoBroadcastDestruction(RakNet::Connection_RM3 *connection, RakNet::Replica3 *replica3) -{ - currentlyDeallocatingReplica=replica3; - replica3->DeallocReplica(connection); - currentlyDeallocatingReplica=0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RakNet::Connection_RM3 * ReplicaManager3::PopConnection(unsigned int index, WorldId worldId) -{ - DataStructures::List replicaList; - DataStructures::List destructionList; - DataStructures::List broadcastList; - RakNet::Connection_RM3 *connection; - unsigned int index2; - RM3ActionOnPopConnection action; - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - connection=world->connectionList[index]; - - // Clear out downloadGroup - connection->ClearDownloadGroup(rakPeerInterface); - - RakNetGUID guid = connection->GetRakNetGUID(); - // This might be wrong, I am relying on the variable creatingSystemGuid which is transmitted - // automatically from the first system to reference the object. However, if an object changes - // owners then it is not going to be returned here, and therefore QueryActionOnPopConnection() - // will not be called for the new owner. 
- GetReplicasCreatedByGuid(guid, replicaList); - - for (index2=0; index2 < replicaList.Size(); index2++) - { - action = replicaList[index2]->QueryActionOnPopConnection(connection); - replicaList[index2]->OnPoppedConnection(connection); - if (action==RM3AOPC_DELETE_REPLICA) - { - if (replicaList[index2]->GetNetworkIDManager()) - destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); - } - else if (action==RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION) - { - if (replicaList[index2]->GetNetworkIDManager()) - destructionList.Push( replicaList[index2]->GetNetworkID(), _FILE_AND_LINE_ ); - - broadcastList.Push( replicaList[index2], _FILE_AND_LINE_ ); - } - else if (action==RM3AOPC_DO_NOTHING) - { - for (unsigned int index3 = 0; index3 < connection->queryToSerializeReplicaList.Size(); index3++) - { - LastSerializationResult *lsr = connection->queryToSerializeReplicaList[index3]; - lsr->whenLastSerialized=0; - if (lsr->lastSerializationResultBS) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - lsr->lastSerializationResultBS->bitStream[z].Reset(); - } - } - } - } - - BroadcastDestructionList(broadcastList, connection->GetSystemAddress()); - for (index2=0; index2 < destructionList.Size(); index2++) - { - // Do lookup in case DeallocReplica destroyed one of of the later Replica3 instances in the list - Replica3* replicaToDestroy = world->networkIDManager->GET_OBJECT_FROM_ID(destructionList[index2]); - if (replicaToDestroy) - { - replicaToDestroy->PreDestruction(connection); - replicaToDestroy->DeallocReplica(connection); - } - } - - world->connectionList.RemoveAtIndex(index); - return connection; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RakNet::Connection_RM3 * ReplicaManager3::PopConnection(RakNetGUID guid, WorldId worldId) -{ - unsigned int index; - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetRakNetGUID()==guid) - { - return PopConnection(index, worldId); - } - } - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::Reference(RakNet::Replica3 *replica3, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index = ReferenceInternal(replica3, worldId); - - if (index!=(unsigned int)-1) - { - unsigned int pushIdx; - for (pushIdx=0; pushIdx < world->connectionList.Size(); pushIdx++) - { - Connection_RM3::ConstructionMode constructionMode = world->connectionList[pushIdx]->QueryConstructionMode(); - if (constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==Connection_RM3::QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - world->connectionList[pushIdx]->OnLocalReference(replica3, this); - } - } - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -unsigned int ReplicaManager3::ReferenceInternal(RakNet::Replica3 *replica3, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && 
"World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - index = world->userReplicaList.GetIndexOf(replica3); - if (index==(unsigned int)-1) - { - RakAssert(world->networkIDManager); - replica3->SetNetworkIDManager(world->networkIDManager); - // If it crashes on rakPeerInterface==0 then you didn't call RakPeerInterface::AttachPlugin() - if (replica3->creatingSystemGUID==UNASSIGNED_RAKNET_GUID) - replica3->creatingSystemGUID=rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - replica3->replicaManager=this; - if (replica3->referenceIndex==(uint32_t)-1) - { - replica3->referenceIndex=nextReferenceIndex++; - } - world->userReplicaList.Push(replica3,_FILE_AND_LINE_); - return world->userReplicaList.Size()-1; - } - return (unsigned int) -1; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::Dereference(RakNet::Replica3 *replica3, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index, index2; - for (index=0; index < world->userReplicaList.Size(); index++) - { - if (world->userReplicaList[index]==replica3) - { - world->userReplicaList.RemoveAtIndex(index); - break; - } - } - - // Remove from all connections - for (index2=0; index2 < world->connectionList.Size(); index2++) - { - world->connectionList[index2]->OnDereference(replica3, this); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::DereferenceList(DataStructures::List &replicaListIn, WorldId worldId) -{ - unsigned int index; - for (index=0; index < replicaListIn.Size(); index++) - Dereference(replicaListIn[index], worldId); -} - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetReplicasCreatedByMe(DataStructures::List &replicaListOut, WorldId worldId) -{ - //RakNetGUID myGuid = rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - GetReplicasCreatedByGuid(rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS), replicaListOut, worldId); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetReferencedReplicaList(DataStructures::List &replicaListOut, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - replicaListOut=world->userReplicaList; -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetReplicasCreatedByGuid(RakNetGUID guid, DataStructures::List &replicaListOut, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - replicaListOut.Clear(false,_FILE_AND_LINE_); - unsigned int index; - for (index=0; index < world->userReplicaList.Size(); index++) 
- { - if (world->userReplicaList[index]->creatingSystemGUID==guid) - replicaListOut.Push(world->userReplicaList[index],_FILE_AND_LINE_); - } -} - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -unsigned ReplicaManager3::GetReplicaCount(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->userReplicaList.Size(); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3 *ReplicaManager3::GetReplicaAtIndex(unsigned index, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->userReplicaList[index]; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -unsigned int ReplicaManager3::GetConnectionCount(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->connectionList.Size(); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3* ReplicaManager3::GetConnectionAtIndex(unsigned index, WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->connectionList[index]; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3* ReplicaManager3::GetConnectionBySystemAddress(const SystemAddress &sa, WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetSystemAddress()==sa) - { - return world->connectionList[index]; - } - } - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3* ReplicaManager3::GetConnectionByGUID(RakNetGUID guid, WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetRakNetGUID()==guid) - { - return world->connectionList[index]; - } - } - return 0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetDefaultOrderingChannel(char def) -{ - defaultSendParameters.orderingChannel=def; -} - -// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetDefaultPacketPriority(PacketPriority def) -{ - defaultSendParameters.priority=def; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetDefaultPacketReliability(PacketReliability def) -{ - defaultSendParameters.reliability=def; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetAutoSerializeInterval(RakNet::Time intervalMS) -{ - autoSerializeInterval=intervalMS; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::GetConnectionsThatHaveReplicaConstructed(Replica3 *replica, DataStructures::List &connectionsThatHaveConstructedThisReplica, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - connectionsThatHaveConstructedThisReplica.Clear(false,_FILE_AND_LINE_); - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->HasReplicaConstructed(replica)) - connectionsThatHaveConstructedThisReplica.Push(world->connectionList[index],_FILE_AND_LINE_); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool ReplicaManager3::GetAllConnectionDownloadsCompleted(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - unsigned int index; - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->GetDownloadWasCompleted()==false) - return false; - } - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::Clear(bool deleteWorlds) -{ - for (unsigned int i=0; i < worldsList.Size(); i++) - { - worldsList[i]->Clear(this); - if (deleteWorlds) - { - worldsArray[worldsList[i]->worldId]=0; - delete worldsList[i]; - } - } - if (deleteWorlds) - worldsList.Clear(false, _FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -ReplicaManager3::RM3World::RM3World() -{ - networkIDManager=0; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::RM3World::Clear(ReplicaManager3 *replicaManager3) -{ - if (replicaManager3->GetAutoDestroyConnections()) - { - for (unsigned int i=0; i < connectionList.Size(); i++) - 
replicaManager3->DeallocConnection(connectionList[i]); - } - else - { - // Clear out downloadGroup even if not auto destroying the connection, since the packets need to go back to RakPeer - for (unsigned int i=0; i < connectionList.Size(); i++) - connectionList[i]->ClearDownloadGroup(replicaManager3->GetRakPeerInterface()); - } - - for (unsigned int i=0; i < userReplicaList.Size(); i++) - { - userReplicaList[i]->replicaManager=0; - userReplicaList[i]->SetNetworkIDManager(0); - } - connectionList.Clear(true,_FILE_AND_LINE_); - userReplicaList.Clear(true,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PRO ReplicaManager3::GetDefaultSendParameters(void) const -{ - return defaultSendParameters; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::AddWorld(WorldId worldId) -{ - RakAssert(worldsArray[worldId]==0 && "World already in use"); - - RM3World *newWorld = RakNet::OP_NEW(_FILE_AND_LINE_); - newWorld->worldId=worldId; - worldsArray[worldId]=newWorld; - worldsList.Push(newWorld,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::RemoveWorld(WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - for (unsigned int i=0; i < worldsList.Size(); i++) - { - if (worldsList[i]==worldsArray[worldId]) - { - RakNet::OP_DELETE(worldsList[i],_FILE_AND_LINE_); - worldsList.RemoveAtIndexFast(i); - break; - } - } - worldsArray[worldId]=0; - -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -NetworkIDManager *ReplicaManager3::GetNetworkIDManager(WorldId worldId) const -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - return world->networkIDManager; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::SetNetworkIDManager(NetworkIDManager *_networkIDManager, WorldId worldId) -{ - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - world->networkIDManager=_networkIDManager; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnReceive(Packet *packet) -{ - if (packet->length<2) - return RR_CONTINUE_PROCESSING; - - WorldId incomingWorldId; - - RakNet::Time timestamp=0; - unsigned char packetIdentifier, packetDataOffset; - if ( ( unsigned char ) packet->data[ 0 ] == ID_TIMESTAMP ) - { - if ( packet->length > sizeof( unsigned char ) + sizeof( RakNet::Time ) ) - { - packetIdentifier = ( unsigned char ) packet->data[ sizeof( unsigned char ) + sizeof( RakNet::Time ) ]; - // Required for proper endian swapping - RakNet::BitStream 
tsBs(packet->data+sizeof(MessageID),packet->length-1,false); - tsBs.Read(timestamp); - // Next line assumes worldId is only 1 byte - RakAssert(sizeof(WorldId)==1); - incomingWorldId=packet->data[sizeof( unsigned char )*2 + sizeof( RakNet::Time )]; - packetDataOffset=sizeof( unsigned char )*3 + sizeof( RakNet::Time ); - } - else - return RR_STOP_PROCESSING_AND_DEALLOCATE; - } - else - { - packetIdentifier = ( unsigned char ) packet->data[ 0 ]; - // Next line assumes worldId is only 1 byte - RakAssert(sizeof(WorldId)==1); - incomingWorldId=packet->data[sizeof( unsigned char )]; - packetDataOffset=sizeof( unsigned char )*2; - } - - if (worldsArray[incomingWorldId]==0) - return RR_CONTINUE_PROCESSING; - - switch (packetIdentifier) - { - case ID_REPLICA_MANAGER_CONSTRUCTION: - return OnConstruction(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); - case ID_REPLICA_MANAGER_SERIALIZE: - return OnSerialize(packet, packet->data, packet->length, packet->guid, timestamp, packetDataOffset, incomingWorldId); - case ID_REPLICA_MANAGER_DOWNLOAD_STARTED: - if (packet->wasGeneratedLocally==false) - { - return OnDownloadStarted(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); - } - else - break; - case ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE: - if (packet->wasGeneratedLocally==false) - { - return OnDownloadComplete(packet, packet->data, packet->length, packet->guid, packetDataOffset, incomingWorldId); - } - else - break; - case ID_REPLICA_MANAGER_SCOPE_CHANGE: - { - Connection_RM3 *connection = GetConnectionByGUID(packet->guid, incomingWorldId); - if (connection && connection->isValidated==false) - { - // This connection is now confirmed bidirectional - connection->isValidated=true; - // Reply back on validation - connection->SendValidation(rakPeerInterface,incomingWorldId); - } - } - } - - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::AutoConstructByQuery(ReplicaManager3 *replicaManager3, WorldId worldId) -{ - ValidateLists(replicaManager3); - - ConstructionMode constructionMode = QueryConstructionMode(); - - unsigned int index; - RM3ConstructionState constructionState; - LastSerializationResult *lsr; - index=0; - - constructedReplicasCulled.Clear(false,_FILE_AND_LINE_); - destroyedReplicasCulled.Clear(false,_FILE_AND_LINE_); - - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - while (index < queryToConstructReplicaList.Size()) - { - lsr=queryToConstructReplicaList[index]; - constructionState=lsr->replica->QueryConstruction(this, replicaManager3); - if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY || constructionState==RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT) - { - OnReplicaAlreadyExists(index, replicaManager3); - if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) - constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); - - /* - if (constructionState==RM3CS_ALREADY_EXISTS_REMOTELY) - { - // Serialize construction data to this connection - RakNet::BitStream bsOut; - bsOut.Write((MessageID)ID_REPLICA_MANAGER_3_SERIALIZE_CONSTRUCTION_EXISTING); - bsOut.Write(replicaManager3->GetWorldID()); - NetworkID networkId; - networkId=lsr->replica->GetNetworkID(); - bsOut.Write(networkId); - BitSize_t bitsWritten = 
bsOut.GetNumberOfBitsUsed(); - lsr->replica->SerializeConstructionExisting(&bsOut, this); - if (bsOut.GetNumberOfBitsUsed()!=bitsWritten) - replicaManager3->SendUnified(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,GetSystemAddress(), false); - } - - // Serialize first serialization to this connection. - // This is done here, as it isn't done in PushConstruction - SerializeParameters sp; - RakNet::BitStream emptyBs; - for (index=0; index < (unsigned int) RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) - { - sp.lastSentBitstream[index]=&emptyBs; - sp.pro[index]=replicaManager3->GetDefaultSendParameters(); - } - sp.bitsWrittenSoFar=0; - sp.destinationConnection=this; - sp.messageTimestamp=0; - sp.whenLastSerialized=0; - - RakNet::Replica3 *replica = lsr->replica; - - RM3SerializationResult res = replica->Serialize(&sp); - if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && - res!=RM3SR_DO_NOT_SERIALIZE && - res!=RM3SR_SERIALIZED_UNIQUELY) - { - bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); - allIndices[z]=true; - } - if (SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, replicaManager3->GetRakPeerInterface(), replicaManager3->GetWorldID())==SSICR_SENT_DATA) - lsr->replica->whenLastSerialized=RakNet::GetTimeMS(); - } - */ - } - else if (constructionState==RM3CS_SEND_CONSTRUCTION) - { - OnConstructToThisConnection(index, replicaManager3); - RakAssert(lsr->replica); - constructedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); - } - else if (constructionState==RM3CS_NEVER_CONSTRUCT) - { - OnNeverConstruct(index, replicaManager3); - } - else// if (constructionState==RM3CS_NO_ACTION) - { - // Do nothing - index++; - } - } - - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - RM3DestructionState destructionState; - index=0; - while (index < queryToDestructReplicaList.Size()) - { - lsr=queryToDestructReplicaList[index]; - destructionState=lsr->replica->QueryDestruction(this, replicaManager3); - if (destructionState==RM3DS_SEND_DESTRUCTION) - { - OnSendDestructionFromQuery(index, replicaManager3); - destroyedReplicasCulled.Push(lsr->replica,_FILE_AND_LINE_); - } - else if (destructionState==RM3DS_DO_NOT_QUERY_DESTRUCTION) - { - OnDoNotQueryDestruction(index, replicaManager3); - } - else// if (destructionState==RM3CS_NO_ACTION) - { - // Do nothing - index++; - } - } - } - } - else if (constructionMode==QUERY_CONNECTION_FOR_REPLICA_LIST) - { - QueryReplicaList(constructedReplicasCulled,destroyedReplicasCulled); - - unsigned int idx1, idx2; - - // Create new - for (idx2=0; idx2 < constructedReplicasCulled.Size(); idx2++) - OnConstructToThisConnection(constructedReplicasCulled[idx2], replicaManager3); - - bool exists; - for (idx2=0; idx2 < destroyedReplicasCulled.Size(); idx2++) - { - exists=false; - bool objectExists; - idx1=constructedReplicaList.GetIndexFromKey(destroyedReplicasCulled[idx2], &objectExists); - if (objectExists) - { - constructedReplicaList.RemoveAtIndex(idx1); - - unsigned int j; - for (j=0; j < queryToSerializeReplicaList.Size(); j++) - { - if (queryToSerializeReplicaList[j]->replica==destroyedReplicasCulled[idx2] ) - { - queryToSerializeReplicaList.RemoveAtIndex(j); - break; - } - } - } - } - } - - SendConstruction(constructedReplicasCulled,destroyedReplicasCulled,replicaManager3->defaultSendParameters,replicaManager3->rakPeerInterface,worldId,replicaManager3); -} -void 
ReplicaManager3::Update(void) -{ - unsigned int index,index2,index3; - - WorldId worldId; - RM3World *world; - RakNet::Time time = RakNet::GetTime(); - - for (index3=0; index3 < worldsList.Size(); index3++) - { - world = worldsList[index3]; - worldId = world->worldId; - - for (index=0; index < world->connectionList.Size(); index++) - { - if (world->connectionList[index]->isValidated==false) - continue; - world->connectionList[index]->AutoConstructByQuery(this, worldId); - } - } - - if (time - lastAutoSerializeOccurance >= autoSerializeInterval) - { - for (index3=0; index3 < worldsList.Size(); index3++) - { - world = worldsList[index3]; - worldId = world->worldId; - - for (index=0; index < world->userReplicaList.Size(); index++) - { - world->userReplicaList[index]->forceSendUntilNextUpdate=false; - world->userReplicaList[index]->OnUserReplicaPreSerializeTick(); - } - - unsigned int index; - SerializeParameters sp; - sp.curTime=time; - Connection_RM3 *connection; - SendSerializeIfChangedResult ssicr; - LastSerializationResult *lsr; - - sp.messageTimestamp=0; - for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) - sp.pro[i]=defaultSendParameters; - index2=0; - for (index=0; index < world->connectionList.Size(); index++) - { - connection = world->connectionList[index]; - sp.bitsWrittenSoFar=0; - index2=0; - sp.destinationConnection=connection; - - DataStructures::List replicasToSerialize; - replicasToSerialize.Clear(true, _FILE_AND_LINE_); - if (connection->QuerySerializationList(replicasToSerialize)) - { - // Update replica->lsr so we can lookup in the next block - // lsr is per connection / per replica - while (index2 < connection->queryToSerializeReplicaList.Size()) - { - connection->queryToSerializeReplicaList[index2]->replica->lsr=connection->queryToSerializeReplicaList[index2]; - index2++; - } - - - // User is manually specifying list of replicas to serialize - index2=0; - while (index2 < replicasToSerialize.Size()) - { - lsr=replicasToSerialize[index2]->lsr; - RakAssert(lsr->replica==replicasToSerialize[index2]); - - sp.whenLastSerialized=lsr->whenLastSerialized; - ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); - if (ssicr==SSICR_SENT_DATA) - lsr->whenLastSerialized=time; - index2++; - } - } - else - { - while (index2 < connection->queryToSerializeReplicaList.Size()) - { - lsr=connection->queryToSerializeReplicaList[index2]; - - sp.destinationConnection=connection; - sp.whenLastSerialized=lsr->whenLastSerialized; - ssicr=connection->SendSerializeIfChanged(lsr, &sp, GetRakPeerInterface(), worldId, this, time); - if (ssicr==SSICR_SENT_DATA) - { - lsr->whenLastSerialized=time; - index2++; - } - else if (ssicr==SSICR_NEVER_SERIALIZE) - { - // Removed from the middle of the list - } - else - index2++; - } - } - } - } - - lastAutoSerializeOccurance=time; - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnClosedConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, PI2_LostConnectionReason lostConnectionReason ) -{ - (void) lostConnectionReason; - (void) systemAddress; - if (autoDestroyConnections) - { - Connection_RM3 *connection = PopConnection(rakNetGUID); - if (connection) - DeallocConnection(connection); - } -} - -// 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnNewConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, bool isIncoming) -{ - (void) isIncoming; - if (autoCreateConnections) - { - Connection_RM3 *connection = AllocConnection(systemAddress, rakNetGUID); - if (connection) - PushConnection(connection); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnRakPeerShutdown(void) -{ - if (autoDestroyConnections) - { - RM3World *world; - unsigned int index3; - for (index3=0; index3 < worldsList.Size(); index3++) - { - world = worldsList[index3]; - - while (world->connectionList.Size()) - { - Connection_RM3 *connection = PopConnection(world->connectionList.Size()-1, world->worldId); - if (connection) - DeallocConnection(connection); - } - } - } - - - Clear(false); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void ReplicaManager3::OnDetach(void) -{ - OnRakPeerShutdown(); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnConstruction(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) -{ - RM3World *world = worldsArray[worldId]; - - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - { - // Almost certainly a bug - RakAssert("Got OnConstruction but no connection yet" && 0); - return RR_CONTINUE_PROCESSING; - } - if (connection->groupConstructionAndSerialize) - { - connection->downloadGroup.Push(packet, __FILE__, __LINE__); - return RR_STOP_PROCESSING; - } - - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - uint16_t constructionObjectListSize, destructionObjectListSize, index, index2; - BitSize_t streamEnd, writeAllocationIDEnd; - Replica3 *replica; - NetworkID networkId; - RakNetGUID creatingSystemGuid; - bool actuallyCreateObject=false; - - DataStructures::List actuallyCreateObjectList; - DataStructures::List constructionTickStack; - - RakAssert(world->networkIDManager); - - bsIn.Read(constructionObjectListSize); - for (index=0; index < constructionObjectListSize; index++) - { - bsIn.Read(streamEnd); - bsIn.Read(networkId); - Replica3* existingReplica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); - bsIn.Read(actuallyCreateObject); - actuallyCreateObjectList.Push(actuallyCreateObject, _FILE_AND_LINE_); - bsIn.AlignReadToByteBoundary(); - - if (actuallyCreateObject) - { - bsIn.Read(creatingSystemGuid); - bsIn.Read(writeAllocationIDEnd); - - //printf("OnConstruction: %i\n",networkId.guid.g); // Removeme - if (existingReplica) - { - existingReplica->replicaManager=this; - - // Network ID already in use - connection->OnDownloadExisting(existingReplica, this); - - constructionTickStack.Push(0, _FILE_AND_LINE_); - bsIn.SetReadOffset(streamEnd); - continue; - } - - bsIn.AlignReadToByteBoundary(); - 
replica = connection->AllocReplica(&bsIn, this); - if (replica==0) - { - constructionTickStack.Push(0, _FILE_AND_LINE_); - bsIn.SetReadOffset(streamEnd); - continue; - } - - // Go past the bitStream written to with WriteAllocationID(). Necessary in case the user didn't read out the bitStream the same way it was written - // bitOffset2 is already aligned - bsIn.SetReadOffset(writeAllocationIDEnd); - - replica->SetNetworkIDManager(world->networkIDManager); - replica->SetNetworkID(networkId); - - replica->replicaManager=this; - replica->creatingSystemGUID=creatingSystemGuid; - - if (!replica->QueryRemoteConstruction(connection) || - !replica->DeserializeConstruction(&bsIn, connection)) - { - DeallocReplicaNoBroadcastDestruction(connection, replica); - bsIn.SetReadOffset(streamEnd); - constructionTickStack.Push(0, _FILE_AND_LINE_); - continue; - } - - constructionTickStack.Push(replica, _FILE_AND_LINE_); - - // Register the replica - ReferenceInternal(replica, worldId); - } - else - { - if (existingReplica) - { - existingReplica->DeserializeConstructionExisting(&bsIn, connection); - constructionTickStack.Push(existingReplica, _FILE_AND_LINE_); - } - else - { - constructionTickStack.Push(0, _FILE_AND_LINE_); - } - } - - - bsIn.SetReadOffset(streamEnd); - bsIn.AlignReadToByteBoundary(); - } - - RakAssert(constructionTickStack.Size()==constructionObjectListSize); - RakAssert(actuallyCreateObjectList.Size()==constructionObjectListSize); - - RakNet::BitStream empty; - for (index=0; index < constructionObjectListSize; index++) - { - bool pdcWritten=false; - bsIn.Read(pdcWritten); - if (pdcWritten) - { - bsIn.AlignReadToByteBoundary(); - bsIn.Read(streamEnd); - bsIn.Read(networkId); - if (constructionTickStack[index]!=0) - { - bsIn.AlignReadToByteBoundary(); - if (actuallyCreateObjectList[index]) - constructionTickStack[index]->PostDeserializeConstruction(&bsIn, connection); - else - constructionTickStack[index]->PostDeserializeConstructionExisting(&bsIn, connection); - } - bsIn.SetReadOffset(streamEnd); - } - else - { - if (constructionTickStack[index]!=0) - { - if (actuallyCreateObjectList[index]) - constructionTickStack[index]->PostDeserializeConstruction(&empty, connection); - else - constructionTickStack[index]->PostDeserializeConstructionExisting(&empty, connection); - } - } - } - - for (index=0; index < constructionObjectListSize; index++) - { - if (constructionTickStack[index]!=0) - { - if (actuallyCreateObjectList[index]) - { - // Tell the connection(s) that this object exists since they just sent it to us - connection->OnDownloadFromThisSystem(constructionTickStack[index], this); - - for (index2=0; index2 < world->connectionList.Size(); index2++) - { - if (world->connectionList[index2]!=connection) - world->connectionList[index2]->OnDownloadFromOtherSystem(constructionTickStack[index], this); - } - } - } - } - - // Destructions - bool b = bsIn.Read(destructionObjectListSize); - (void) b; - RakAssert(b); - for (index=0; index < destructionObjectListSize; index++) - { - bsIn.Read(networkId); - bsIn.Read(streamEnd); - replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); - if (replica==0) - { - // Unknown object - bsIn.SetReadOffset(streamEnd); - continue; - } - bsIn.Read(replica->deletingSystemGUID); - if (replica->DeserializeDestruction(&bsIn,connection)) - { - // Make sure it wasn't deleted in DeserializeDestruction - if (world->networkIDManager->GET_OBJECT_FROM_ID(networkId)) - { - replica->PreDestruction(connection); - - // Forward deletion by remote system - if 
(replica->QueryRelayDestruction(connection)) - BroadcastDestruction(replica,connection->GetSystemAddress()); - Dereference(replica); - DeallocReplicaNoBroadcastDestruction(connection, replica); - } - } - else - { - replica->PreDestruction(connection); - connection->OnDereference(replica, this); - } - - bsIn.AlignReadToByteBoundary(); - } - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnSerialize(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, RakNet::Time timestamp, unsigned char packetDataOffset, WorldId worldId) -{ - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - return RR_CONTINUE_PROCESSING; - if (connection->groupConstructionAndSerialize) - { - connection->downloadGroup.Push(packet, __FILE__, __LINE__); - return RR_STOP_PROCESSING; - } - - RM3World *world = worldsArray[worldId]; - RakAssert(world->networkIDManager); - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - - struct DeserializeParameters ds; - ds.timeStamp=timestamp; - ds.sourceConnection=connection; - - Replica3 *replica; - NetworkID networkId; - BitSize_t bitsUsed; - bsIn.Read(networkId); - //printf("OnSerialize: %i\n",networkId.guid.g); // Removeme - replica = world->networkIDManager->GET_OBJECT_FROM_ID(networkId); - if (replica) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - bsIn.Read(ds.bitstreamWrittenTo[z]); - if (ds.bitstreamWrittenTo[z]) - { - bsIn.ReadCompressed(bitsUsed); - bsIn.AlignReadToByteBoundary(); - bsIn.Read(ds.serializationBitstream[z], bitsUsed); - } - } - replica->Deserialize(&ds); - } - return RR_CONTINUE_PROCESSING; -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnDownloadStarted(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) -{ - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - return RR_CONTINUE_PROCESSING; - if (connection->QueryGroupDownloadMessages() && - // ID_DOWNLOAD_STARTED will be processed twice, being processed the second time once ID_DOWNLOAD_COMPLETE arrives. 
- // However, the second time groupConstructionAndSerialize will be set to true so it won't be processed a third time - connection->groupConstructionAndSerialize==false - ) - { - // These messages will be held by the plugin and returned when the download is complete - connection->groupConstructionAndSerialize=true; - RakAssert(connection->downloadGroup.Size()==0); - connection->downloadGroup.Push(packet, __FILE__, __LINE__); - return RR_STOP_PROCESSING; - } - - connection->groupConstructionAndSerialize=false; - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - connection->DeserializeOnDownloadStarted(&bsIn); - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -PluginReceiveResult ReplicaManager3::OnDownloadComplete(Packet *packet, unsigned char *packetData, int packetDataLength, RakNetGUID senderGuid, unsigned char packetDataOffset, WorldId worldId) -{ - Connection_RM3 *connection = GetConnectionByGUID(senderGuid, worldId); - if (connection==0) - return RR_CONTINUE_PROCESSING; - - if (connection->groupConstructionAndSerialize==true && connection->downloadGroup.Size()>0) - { - // Push back buffered packets in front of this one - unsigned int i; - for (i=0; i < connection->downloadGroup.Size(); i++) - rakPeerInterface->PushBackPacket(connection->downloadGroup[i],false); - - // Push this one to be last too. It will be processed again, but the second time - // groupConstructionAndSerialize will be false and downloadGroup will be empty, so it will go past this block - connection->downloadGroup.Clear(__FILE__,__LINE__); - rakPeerInterface->PushBackPacket(packet,false); - - return RR_STOP_PROCESSING; - } - - RakNet::BitStream bsIn(packetData,packetDataLength,false); - bsIn.IgnoreBytes(packetDataOffset); - connection->gotDownloadComplete=true; - connection->DeserializeOnDownloadComplete(&bsIn); - return RR_CONTINUE_PROCESSING; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3* ReplicaManager3::GetReplicaByNetworkID(NetworkID networkId, WorldId worldId) -{ - RM3World *world = worldsArray[worldId]; - - unsigned int i; - for (i=0; i < world->userReplicaList.Size(); i++) - { - if (world->userReplicaList[i]->GetNetworkID()==networkId) - return world->userReplicaList[i]; - } - return 0; -} - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - - -void ReplicaManager3::BroadcastDestructionList(DataStructures::List &replicaListSource, const SystemAddress &exclusionAddress, WorldId worldId) -{ - RakNet::BitStream bsOut; - unsigned int i,j; - - RakAssert(worldsArray[worldId]!=0 && "World not in use"); - RM3World *world = worldsArray[worldId]; - - DataStructures::List replicaList; - - for (i=0; i < replicaListSource.Size(); i++) - { - if (replicaListSource[i]==currentlyDeallocatingReplica) - continue; - replicaList.Push(replicaListSource[i], __FILE__, __LINE__); - } - - if (replicaList.Size()==0) - return; - - for (i=0; i < replicaList.Size(); i++) - { - if (replicaList[i]->deletingSystemGUID==UNASSIGNED_RAKNET_GUID) - 
replicaList[i]->deletingSystemGUID=GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - } - - for (j=0; j < world->connectionList.Size(); j++) - { - if (world->connectionList[j]->GetSystemAddress()==exclusionAddress) - continue; - - bsOut.Reset(); - bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); - bsOut.Write(worldId); - uint16_t cnt=0; - bsOut.Write(cnt); // No construction - cnt=(uint16_t) replicaList.Size(); - BitSize_t cntOffset=bsOut.GetWriteOffset();; - bsOut.Write(cnt); // Overwritten at send call - cnt=0; - - for (i=0; i < replicaList.Size(); i++) - { - if (world->connectionList[j]->HasReplicaConstructed(replicaList[i])==false) - continue; - cnt++; - - NetworkID networkId; - networkId=replicaList[i]->GetNetworkID(); - bsOut.Write(networkId); - BitSize_t offsetStart, offsetEnd; - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); - bsOut.Write(replicaList[i]->deletingSystemGUID); - replicaList[i]->SerializeDestruction(&bsOut, world->connectionList[j]); - bsOut.AlignWriteToByteBoundary(); - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - - if (cnt>0) - { - BitSize_t curOffset=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(cntOffset); - bsOut.Write(cnt); - bsOut.SetWriteOffset(curOffset); - rakPeerInterface->Send(&bsOut,defaultSendParameters.priority,defaultSendParameters.reliability,defaultSendParameters.orderingChannel,world->connectionList[j]->GetSystemAddress(),false, defaultSendParameters.sendReceipt); - } - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - - -void ReplicaManager3::BroadcastDestruction(Replica3 *replica, const SystemAddress &exclusionAddress) -{ - DataStructures::List replicaList; - replicaList.Push(replica, _FILE_AND_LINE_ ); - BroadcastDestructionList(replicaList,exclusionAddress); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3::Connection_RM3(const SystemAddress &_systemAddress, RakNetGUID _guid) -: systemAddress(_systemAddress), guid(_guid) -{ - isValidated=false; - isFirstConstruction=true; - groupConstructionAndSerialize=false; - gotDownloadComplete=false; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Connection_RM3::~Connection_RM3() -{ - unsigned int i; - for (i=0; i < constructedReplicaList.Size(); i++) - RakNet::OP_DELETE(constructedReplicaList[i], _FILE_AND_LINE_); - for (i=0; i < queryToConstructReplicaList.Size(); i++) - RakNet::OP_DELETE(queryToConstructReplicaList[i], _FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::GetConstructedReplicas(DataStructures::List &objectsTheyDoHave) -{ - objectsTheyDoHave.Clear(true,_FILE_AND_LINE_); - for 
(unsigned int idx=0; idx < constructedReplicaList.Size(); idx++) - { - objectsTheyDoHave.Push(constructedReplicaList[idx]->replica, _FILE_AND_LINE_ ); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Connection_RM3::HasReplicaConstructed(RakNet::Replica3 *replica) -{ - bool objectExists; - constructedReplicaList.GetIndexFromKey(replica, &objectExists); - return objectExists; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -void Connection_RM3::SendSerializeHeader(RakNet::Replica3 *replica, RakNet::Time timestamp, RakNet::BitStream *bs, WorldId worldId) -{ - bs->Reset(); - - if (timestamp!=0) - { - bs->Write((MessageID)ID_TIMESTAMP); - bs->Write(timestamp); - } - bs->Write((MessageID)ID_REPLICA_MANAGER_SERIALIZE); - bs->Write(worldId); - bs->Write(replica->GetNetworkID()); -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -void Connection_RM3::ClearDownloadGroup(RakPeerInterface *rakPeerInterface) -{ - unsigned int i; - for (i=0; i < downloadGroup.Size(); i++) - rakPeerInterface->DeallocatePacket(downloadGroup[i]); - downloadGroup.Clear(__FILE__,__LINE__); -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -SendSerializeIfChangedResult Connection_RM3::SendSerialize(RakNet::Replica3 *replica, bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::BitStream serializationData[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakNet::Time timestamp, PRO sendParameters[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS], RakPeerInterface *rakPeer, unsigned char worldId, RakNet::Time curTime) -{ - bool channelHasData; - BitSize_t sum=0; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (indicesToSend[z]) - sum+=serializationData[z].GetNumberOfBitsUsed(); - } - - RakNet::BitStream out; - BitSize_t bitsPerChannel[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - - if (sum==0) - { - memset(bitsPerChannel, 0, sizeof(bitsPerChannel)); - replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); - return SSICR_DID_NOT_SEND_DATA; - } - - RakAssert(replica->GetNetworkID()!=UNASSIGNED_NETWORK_ID); - - BitSize_t bitsUsed; - - int channelIndex; - PRO lastPro=sendParameters[0]; - - for (channelIndex=0; channelIndex < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex++) - { - if (channelIndex==0) - { - SendSerializeHeader(replica, timestamp, &out, worldId); - } - else if (lastPro!=sendParameters[channelIndex]) - { - // Write out remainder - for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) - { - bitsPerChannel[channelIndex2]=0; - out.Write(false); - } - - // Send remainder - replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); - rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); - - // If no data left to send, quit out - bool anyData=false; - for (int channelIndex2=channelIndex; channelIndex2 < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; channelIndex2++) - { - if 
(serializationData[channelIndex2].GetNumberOfBitsUsed()>0) - { - anyData=true; - break; - } - } - if (anyData==false) - return SSICR_SENT_DATA; - - // Restart stream - SendSerializeHeader(replica, timestamp, &out, worldId); - - for (int channelIndex2=0; channelIndex2 < channelIndex; channelIndex2++) - { - bitsPerChannel[channelIndex2]=0; - out.Write(false); - } - lastPro=sendParameters[channelIndex]; - } - - bitsUsed=serializationData[channelIndex].GetNumberOfBitsUsed(); - channelHasData = indicesToSend[channelIndex]==true && bitsUsed>0; - out.Write(channelHasData); - if (channelHasData) - { - bitsPerChannel[channelIndex] = bitsUsed; - out.WriteCompressed(bitsUsed); - out.AlignWriteToByteBoundary(); - out.Write(serializationData[channelIndex]); - // Crap, forgot this line, was a huge bug in that I'd only send to the first 3 systems - serializationData[channelIndex].ResetReadPointer(); - } - else - { - bitsPerChannel[channelIndex] = 0; - } - } - replica->OnSerializeTransmission(&out, this, bitsPerChannel, curTime); - rakPeer->Send(&out,lastPro.priority,lastPro.reliability,lastPro.orderingChannel,systemAddress,false,lastPro.sendReceipt); - return SSICR_SENT_DATA; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -SendSerializeIfChangedResult Connection_RM3::SendSerializeIfChanged(LastSerializationResult *lsr, SerializeParameters *sp, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager, RakNet::Time curTime) -{ - RakNet::Replica3 *replica = lsr->replica; - - if (replica->GetNetworkID()==UNASSIGNED_NETWORK_ID) - return SSICR_DID_NOT_SEND_DATA; - - RM3QuerySerializationResult rm3qsr = replica->QuerySerialization(this); - if (rm3qsr==RM3QSR_NEVER_CALL_SERIALIZE) - { - // Never again for this connection and replica pair - OnNeverSerialize(lsr, replicaManager); - return SSICR_NEVER_SERIALIZE; - } - - if (rm3qsr==RM3QSR_DO_NOT_CALL_SERIALIZE) - return SSICR_DID_NOT_SEND_DATA; - - if (replica->forceSendUntilNextUpdate) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (replica->lastSentSerialization.indicesToSend[z]) - sp->bitsWrittenSoFar+=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed(); - } - return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, replica->lastSentSerialization.bitStream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); - } - - for (int i=0; i < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; i++) - { - sp->outputBitstream[i].Reset(); - if (lsr->lastSerializationResultBS) - sp->lastSentBitstream[i]=&lsr->lastSerializationResultBS->bitStream[i]; - else - sp->lastSentBitstream[i]=&replica->lastSentSerialization.bitStream[i]; - } - - RM3SerializationResult serializationResult = replica->Serialize(sp); - - if (serializationResult==RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION) - { - // Never again for this connection and replica pair - OnNeverSerialize(lsr, replicaManager); - return SSICR_NEVER_SERIALIZE; - } - - if (serializationResult==RM3SR_DO_NOT_SERIALIZE) - { - // Don't serialize this tick only - return SSICR_DID_NOT_SEND_DATA; - } - - // This is necessary in case the user in the Serialize() function for some reason read the bitstream they also wrote - // WIthout this code, the Write calls to another bitstream would not write the entire bitstream - BitSize_t sum=0; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - 
sp->outputBitstream[z].ResetReadPointer(); - sum+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - } - - if (sum==0) - { - // Don't serialize this tick only - return SSICR_DID_NOT_SEND_DATA; - } - - if (serializationResult==RM3SR_SERIALIZED_ALWAYS) - { - bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - allIndices[z]=true; - - lsr->AllocBS(); - lsr->lastSerializationResultBS->bitStream[z].Reset(); - lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - } - return SendSerialize(replica, allIndices, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); - } - - if (serializationResult==RM3SR_SERIALIZED_ALWAYS_IDENTICALLY) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - replica->lastSentSerialization.indicesToSend[z]=sp->outputBitstream[z].GetNumberOfBitsUsed()>0; - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - replica->lastSentSerialization.bitStream[z].Reset(); - replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - replica->forceSendUntilNextUpdate=true; - } - return SendSerialize(replica, replica->lastSentSerialization.indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); - } - - bool indicesToSend[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) - { - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && - (serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION || - ((sp->outputBitstream[z].GetNumberOfBitsUsed()!=replica->lastSentSerialization.bitStream[z].GetNumberOfBitsUsed() || - memcmp(sp->outputBitstream[z].GetData(), replica->lastSentSerialization.bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0)))) - { - indicesToSend[z]=true; - replica->lastSentSerialization.indicesToSend[z]=true; - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - replica->lastSentSerialization.bitStream[z].Reset(); - replica->lastSentSerialization.bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - replica->forceSendUntilNextUpdate=true; - } - else - { - indicesToSend[z]=false; - replica->lastSentSerialization.indicesToSend[z]=false; - } - } - } - else - { - lsr->AllocBS(); - - // RM3SR_SERIALIZED_UNIQUELY - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - if (sp->outputBitstream[z].GetNumberOfBitsUsed() > 0 && - (sp->outputBitstream[z].GetNumberOfBitsUsed()!=lsr->lastSerializationResultBS->bitStream[z].GetNumberOfBitsUsed() || - memcmp(sp->outputBitstream[z].GetData(), lsr->lastSerializationResultBS->bitStream[z].GetData(), sp->outputBitstream[z].GetNumberOfBytesUsed())!=0) - ) - { - indicesToSend[z]=true; - sp->bitsWrittenSoFar+=sp->outputBitstream[z].GetNumberOfBitsUsed(); - lsr->lastSerializationResultBS->bitStream[z].Reset(); - lsr->lastSerializationResultBS->bitStream[z].Write(&sp->outputBitstream[z]); - sp->outputBitstream[z].ResetReadPointer(); - } - else - { - indicesToSend[z]=false; - } - } - } - - - if (serializationResult==RM3SR_BROADCAST_IDENTICALLY || serializationResult==RM3SR_BROADCAST_IDENTICALLY_FORCE_SERIALIZATION) - 
replica->forceSendUntilNextUpdate=true; - - // Send out the data - return SendSerialize(replica, indicesToSend, sp->outputBitstream, sp->messageTimestamp, sp->pro, rakPeer, worldId, curTime); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -void Connection_RM3::OnLocalReference(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - RakAssert(replica3); - (void) replicaManager; - (void) constructionMode; - -#ifdef _DEBUG - for (unsigned int i=0; i < queryToConstructReplicaList.Size(); i++) - { - if (queryToConstructReplicaList[i]->replica==replica3) - { - RakAssert("replica added twice to queryToConstructReplicaList" && 0); - } - } - - if (constructedReplicaList.HasData(replica3)==true) - { - RakAssert("replica added to queryToConstructReplicaList when already in constructedReplicaList" && 0); - } -#endif - - LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); - lsr->replica=replica3; - queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDereference(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - - if (replica3->GetNetworkIDManager() == 0) - return; - - LastSerializationResult* lsr=0; - unsigned int idx; - - bool objectExists; - idx=constructedReplicaList.GetIndexFromKey(replica3, &objectExists); - if (objectExists) - { - lsr=constructedReplicaList[idx]; - constructedReplicaList.RemoveAtIndex(idx); - } - - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - if (queryToConstructReplicaList[idx]->replica==replica3) - { - lsr=queryToConstructReplicaList[idx]; - queryToConstructReplicaList.RemoveAtIndex(idx); - break; - } - } - - for (idx=0; idx < queryToSerializeReplicaList.Size(); idx++) - { - if (queryToSerializeReplicaList[idx]->replica==replica3) - { - lsr=queryToSerializeReplicaList[idx]; - queryToSerializeReplicaList.RemoveAtIndex(idx); - break; - } - } - - for (idx=0; idx < queryToDestructReplicaList.Size(); idx++) - { - if (queryToDestructReplicaList[idx]->replica==replica3) - { - lsr=queryToDestructReplicaList[idx]; - queryToDestructReplicaList.RemoveAtIndex(idx); - break; - } - } - - ValidateLists(replicaManager); - - if (lsr) - RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); - - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDownloadFromThisSystem(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - RakAssert(replica3); - - ValidateLists(replicaManager); - LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); - lsr->replica=replica3; - - ConstructionMode constructionMode = QueryConstructionMode(); - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int j; - for (j=0; j < queryToConstructReplicaList.Size(); j++) - { - if 
(queryToConstructReplicaList[j]->replica==replica3 ) - { - queryToConstructReplicaList.RemoveAtIndex(j); - break; - } - } - - queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); - } - - if (constructedReplicaList.Insert(lsr->replica, lsr, true, _FILE_AND_LINE_) != (unsigned) -1) - { - //assert(queryToSerializeReplicaList.GetIndexOf(replica3)==(unsigned int)-1); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); - } - - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDownloadFromOtherSystem(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int j; - for (j=0; j < queryToConstructReplicaList.Size(); j++) - { - if (queryToConstructReplicaList[j]->replica==replica3 ) - { - return; - } - } - - OnLocalReference(replica3, replicaManager); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnNeverConstruct(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; - queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); - RakNet::OP_DELETE(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnConstructToThisConnection(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; - queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); - //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); - //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); - //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnConstructToThisConnection(Replica3 *replica, ReplicaManager3 *replicaManager) -{ - RakAssert(replica); - 
RakAssert(QueryConstructionMode()==QUERY_CONNECTION_FOR_REPLICA_LIST); - (void) replicaManager; - - LastSerializationResult* lsr=RakNet::OP_NEW(_FILE_AND_LINE_); - lsr->replica=replica; - constructedReplicaList.Insert(replica,lsr,true,_FILE_AND_LINE_); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnNeverSerialize(LastSerializationResult *lsr, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - - unsigned int j; - for (j=0; j < queryToSerializeReplicaList.Size(); j++) - { - if (queryToSerializeReplicaList[j]==lsr ) - { - queryToSerializeReplicaList.RemoveAtIndex(j); - break; - } - } - - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnReplicaAlreadyExists(unsigned int queryToConstructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = queryToConstructReplicaList[queryToConstructIdx]; - queryToConstructReplicaList.RemoveAtIndex(queryToConstructIdx); - //assert(constructedReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - constructedReplicaList.Insert(lsr->replica,lsr,true,_FILE_AND_LINE_); - //assert(queryToDestructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToDestructReplicaList.Push(lsr,_FILE_AND_LINE_); - //assert(queryToSerializeReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToSerializeReplicaList.Push(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDownloadExisting(Replica3* replica3, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - - ConstructionMode constructionMode = QueryConstructionMode(); - if (constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION) - { - unsigned int idx; - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - if (queryToConstructReplicaList[idx]->replica==replica3) - { - OnConstructToThisConnection(idx, replicaManager); - return; - } - } - } - else - { - OnConstructToThisConnection(replica3, replicaManager); - } -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnSendDestructionFromQuery(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) -{ - ConstructionMode constructionMode = QueryConstructionMode(); - RakAssert(constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION || constructionMode==QUERY_REPLICA_FOR_CONSTRUCTION_AND_DESTRUCTION); - (void) constructionMode; - - ValidateLists(replicaManager); - LastSerializationResult* lsr = 
queryToDestructReplicaList[queryToDestructIdx]; - queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); - unsigned int j; - for (j=0; j < queryToSerializeReplicaList.Size(); j++) - { - if (queryToSerializeReplicaList[j]->replica==lsr->replica ) - { - queryToSerializeReplicaList.RemoveAtIndex(j); - break; - } - } - for (j=0; j < constructedReplicaList.Size(); j++) - { - if (constructedReplicaList[j]->replica==lsr->replica ) - { - constructedReplicaList.RemoveAtIndex(j); - break; - } - } - //assert(queryToConstructReplicaList.GetIndexOf(lsr->replica)==(unsigned int)-1); - queryToConstructReplicaList.Push(lsr,_FILE_AND_LINE_); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::OnDoNotQueryDestruction(unsigned int queryToDestructIdx, ReplicaManager3 *replicaManager) -{ - ValidateLists(replicaManager); - queryToDestructReplicaList.RemoveAtIndex(queryToDestructIdx); - ValidateLists(replicaManager); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::ValidateLists(ReplicaManager3 *replicaManager) const -{ - (void) replicaManager; - /* -#ifdef _DEBUG - // Each object should exist only once in either constructedReplicaList or queryToConstructReplicaList - // replicaPointer from LastSerializationResult should be same among all lists - unsigned int idx, idx2; - for (idx=0; idx < constructedReplicaList.Size(); idx++) - { - idx2=queryToConstructReplicaList.GetIndexOf(constructedReplicaList[idx]->replica); - if (idx2!=(unsigned int)-1) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - idx2=constructedReplicaList.GetIndexOf(queryToConstructReplicaList[idx]->replica); - if (idx2!=(unsigned int)-1) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - LastSerializationResult *lsr, *lsr2; - for (idx=0; idx < constructedReplicaList.Size(); idx++) - { - lsr=constructedReplicaList[idx]; - - idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToSerializeReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToDestructReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - } - for (idx=0; idx < queryToConstructReplicaList.Size(); idx++) - { - lsr=queryToConstructReplicaList[idx]; - - idx2=queryToSerializeReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToSerializeReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - idx2=queryToDestructReplicaList.GetIndexOf(lsr->replica); - if (idx2!=(unsigned int)-1) - { - lsr2=queryToDestructReplicaList[idx2]; - if (lsr2!=lsr) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - } - - // Verify pointer integrity - for (idx=0; idx < constructedReplicaList.Size(); idx++) - { - if (constructedReplicaList[idx]->replica->replicaManager!=replicaManager) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } - - // Verify pointer integrity - for (idx=0; idx < 
queryToConstructReplicaList.Size(); idx++) - { - if (queryToConstructReplicaList[idx]->replica->replicaManager!=replicaManager) - { - int a=5; - assert(a==0); - int *b=0; - *b=5; - } - } -#endif - */ -} -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::SendConstruction(DataStructures::List &newObjects, DataStructures::List &deletedObjects, PRO sendParameters, RakNet::RakPeerInterface *rakPeer, unsigned char worldId, ReplicaManager3 *replicaManager3) -{ - if (newObjects.Size()==0 && deletedObjects.Size()==0) - return; - - // All construction and destruction takes place in the same network message - // Otherwise, if objects rely on each other being created the same tick to be valid, this won't always be true - // DataStructures::List serializedObjects; - BitSize_t offsetStart, offsetStart2, offsetEnd; - unsigned int newListIndex, oldListIndex; - RakNet::BitStream bsOut; - NetworkID networkId; - if (isFirstConstruction) - { - bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_STARTED); - bsOut.Write(worldId); - SerializeOnDownloadStarted(&bsOut); - rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); - } - - // LastSerializationResult* lsr; - bsOut.Reset(); - bsOut.Write((MessageID)ID_REPLICA_MANAGER_CONSTRUCTION); - bsOut.Write(worldId); - uint16_t objectSize = (uint16_t) newObjects.Size(); - bsOut.Write(objectSize); - - // Construction - for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) - { - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); // overwritten to point to the end of the stream - networkId=newObjects[newListIndex]->GetNetworkID(); - bsOut.Write(networkId); - - RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); - bool actuallyCreateObject = cs==RM3CS_SEND_CONSTRUCTION; - bsOut.Write(actuallyCreateObject); - bsOut.AlignWriteToByteBoundary(); - - if (actuallyCreateObject) - { - // Actually create the object - bsOut.Write(newObjects[newListIndex]->creatingSystemGUID); - offsetStart2=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart2); // overwritten to point to after the call to WriteAllocationID - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - newObjects[newListIndex]->WriteAllocationID(this, &bsOut); - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart2); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - newObjects[newListIndex]->SerializeConstruction(&bsOut, this); - } - else - { - newObjects[newListIndex]->SerializeConstructionExisting(&bsOut, this); - } - - bsOut.AlignWriteToByteBoundary(); - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - - RakNet::BitStream bsOut2; - for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) - { - bsOut2.Reset(); - RM3ConstructionState cs = newObjects[newListIndex]->QueryConstruction(this, replicaManager3); - if (cs==RM3CS_SEND_CONSTRUCTION) - { - newObjects[newListIndex]->PostSerializeConstruction(&bsOut2, this); - } - else - { - RakAssert(cs==RM3CS_ALREADY_EXISTS_REMOTELY); - 
newObjects[newListIndex]->PostSerializeConstructionExisting(&bsOut2, this); - } - if (bsOut2.GetNumberOfBitsUsed()>0) - { - bsOut.Write(true); - bsOut.AlignWriteToByteBoundary(); - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); // overwritten to point to the end of the stream - networkId=newObjects[newListIndex]->GetNetworkID(); - bsOut.Write(networkId); - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - bsOut.Write(&bsOut2); - bsOut.AlignWriteToByteBoundary(); // Give the user an aligned bitStream in case they use memcpy - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - else - bsOut.Write(false); - } - bsOut.AlignWriteToByteBoundary(); - - // Destruction - objectSize = (uint16_t) deletedObjects.Size(); - bsOut.Write(objectSize); - for (oldListIndex=0; oldListIndex < deletedObjects.Size(); oldListIndex++) - { - networkId=deletedObjects[oldListIndex]->GetNetworkID(); - bsOut.Write(networkId); - offsetStart=bsOut.GetWriteOffset(); - bsOut.Write(offsetStart); - deletedObjects[oldListIndex]->deletingSystemGUID=rakPeer->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS); - bsOut.Write(deletedObjects[oldListIndex]->deletingSystemGUID); - deletedObjects[oldListIndex]->SerializeDestruction(&bsOut, this); - bsOut.AlignWriteToByteBoundary(); - offsetEnd=bsOut.GetWriteOffset(); - bsOut.SetWriteOffset(offsetStart); - bsOut.Write(offsetEnd); - bsOut.SetWriteOffset(offsetEnd); - } - rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); - - // TODO - shouldn't this be part of construction? - - // Initial Download serialize to a new system - // Immediately send serialize after construction if the replica object already has saved data - // If the object was serialized identically, and does not change later on, then the new connection never gets the data - SerializeParameters sp; - sp.whenLastSerialized=0; - RakNet::BitStream emptyBs; - for (int index=0; index < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; index++) - { - sp.lastSentBitstream[index]=&emptyBs; - sp.pro[index]=sendParameters; - sp.pro[index].reliability=RELIABLE_ORDERED; - } - - sp.bitsWrittenSoFar=0; -// RakNet::Time t = RakNet::GetTimeMS(); - for (newListIndex=0; newListIndex < newObjects.Size(); newListIndex++) - { - sp.destinationConnection=this; - sp.messageTimestamp=0; - RakNet::Replica3 *replica = newObjects[newListIndex]; - // 8/22/09 Forgot ResetWritePointer - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp.outputBitstream[z].ResetWritePointer(); - } - - RM3SerializationResult res = replica->Serialize(&sp); - if (res!=RM3SR_NEVER_SERIALIZE_FOR_THIS_CONNECTION && - res!=RM3SR_DO_NOT_SERIALIZE && - res!=RM3SR_SERIALIZED_UNIQUELY) - { - bool allIndices[RM3_NUM_OUTPUT_BITSTREAM_CHANNELS]; - for (int z=0; z < RM3_NUM_OUTPUT_BITSTREAM_CHANNELS; z++) - { - sp.bitsWrittenSoFar+=sp.outputBitstream[z].GetNumberOfBitsUsed(); - allIndices[z]=true; - } - SendSerialize(replica, allIndices, sp.outputBitstream, sp.messageTimestamp, sp.pro, rakPeer, worldId, GetTime()); -/// newObjects[newListIndex]->whenLastSerialized=t; - - } - // else wait for construction request accepted before serializing - } - - if (isFirstConstruction) - { - bsOut.Reset(); - bsOut.Write((MessageID)ID_REPLICA_MANAGER_DOWNLOAD_COMPLETE); - bsOut.Write(worldId); - SerializeOnDownloadComplete(&bsOut); - 
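// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
// SendConstruction() above repeatedly writes a placeholder offset, serializes a payload of unknown size, then seeks
// back and overwrites the placeholder with the end offset (the comments note it is "overwritten to point to the end
// of the stream"), which presumably lets a reader skip past a payload it cannot interpret. A minimal sketch of that
// back-patching pattern on a plain byte buffer; GrowableBuffer is a hypothetical type for illustration, not RakNet's
// BitStream API.
#include <cstdint>
#include <cstring>
#include <vector>

class GrowableBuffer {
public:
    // Reserve room for a 32-bit "end offset" and return where it lives.
    std::size_t writePlaceholder() {
        std::size_t at = data_.size();
        data_.resize(data_.size() + sizeof(std::uint32_t), 0);
        return at;
    }
    void writeBytes(const void *p, std::size_t n) {
        const std::uint8_t *b = static_cast<const std::uint8_t*>(p);
        data_.insert(data_.end(), b, b + n);
    }
    // Store the current end offset into the previously reserved slot.
    void backpatch(std::size_t placeholderAt) {
        std::uint32_t end = static_cast<std::uint32_t>(data_.size());
        std::memcpy(&data_[placeholderAt], &end, sizeof(end));
    }
    const std::vector<std::uint8_t>& bytes() const { return data_; }
private:
    std::vector<std::uint8_t> data_;
};

// Usage: reserve, write the variable-length payload, then patch the reserved slot.
//   GrowableBuffer buf;
//   std::size_t slot = buf.writePlaceholder();
//   buf.writeBytes("payload", 7);
//   buf.backpatch(slot);   // slot now holds the offset one past the payload
// --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------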
rakPeer->Send(&bsOut,sendParameters.priority,RELIABLE_ORDERED,sendParameters.orderingChannel,systemAddress,false,sendParameters.sendReceipt); - } - - isFirstConstruction=false; - -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Connection_RM3::SendValidation(RakNet::RakPeerInterface *rakPeer, WorldId worldId) -{ - // Hijack to mean sendValidation - RakNet::BitStream bsOut; - bsOut.Write((MessageID)ID_REPLICA_MANAGER_SCOPE_CHANGE); - bsOut.Write(worldId); - rakPeer->Send(&bsOut,HIGH_PRIORITY,RELIABLE_ORDERED,0,systemAddress,false); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3::Replica3() -{ - creatingSystemGUID=UNASSIGNED_RAKNET_GUID; - deletingSystemGUID=UNASSIGNED_RAKNET_GUID; - replicaManager=0; - forceSendUntilNextUpdate=false; - lsr=0; - referenceIndex = (uint32_t)-1; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -Replica3::~Replica3() -{ - if (replicaManager) - { - replicaManager->Dereference(this); - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -void Replica3::BroadcastDestruction(void) -{ - replicaManager->BroadcastDestruction(this,UNASSIGNED_SYSTEM_ADDRESS); -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RakNetGUID Replica3::GetCreatingSystemGUID(void) const -{ - return creatingSystemGUID; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ConstructionState Replica3::QueryConstruction_ClientConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - (void) destinationConnection; - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3CS_SEND_CONSTRUCTION; - // Send back to the owner client too, because they couldn't assign the network ID - if (isThisTheServer) - return RM3CS_SEND_CONSTRUCTION; - return RM3CS_NEVER_CONSTRUCT; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Replica3::QueryRemoteConstruction_ClientConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) -{ - (void) sourceConnection; - (void) isThisTheServer; - - // OK to create - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ConstructionState Replica3::QueryConstruction_ServerConstruction(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - (void) destinationConnection; - - if 
(isThisTheServer) - return RM3CS_SEND_CONSTRUCTION; - return RM3CS_NEVER_CONSTRUCT; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Replica3::QueryRemoteConstruction_ServerConstruction(RakNet::Connection_RM3 *sourceConnection, bool isThisTheServer) -{ - (void) sourceConnection; - if (isThisTheServer) - return false; - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ConstructionState Replica3::QueryConstruction_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) -{ - (void) destinationConnection; - - if (p2pMode==R3P2PM_SINGLE_OWNER) - { - // We send to all, others do nothing - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3CS_SEND_CONSTRUCTION; - - // RM3CS_NEVER_CONSTRUCT will not send the object, and will not Serialize() it - return RM3CS_NEVER_CONSTRUCT; - } - else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) - { - return RM3CS_SEND_CONSTRUCTION; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) - { - return RM3CS_ALREADY_EXISTS_REMOTELY; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) - { - return RM3CS_ALREADY_EXISTS_REMOTELY_DO_NOT_CONSTRUCT; - } - else - { - RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); - - // RM3CS_ALREADY_EXISTS_REMOTELY will not send the object, but WILL call QuerySerialization() and Serialize() on it. 
- return RM3CS_ALREADY_EXISTS_REMOTELY; - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -bool Replica3::QueryRemoteConstruction_PeerToPeer(RakNet::Connection_RM3 *sourceConnection) -{ - (void) sourceConnection; - - return true; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3QuerySerializationResult Replica3::QuerySerialization_ClientSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - // Owner client sends to all - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3QSR_CALL_SERIALIZE; - // Server sends to all but owner client - if (isThisTheServer && destinationConnection->GetRakNetGUID()!=creatingSystemGUID) - return RM3QSR_CALL_SERIALIZE; - // Remote clients do not send - return RM3QSR_NEVER_CALL_SERIALIZE; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3QuerySerializationResult Replica3::QuerySerialization_ServerSerializable(RakNet::Connection_RM3 *destinationConnection, bool isThisTheServer) -{ - (void) destinationConnection; - // Server sends to all - if (isThisTheServer) - return RM3QSR_CALL_SERIALIZE; - - // Clients do not send - return RM3QSR_NEVER_CALL_SERIALIZE; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3QuerySerializationResult Replica3::QuerySerialization_PeerToPeer(RakNet::Connection_RM3 *destinationConnection, Replica3P2PMode p2pMode) -{ - (void) destinationConnection; - - if (p2pMode==R3P2PM_SINGLE_OWNER) - { - // Owner peer sends to all - if (creatingSystemGUID==replicaManager->GetRakPeerInterface()->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS)) - return RM3QSR_CALL_SERIALIZE; - - // Remote peers do not send - return RM3QSR_NEVER_CALL_SERIALIZE; - } - else if (p2pMode==R3P2PM_MULTI_OWNER_CURRENTLY_AUTHORITATIVE) - { - return RM3QSR_CALL_SERIALIZE; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_CURRENTLY_AUTHORITATIVE) - { - return RM3QSR_CALL_SERIALIZE; - } - else if (p2pMode==R3P2PM_STATIC_OBJECT_NOT_CURRENTLY_AUTHORITATIVE) - { - return RM3QSR_DO_NOT_CALL_SERIALIZE; - } - else - { - RakAssert(p2pMode==R3P2PM_MULTI_OWNER_NOT_CURRENTLY_AUTHORITATIVE); - return RM3QSR_DO_NOT_CALL_SERIALIZE; - } -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Client(RakNet::Connection_RM3 *droppedConnection) const -{ - (void) droppedConnection; - return RM3AOPC_DELETE_REPLICA; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_Server(RakNet::Connection_RM3 *droppedConnection) const -{ - (void) 
droppedConnection; - return RM3AOPC_DELETE_REPLICA_AND_BROADCAST_DESTRUCTION; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -RM3ActionOnPopConnection Replica3::QueryActionOnPopConnection_PeerToPeer(RakNet::Connection_RM3 *droppedConnection) const -{ - (void) droppedConnection; - return RM3AOPC_DELETE_REPLICA; -} - -// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -#endif // _RAKNET_SUPPORT_* diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh deleted file mode 100644 index 72b0a36..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -e -MODE=${1} -echo running in mode ${MODE} ... -mkdir -p build/${MODE} -pushd build/${MODE} -cmake ../.. -DCMAKE_BUILD_TYPE=$MODE -DCMAKE_MODULE_PATH=$(pwd)/../../cmaki -DFIRST_ERROR=1 -cmake --build . --config $MODE --target install -- -j8 -k || cmake --build . --config ${MODE} --target install -- -j1 -ctest . --no-compress-output --output-on-failure -T Test -C ${MODE} -V -popd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh deleted file mode 100644 index 967bf29..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_test.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -function print_if_has_content() -{ - file=$1 - minimumsize=400 - actualsize=$(wc -c <"$file") - if [ $actualsize -ge $minimumsize ]; - then - cat $file - fi -} - -echo Running test: $1 -export ASAN_SYMBOLIZER_PATH=$(which llvm-symbolizer-3.6) -export ASAN_OPTIONS="check_initialization_order=1" -rm $1.coverage 2> /dev/null -rm $1.gcno 2> /dev/null -rm default.profraw 2> /dev/null -./$1 -ret=$? 
-llvm-profdata-3.6 merge -o $1.gcno default.profraw 2> /dev/null -llvm-cov-3.6 show ./$1 -instr-profile=$1.gcno > $1.coverage -cat $1.coverage | ansi2html > $1.html -print_if_has_content $1.html -exit $ret - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py deleted file mode 100644 index 66f01d7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/run_tests.py +++ /dev/null @@ -1,175 +0,0 @@ -import os -import utils -import logging -from third_party import platforms -from third_party import build_unittests_foldername -from itertools import product -from third_party import get_identifier - -def run_tests(node, parameters, compiler_replace_maps, unittests): - - old_cwd = os.getcwd() - - rootdir = parameters.rootdir - rootdir = utils.get_norm_path(rootdir) - rootdir = rootdir.replace('\\', '/') - - cmakelib_dir = parameters.cmakefiles - cmakelib_dir = utils.get_norm_path(cmakelib_dir) - cmakelib_dir = cmakelib_dir.replace('\\', '/') - - cmake3p_dir = parameters.prefix - cmake3p_dir = utils.get_norm_path(cmake3p_dir) - cmake3p_dir = cmake3p_dir.replace('\\', '/') - - cmake_prefix = parameters.prefix - cmake_prefix = utils.get_norm_path(cmake_prefix) - cmake_prefix = cmake_prefix.replace('\\', '/') - - cmake_third_party_dir = parameters.third_party_dir - cmake_third_party_dir = utils.get_norm_path(cmake_third_party_dir) - cmake_third_party_dir = cmake_third_party_dir.replace('\\', '/') - - package = node.get_package_name() - package_upper = node.get_package_name_norm_upper() - version = node.get_version() - packing = node.is_packing() - if not packing: - logging.warning("No need run_tests, because wasn't generated a package") - return 0 - - # prepare unittests - # can be a file or content - unittest_value = node.get_unittest() - if unittest_value is not None: - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, build_modes): - builddir = node.get_build_directory(plat, build_mode) - path_test = os.path.join(builddir, build_unittests_foldername) - utils.trymkdir(path_test) - - # is is a file - unittest_path = os.path.join(builddir, unittest_value) - if os.path.isfile(unittest_path): - with open(unittest_path, 'rt') as f: - unittest_value = f.read() - - with open(os.path.join(path_test, 'main.cpp'), 'wt') as f: - f.write(unittest_value) - - if parameters.fast: - logging.debug('skipping for because is in fast mode: "prepare"') - break - else: - logging.warning('[%s] No test present.' 
% package) - - folder_3rdparty = parameters.third_party_dir - output_3rdparty = os.path.join(folder_3rdparty, node.get_base_folder()) - - build_modes = node.get_build_modes() - for plat, build_mode in product(platforms, reversed(build_modes)): - for compiler_c, compiler_cpp, generator, _, _, env_modified, _ in node.compiler_iterator(plat, compiler_replace_maps): - # verify md5sum - install_directory = node.get_install_directory(plat) - workspace = node.get_workspace(plat) - utils.trymkdir(install_directory) - with utils.working_directory(install_directory): - prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % workspace) - prefix_package_md5 = os.path.join(output_3rdparty, '%s.md5' % workspace) - if os.path.exists(prefix_package) and os.path.exists(prefix_package_md5): - with open(prefix_package_md5, 'rt') as f: - md5sum = f.read().strip() - - try: - logging.debug("expected md5: %s" % md5sum) - for line in utils.get_stdout('cmake -E md5sum %s' % prefix_package, env_modified, 'cmake'): - if len(line) > 0: - # md5sum filename - chunks = line.split(' ') - chunks = list(filter(None, chunks)) - assert(len(chunks) > 0) - md5sum_real = chunks[0] - logging.debug("real md5: %s" % md5sum_real) - - if (md5sum != md5sum_real): - logging.error('Error in generated md5sum file!!!') - logging.error('Expected: %s' % md5sum) - logging.error('Found: %s' % md5sum_real) - # add error to node - node.ret += 1 - except utils.NotFoundProgram: - logging.info('can\'t verify md5 because cmake was not found') - else: - logging.warning('Skipping md5 verification because the package or md5 does not exist') - - logging.info('running unittests. Build mode: %s Platform: %s' % (build_mode, plat)) - - # Careful when deleting cmake3p: it also removes the marker - # node.remove_cmake3p( cmake3p_dir ) - - builddir = os.path.join(old_cwd, node.get_build_directory(plat, build_mode)) - logging.info('Using builddir %s' % builddir) - unittest_folder = os.path.join(builddir, build_unittests_foldername) - unittest_found = os.path.join(unittest_folder, 'main.cpp') - unittest_found = unittest_found.replace('\\', '/') - unittest_root = os.path.join(old_cwd, build_unittests_foldername) - - if os.path.exists(unittest_found): - - logging.info('Search cmakelib in %s' % cmakelib_dir) - if os.path.isdir(os.path.join(cmakelib_dir)): - - with utils.working_directory(unittest_folder): - - generator_extra = '' - if generator is not None: - generator_extra = '-G"%s"' % generator - - find_packages = [] - find_packages.append(package) - for dep in node.get_depends_raw(): - package_name = dep.get_package_name() - find_packages.append(package_name) - find_packages_str = ';'.join(find_packages) - - # remove CMakeCache.txt to avoid problems when - # changing the generator - utils.tryremove('CMakeCache.txt') - utils.tryremove('cmake_install.cmake') - utils.tryremove('install_manifest.txt') - utils.tryremove_dir('CMakeFiles') - - cmd = 'cmake %s %s -DNPP_ARTIFACTS_PATH="%s" -DCMAKI_COMPILER="%s" -DCMAKI_PLATFORM="%s" -DCMAKE_MODULE_PATH="%s" -DPACKAGE="%s" -DPACKAGE_UPPER="%s" -DCMAKE_BUILD_TYPE="%s" -DCMAKE_PREFIX_PATH="%s" -DUNITTEST_PATH="%s" -DDEPENDS_PATH="%s" -DFIND_PACKAGES="%s" && cmake --build . --config %s --target install && ctest .
-C %s --output-on-failure -VV' % ( - unittest_root, - generator_extra, - cmake_prefix, - get_identifier('COMPILER'), - get_identifier('ALL'), - cmakelib_dir, - package, - package_upper, - build_mode, - cmake_third_party_dir, - unittest_found, - cmake_prefix, - find_packages_str, - build_mode, - build_mode) - ret = utils.safe_system(cmd, env=env_modified) - node.ret += abs(ret) - if ret != 0: - unittests[ '%s - %s' % (package, version) ] = 'ERROR: Fail test' - else: - unittests[ '%s - %s' % (package, version) ] = 'OK: Pass test' - else: - unittests[ '%s - %s' % (package, version) ] = 'WARN: No cmakelib available' - else: - unittests[ '%s - %s' % (package, version) ] = 'WARN: No unittest found' - - if node.ret != 0: - logging.warning('Cleaning packages because tests are failed.') - node.remove_packages() - - # successful - return True - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py deleted file mode 100755 index 57fd37a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/save_package.py +++ /dev/null @@ -1,31 +0,0 @@ -import os -import sys -import logging -import argparse -import urllib -import csv -import utils -import subprocess - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--name', required=True, dest='name', help='name package', default=None) - parser.add_argument('--version', required=True, dest='version', help='version package fixed', default=None) - parser.add_argument('--depends', required=True, dest='depends', help='json for save versions', default=None) - parameters = parser.parse_args() - - depends_file = parameters.depends - if os.path.exists(depends_file): - data = utils.deserialize(depends_file) - else: - data = {} - # serialize if is new data - if parameters.name not in data: - data[parameters.name] = parameters.version - logging.info('serialize data = %s' % data) - depends_file_tmp = depends_file + '.tmp' - utils.serialize(data, depends_file_tmp) - ret = subprocess.call('python -m json.tool %s > %s' % (depends_file_tmp, depends_file), shell=True) - os.remove(depends_file_tmp) - sys.exit(ret) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt deleted file mode 100644 index 6683d9c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2-emscripten/CMakeLists.txt +++ /dev/null @@ -1,1366 +0,0 @@ -cmake_minimum_required(VERSION 2.8) -project(SDL2 C) -include(CheckFunctionExists) -include(CheckLibraryExists) -include(CheckIncludeFiles) -include(CheckIncludeFile) -include(CheckSymbolExists) -include(CheckCSourceRuns) -include(CheckCCompilerFlag) -include(CheckTypeSize) -include(CheckStructHasMember) -include(CMakeDependentOption) -include(FindPkgConfig) -set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") -include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) -include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) - -# General settings -# Edit include/SDL_version.h and change the version, then: -# SDL_MICRO_VERSION += 1; -# SDL_INTERFACE_AGE += 1; -# SDL_BINARY_AGE += 1; -# if any functions have been added, set SDL_INTERFACE_AGE to 0. 
-# if backwards compatibility has been broken, -# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. -set(SDL_MAJOR_VERSION 2) -set(SDL_MINOR_VERSION 0) -set(SDL_MICRO_VERSION 3) -set(SDL_INTERFACE_AGE 1) -set(SDL_BINARY_AGE 3) -set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") - -# Calculate a libtool-like version number -math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") -set(LT_REVISION "${SDL_INTERFACE_AGE}") -set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") -set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") - -message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") - -# General settings & flags -set(LIBRARY_OUTPUT_DIRECTORY "build") -# Check for 64 or 32 bit -set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) -if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(ARCH_64 TRUE) - set(PROCESSOR_ARCH "x64") -else() - set(ARCH_64 FALSE) - set(PROCESSOR_ARCH "x86") -endif() -set(LIBNAME SDL2) -if(NOT LIBTYPE) - set(LIBTYPE SHARED) -endif() - -# Get the platform -if(WIN32) - if(NOT WINDOWS) - set(WINDOWS TRUE) - endif() -elseif(UNIX AND NOT APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") - set(LINUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") - set(NETBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") - set(OPENBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") - set(GNU TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") - set(BSDI TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") - set(SYSV5 TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") - set(SOLARIS TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") - set(HPUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") - set(AIX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") - set(MINIX TRUE) - endif() -elseif(APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") - set(DARWIN TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") - set(MACOSX TRUE) - endif() - # TODO: iOS? -elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") - message_error("BeOS support has been removed as of SDL 2.0.2.") -elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") - set(HAIKU TRUE) -endif() - -# Don't mistake osx for unix -if(UNIX AND NOT APPLE) - set(UNIX_SYS ON) -else() - set(UNIX_SYS OFF) -endif() - -if(UNIX OR APPLE) - set(UNIX_OR_MAC_SYS ON) -else() - set(UNIX_OR_MAC_SYS OFF) -endif() - -if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. 
- set(PTHREADS_ENABLED_BY_DEFAULT ON) -else() - set(PTHREADS_ENABLED_BY_DEFAULT OFF) -endif() - -# Default option knobs -if(APPLE OR ARCH_64) - set(OPT_DEF_SSEMATH ON) -endif() -if(UNIX OR MINGW OR MSYS) - set(OPT_DEF_LIBC ON) -endif() - -# Compiler info -if(CMAKE_COMPILER_IS_GNUCC) - set(USE_GCC TRUE) - set(OPT_DEF_ASM TRUE) -elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") - set(USE_CLANG TRUE) - set(OPT_DEF_ASM TRUE) -elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ - set(OPT_DEF_ASM TRUE) - #set(CMAKE_C_FLAGS "/ZI /WX- / -else() - set(OPT_DEF_ASM FALSE) -endif() - -# Default flags, if not set otherwise -if("$ENV{CFLAGS}" STREQUAL "") - if(USE_GCC OR USE_CLANG) - set(CMAKE_C_FLAGS "-g -O3") - endif() -else() - set(CMAKE_C_FLAGS "$ENV{CFLAGS}") - list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") -endif() -if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 - list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") -endif() - -if(MSVC) - option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) - if(FORCE_STATIC_VCRT) - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) - if(${flag_var} MATCHES "/MD") - string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") - endif() - endforeach() - endif() -endif() - -# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, -# etc. are created correctly. -set(SDL_LIBS "-lSDL2") -set(SDL_CFLAGS "") - -# Emscripten toolchain has a nonempty default value for this, and the checks -# in this file need to change that, so remember the original value, and -# restore back to that afterwards. For check_function_exists() to work in -# Emscripten, this value must be at its default value. -set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) - -if(CYGWIN) - # We build SDL on cygwin without the UNIX emulation layer - include_directories("-I/usr/include/mingw") - set(CMAKE_REQUIRED_FLAGS "-mno-cygwin") - check_c_source_compiles("int main(int argc, char **argv) {}" - HAVE_GCC_NO_CYGWIN) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_GCC_NO_CYGWIN) - list(APPEND EXTRA_LDFLAGS "-mno-cygwin") - list(APPEND SDL_LIBS "-mno-cygwin") - endif() - set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") -endif() - -add_definitions(-DUSING_GENERATED_CONFIG_H) -# General includes -include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) - -if(EMSCRIPTEN) - # Set up default values for the currently supported set of subsystems: - # Emscripten/Javascript does not have assembly support, a dynamic library - # loading architecture, low-level CPU inspection or multithreading. 
- set(OPT_DEF_ASM FALSE) - set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) - set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) - set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) - set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) - set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) - set(DLOPEN_ENABLED_BY_DEFAULT OFF) -else() - set(SDL_SHARED_ENABLED_BY_DEFAULT ON) - set(SDL_ATOMIC_ENABLED_BY_DEFAULT ON) - set(SDL_THREADS_ENABLED_BY_DEFAULT ON) - set(SDL_LOADSO_ENABLED_BY_DEFAULT ON) - set(SDL_CPUINFO_ENABLED_BY_DEFAULT ON) - set(DLOPEN_ENABLED_BY_DEFAULT ON) -endif() - -set(SDL_SUBSYSTEMS - Atomic Audio Video Render Events Joystick Haptic Power Threads Timers - File Loadso CPUinfo Filesystem) -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) - set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) - endif() - option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) -endforeach() - -option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") -#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) -set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) -set_option(GCC_ATOMICS "Use gcc builtin atomics" ${USE_GCC}) -set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) -set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) -set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) -set_option(3DNOW "Use 3Dnow! MMX assembly routines" ${OPT_DEF_ASM}) -set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) -set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) -set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) -set_option(DISKAUDIO "Support the disk writer audio driver" ON) -set_option(DUMMYAUDIO "Support the dummy audio driver" ON) -set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) -dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) -set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) -dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND_SHARED" OFF) -set_option(VIDEO_DUMMY "Use dummy video driver" ON) -set_option(VIDEO_OPENGL "Include OpenGL support" ON) -set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) -set_option(PTHREADS "Use POSIX threads for multi-threading" ${PTHREADS_ENABLED_BY_DEFAULT}) -dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) -set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${DLOPEN_ENABLED_BY_DEFAULT}) -set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) -set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) -dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) -set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) -dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) -set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) -dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) -set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) -dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) -set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) -set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) -set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) -set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) -set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) -set_option(INPUT_TSLIB "Use the 
Touchscreen library for input" ${UNIX_SYS}) -set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) -set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) -set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) -dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) -set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) -foreach(_SUB ${SDL_X11_OPTIONS}) - string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) - dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) -endforeach() -set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) -set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) -set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) - -# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here -# The options below are for compatibility to configure's default behaviour. -set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") -set(SDL_STATIC ON CACHE BOOL "Build a static version of the library") - -# General source files -file(GLOB SOURCE_FILES - ${SDL2_SOURCE_DIR}/src/*.c - ${SDL2_SOURCE_DIR}/src/atomic/*.c - ${SDL2_SOURCE_DIR}/src/audio/*.c - ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c - ${SDL2_SOURCE_DIR}/src/dynapi/*.c - ${SDL2_SOURCE_DIR}/src/events/*.c - ${SDL2_SOURCE_DIR}/src/file/*.c - ${SDL2_SOURCE_DIR}/src/libm/*.c - ${SDL2_SOURCE_DIR}/src/render/*.c - ${SDL2_SOURCE_DIR}/src/render/*/*.c - ${SDL2_SOURCE_DIR}/src/stdlib/*.c - ${SDL2_SOURCE_DIR}/src/thread/*.c - ${SDL2_SOURCE_DIR}/src/timer/*.c - ${SDL2_SOURCE_DIR}/src/video/*.c) - - -if(ASSERTIONS STREQUAL "auto") - # Do nada - use optimization settings to determine the assertion level -elseif(ASSERTIONS STREQUAL "disabled") - set(SDL_DEFAULT_ASSERT_LEVEL 0) -elseif(ASSERTIONS STREQUAL "release") - set(SDL_DEFAULT_ASSERT_LEVEL 1) -elseif(ASSERTIONS STREQUAL "enabled") - set(SDL_DEFAULT_ASSERT_LEVEL 2) -elseif(ASSERTIONS STREQUAL "paranoid") - set(SDL_DEFAULT_ASSERT_LEVEL 3) -else() - message_error("unknown assertion level") -endif() -set(HAVE_ASSERTIONS ${ASSERTIONS}) - -# Compiler option evaluation -if(USE_GCC OR USE_CLANG) - if(DEPENDENCY_TRACKING) - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 3 - #error Dependency tracking requires GCC 3.0 or newer - #endif - int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) - endif() - - if(GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - void *x, *y, *z; - __sync_lock_test_and_set(&a, 4); - __sync_lock_test_and_set(&x, y); - __sync_fetch_and_add(&a, 1); - __sync_bool_compare_and_swap(&a, 5, 10); - __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) - if(NOT HAVE_GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - __sync_lock_test_and_set(&a, 1); - __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) - endif() - endif() - - set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") - check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" - HAVE_GCC_PREFERRED_STACK_BOUNDARY) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror -Wno-error=implicit-function-declaration") - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 4 - #error SDL only uses visibility attributes in GCC 4 or newer - #endif - int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) - if(HAVE_GCC_FVISIBILITY) - list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") - endif() - 
set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - check_c_compiler_flag(-Wall HAVE_GCC_WALL) - if(HAVE_GCC_WALL) - if(HAIKU) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") - endif() - endif() -endif() - -if(ASSEMBLY) - if(USE_GCC OR USE_CLANG) - set(SDL_ASSEMBLY_ROUTINES 1) - # TODO: Those all seem to be quite GCC specific - needs to be - # reworked for better compiler support - set(HAVE_ASSEMBLY TRUE) - if(MMX) - set(CMAKE_REQUIRED_FLAGS "-mmmx") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __MMX__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_MMX) - if(HAVE_MMX) - list(APPEND EXTRA_CFLAGS "-mmmx") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(3DNOW) - set(CMAKE_REQUIRED_FLAGS "-m3dnow") - check_c_source_compiles(" - #include - #ifndef __3dNOW__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { - void *p = 0; - _m_prefetch(p); - }" HAVE_3DNOW) - if(HAVE_3DNOW) - list(APPEND EXTRA_CFLAGS "-m3dnow") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE) - set(CMAKE_REQUIRED_FLAGS "-msse") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE) - if(HAVE_SSE) - list(APPEND EXTRA_CFLAGS "-msse") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE2) - set(CMAKE_REQUIRED_FLAGS "-msse2") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE2__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE2) - if(HAVE_SSE2) - list(APPEND EXTRA_CFLAGS "-msse2") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSEMATH) - if(SSE OR SSE2) - if(USE_GCC) - list(APPEND EXTRA_CFLAGS "-mfpmath=387") - endif() - set(HAVE_SSEMATH TRUE) - endif() - endif() - - if(ALTIVEC) - set(CMAKE_REQUIRED_FLAGS "-maltivec") - check_c_source_compiles(" - #include - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) - check_c_source_compiles(" - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set - list(APPEND EXTRA_CFLAGS "-maltivec") - set(SDL_ALTIVEC_BLITTERS 1) - if(HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC_H 1) - endif() - endif() - endif() - elseif(MSVC_VERSION GREATER 1500) - # TODO: SDL_cpuinfo.h needs to support the user's configuration wish - # for MSVC - right now it is always activated - if(NOT ARCH_64) - set(HAVE_MMX TRUE) - set(HAVE_3DNOW TRUE) - endif() - set(HAVE_SSE TRUE) - set(HAVE_SSE2 TRUE) - set(SDL_ASSEMBLY_ROUTINES 1) - endif() -# TODO: -#else() -# if(USE_GCC OR USE_CLANG) -# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-mmx") -# endif() -endif() - -# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define -# anything. 
-if(LIBC) - if(WINDOWS AND NOT MINGW) - set(HAVE_LIBC TRUE) - foreach(_HEADER stdio.h string.h ctype.h math.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." "_" _HAVE_H ${_UPPER}) - set(${_HAVE_H} 1) - endforeach() - set(HAVE_SIGNAL_H 1) - foreach(_FN - malloc calloc realloc free qsort abs memset memcpy memmove memcmp - strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp - _stricmp _strnicmp sscanf atan atan2 acos asin ceil copysign cos - cosf fabs floor log pow scalbn sin sinf sqrt sqrtf tan tanf) - string(TOUPPER ${_FN} _UPPER) - set(HAVE_${_UPPER} 1) - endforeach() - if(NOT CYGWIN AND NOT MINGW) - set(HAVE_ALLOCA 1) - endif() - set(HAVE_M_PI 1) - add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI - set(STDC_HEADERS 1) - else() - set(HAVE_LIBC TRUE) - check_include_file(sys/types.h HAVE_SYS_TYPES_H) - foreach(_HEADER - stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h - strings.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." "_" _HAVE_H ${_UPPER}) - check_include_file("${_HEADER}" ${_HAVE_H}) - endforeach() - - check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) - check_type_size("size_t" SIZEOF_SIZE_T) - check_symbol_exists(M_PI math.h HAVE_M_PI) - # TODO: refine the mprotect check - check_c_source_compiles("#include - #include - int main() { }" HAVE_MPROTECT) - foreach(_FN - strtod malloc calloc realloc free getenv setenv putenv unsetenv - qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat - strdup _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull - atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp - vsscanf vsnprintf fseeko fseeko64 sigaction setjmp - nanosleep sysconf sysctlbyname - ) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - - check_library_exists(m pow "" HAVE_LIBM) - if(HAVE_LIBM) - set(CMAKE_REQUIRED_LIBRARIES m) - foreach(_FN - atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin - sinf sqrt sqrtf tan tanf) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - set(CMAKE_REQUIRED_LIBRARIES) - list(APPEND EXTRA_LIBS m) - endif() - - check_library_exists(iconv iconv_open "" HAVE_LIBICONV) - if(HAVE_LIBICONV) - list(APPEND EXTRA_LIBS iconv) - endif() - - check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) - endif() -else() - if(WINDOWS) - set(HAVE_STDARG_H 1) - set(HAVE_STDDEF_H 1) - endif() -endif() - - -# Enable/disable various subsystems of the SDL library -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if(NOT SDL_${_OPT}) - set(SDL_${_OPT}_DISABLED 1) - endif() -endforeach() -if(SDL_JOYSTICK) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) -endif() -if(SDL_HAPTIC) - if(NOT SDL_JOYSTICK) - # Haptic requires some private functions from the joystick subsystem. 
- message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") - endif() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(SDL_POWER) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) -endif() -# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: -# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set -# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), -# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings - -# General SDL subsystem options, valid for all platforms -if(SDL_AUDIO) - # CheckDummyAudio/CheckDiskAudio - valid for all platforms - if(DUMMYAUDIO) - set(SDL_AUDIO_DRIVER_DUMMY 1) - file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) - set(HAVE_DUMMYAUDIO TRUE) - endif() - if(DISKAUDIO) - set(SDL_AUDIO_DRIVER_DISK 1) - file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) - set(HAVE_DISKAUDIO TRUE) - endif() -endif() - -if(SDL_DLOPEN) - # Relevant for Unix/Darwin only - if(UNIX OR APPLE) - CheckDLOPEN() - endif() -endif() - -if(SDL_VIDEO) - if(VIDEO_DUMMY) - set(SDL_VIDEO_DRIVER_DUMMY 1) - file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) - set(HAVE_VIDEO_DUMMY TRUE) - set(HAVE_SDL_VIDEO TRUE) - endif() -endif() - -# Platform-specific options and settings -if(EMSCRIPTEN) - # Hide noisy warnings that intend to aid mostly during initial stages of porting a new - # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
- add_definitions(-Wno-warn-absolute-paths) - if(SDL_AUDIO) - file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) - endif() - if(SDL_FILESYSTEM) - file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) - endif() - if(SDL_JOYSTICK) - file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) - endif() - if(SDL_POWER) - file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) - endif() - if(SDL_VIDEO) - file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) - endif() -elseif(UNIX AND NOT APPLE) - if(SDL_AUDIO) - if(SYSV5 OR SOLARIS OR HPUX) - set(SDL_AUDIO_DRIVER_SUNAUDIO 1) - file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(NETBSD OR OPENBSD) - set(SDL_AUDIO_DRIVER_BSD 1) - file(GLOB BSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/bsd/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${BSD_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(AIX) - set(SDL_AUDIO_DRIVER_PAUDIO 1) - file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - CheckOSS() - CheckALSA() - CheckPulseAudio() - CheckESD() - CheckARTS() - CheckNAS() - CheckSNDIO() - CheckFusionSound() - endif() - - if(SDL_VIDEO) - CheckX11() - CheckMir() - CheckDirectFB() - CheckOpenGLX11() - CheckOpenGLESX11() - CheckWayland() - endif() - - if(LINUX) - check_c_source_compiles(" - #include - #ifndef EVIOCGNAME - #error EVIOCGNAME() ioctl not available - #endif - int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS) - - check_c_source_compiles(" - #include - #include - - int main(int argc, char **argv) - { - struct kbentry kbe; - kbe.kb_table = KG_CTRL; - ioctl(0, KDGKBENT, &kbe); - }" HAVE_INPUT_KD) - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(HAVE_INPUT_EVENTS) - set(SDL_INPUT_LINUXEV 1) - endif() - - if(SDL_HAPTIC AND HAVE_INPUT_EVENTS) - set(SDL_HAPTIC_LINUX 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - - if(HAVE_INPUT_KD) - set(SDL_INPUT_LINUXKD 1) - endif() - - check_include_file("libudev.h" HAVE_LIBUDEV_H) - - # !!! FIXME: this needs pkg-config to find the include path, I think. - check_include_file("dbus/dbus.h" HAVE_DBUS_DBUS_H) - endif() - - if(INPUT_TSLIB) - check_c_source_compiles(" - #include \"tslib.h\" - int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB) - if(HAVE_INPUT_TSLIB) - set(SDL_INPUT_TSLIB 1) - list(APPEND EXTRA_LIBS ts) - endif() - endif() - - if(SDL_JOYSTICK) - CheckUSBHID() # seems to be BSD specific - limit the test to BSD only? 
- if(LINUX) - set(SDL_JOYSTICK_LINUX 1) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - endif() - - CheckPTHREAD() - - if(CLOCK_GETTIME) - check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - list(APPEND EXTRA_LIBS rt) - set(HAVE_CLOCK_GETTIME 1) - else() - check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - set(HAVE_CLOCK_GETTIME 1) - endif() - endif() - endif() - - check_include_file(linux/version.h HAVE_LINUX_VERSION_H) - if(HAVE_LINUX_VERSION_H) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") - endif() - - if(SDL_POWER) - if(LINUX) - set(SDL_POWER_LINUX 1) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_UNIX 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(RPATH) - set(SDL_RLD_FLAGS "") - if(BSDI OR FREEBSD OR LINUX OR NETBSD) - set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") - elseif(SOLARIS) - set(SDL_RLD_FLAGS "-R\${libdir}") - endif() - set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) - set(HAVE_RPATH TRUE) - endif() - -elseif(WINDOWS) - find_program(WINDRES windres) - - check_c_source_compiles(" - #include - int main(int argc, char **argv) { }" HAVE_WIN32_CC) - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - # Check for DirectX - if(DIRECTX) - if("$ENV{DXSDK_DIR}" STREQUAL "") - message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") - endif() - set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") - check_include_file(d3d9.h HAVE_D3D_H) - check_include_file(d3d11_1.h HAVE_D3D11_H) - check_include_file(ddraw.h HAVE_DDRAW_H) - check_include_file(dsound.h HAVE_DSOUND_H) - check_include_file(dinput.h HAVE_DINPUT_H) - check_include_file(xaudio2.h HAVE_XAUDIO2_H) - check_include_file(dxgi.h HAVE_DXGI_H) - if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H OR HAVE_XAUDIO2_H) - set(HAVE_DIRECTX TRUE) - # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks - link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) - include_directories($ENV{DXSDK_DIR}\\Include) - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_WINMM 1) - file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - - if(HAVE_DSOUND_H) - set(SDL_AUDIO_DRIVER_DSOUND 1) - file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) - endif() - - if(HAVE_XAUDIO2_H) - set(SDL_AUDIO_DRIVER_XAUDIO2 1) - file(GLOB XAUDIO2_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/xaudio2/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${XAUDIO2_AUDIO_SOURCES}) - endif() - endif() - - if(SDL_VIDEO) - # requires SDL_LOADSO on Windows (IME, DX, etc.) 
- if(NOT SDL_LOADSO) - message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") - endif() - set(SDL_VIDEO_DRIVER_WINDOWS 1) - file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) - - if(RENDER_D3D AND HAVE_D3D_H) - set(SDL_VIDEO_RENDER_D3D 1) - set(HAVE_RENDER_D3D TRUE) - endif() - if(RENDER_D3D AND HAVE_D3D11_H) - set(SDL_VIDEO_RENDER_D3D11 1) - set(HAVE_RENDER_D3D TRUE) - endif() - set(HAVE_SDL_VIDEO TRUE) - endif() - - if(SDL_THREADS) - set(SDL_THREAD_WINDOWS 1) - set(SOURCE_FILES ${SOURCE_FILES} - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c - ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) - set(HAVE_SDL_THREADS TRUE) - endif() - - if(SDL_POWER) - set(SDL_POWER_WINDOWS 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) - set(HAVE_SDL_POWER TRUE) - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_WINDOWS 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Libraries for Win32 native and MinGW - list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) - - # TODO: in configure.in the check for timers is set on - # cygwin | mingw32* - does this include mingw32CE? - if(SDL_TIMERS) - set(SDL_TIMER_WINDOWS 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(SDL_LOADSO) - set(SDL_LOADSO_WINDOWS 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) - set(HAVE_SDL_LOADSO TRUE) - endif() - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(SDL_VIDEO) - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_WGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - if(SDL_JOYSTICK) - if(HAVE_DINPUT_H) - set(SDL_JOYSTICK_DINPUT 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_dxjoystick.c) - list(APPEND EXTRA_LIBS dinput8 dxguid dxerr) - else() - set(SDL_JOYSTICK_WINMM 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/joystick/windows/SDL_mmjoystick.c) - endif() - set(HAVE_SDL_JOYSTICK TRUE) - endif() - - if(SDL_HAPTIC AND HAVE_DINPUT_H) - set(SDL_HAPTIC_DINPUT 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/haptic/windows/SDL_syshaptic.c) - set(HAVE_SDL_HAPTIC TRUE) - endif() - - file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) - if(MINGW OR CYGWIN) - list(APPEND EXTRA_LIBS mingw32) - list(APPEND EXTRA_LDFLAGS "-mwindows") - set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") - list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") - endif() -elseif(APPLE) - # TODO: rework this for proper MacOS X, iOS and Darwin support - - # Requires the darwin file implementation - if(SDL_FILE) - file(GLOB EXTRA_SOURCES ${PROJECT_SOURCE_DIR}/src/file/cocoa/*.m) - set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) - set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) - set(HAVE_SDL_FILE TRUE) - 
set(SDL_FRAMEWORK_COCOA 1) - else() - message_error("SDL_FILE must be enabled to build on MacOS X") - endif() - - if(SDL_AUDIO) - set(MACOSX_COREAUDIO 1) - file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - set(SDL_FRAMEWORK_COREAUDIO 1) - set(SDL_FRAMEWORK_AUDIOUNIT 1) - endif() - - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_IOKIT 1) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - endif() - - if(SDL_HAPTIC) - set(SDL_HAPTIC_IOKIT 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - if(NOT SDL_JOYSTICK) - message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") - endif() - endif() - - if(SDL_POWER) - set(SDL_POWER_MACOSX 1) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - set(SDL_FRAMEWORK_CARBON 1) - set(SDL_FRAMEWORK_IOKIT 1) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_COCOA 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) - set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Actually load the frameworks at the end so we don't duplicate include. - if(SDL_FRAMEWORK_COCOA) - find_library(COCOA_LIBRARY Cocoa) - list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) - endif() - if(SDL_FRAMEWORK_IOKIT) - find_library(IOKIT IOKit) - list(APPEND EXTRA_LIBS ${IOKIT}) - endif() - if(SDL_FRAMEWORK_FF) - find_library(FORCEFEEDBACK ForceFeedback) - list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) - endif() - if(SDL_FRAMEWORK_CARBON) - find_library(CARBON_LIBRARY Carbon) - list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) - endif() - if(SDL_FRAMEWORK_COREAUDIO) - find_library(COREAUDIO CoreAudio) - list(APPEND EXTRA_LIBS ${COREAUDIO}) - endif() - if(SDL_FRAMEWORK_AUDIOUNIT) - find_library(AUDIOUNIT AudioUnit) - list(APPEND EXTRA_LIBS ${AUDIOUNIT}) - endif() - - # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
- if(SDL_VIDEO) - CheckCOCOA() - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_CGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - if(DARWIN) - find_library(OpenGL_LIBRARY OpenGL) - list(APPEND EXTRA_LIBRARIES ${OpenGL_LIBRARY}) - endif() - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - CheckPTHREAD() -elseif(HAIKU) - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_HAIKU 1) - file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - set(SDL_FILESYSTEM_HAIKU 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - - if(SDL_TIMERS) - set(SDL_TIMER_HAIKU 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(VIDEO_OPENGL) - # TODO: Use FIND_PACKAGE(OpenGL) instead - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_BGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - list(APPEND EXTRA_LIBS GL) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - CheckPTHREAD() -endif() - -# Dummies -# configure.in does it differently: -# if not have X -# if enable_X { SDL_X_DISABLED = 1 } -# [add dummy sources] -# so it always adds a dummy, without checking, if it was actually requested. -# This leads to missing internal references on building, since the -# src/X/*.c does not get included. -if(NOT HAVE_SDL_JOYSTICK) - set(SDL_JOYSTICK_DISABLED 1) - if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX - - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - endif() -endif() -if(NOT HAVE_SDL_HAPTIC) - set(SDL_HAPTIC_DISABLED 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(NOT HAVE_SDL_LOADSO) - set(SDL_LOADSO_DISABLED 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) -endif() -if(NOT HAVE_SDL_FILESYSTEM) - set(SDL_FILESYSTEM_DISABLED 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) -endif() - -# We always need to have threads and timers around -if(NOT HAVE_SDL_THREADS) - set(SDL_THREADS_DISABLED 1) - file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) -endif() -if(NOT HAVE_SDL_TIMERS) - set(SDL_TIMERS_DISABLED 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) -endif() - -if(NOT SDLMAIN_SOURCES) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) -endif() - -# Append the -MMD -MT flags -# if(DEPENDENCY_TRACKING) -# if(COMPILER_IS_GNUCC) -# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") -# endif() -# endif() - -configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" - "${SDL2_BINARY_DIR}/include/SDL_config.h") - -# Prepare the flags and remove duplicates -if(EXTRA_LDFLAGS) - list(REMOVE_DUPLICATES EXTRA_LDFLAGS) -endif() -if(EXTRA_LIBS) - list(REMOVE_DUPLICATES EXTRA_LIBS) -endif() -if(EXTRA_CFLAGS) - list(REMOVE_DUPLICATES EXTRA_CFLAGS) -endif() -listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) -set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) - -# Compat helpers for the configuration files -if(NOT WINDOWS OR 
CYGWIN) - # TODO: we need a Windows script, too - execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) - - set(prefix ${CMAKE_INSTALL_PREFIX}) - set(exec_prefix "\${prefix}") - set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") - set(bindir "\${exec_prefix}/bin") - set(includedir "\${prefix}/include") - if(SDL_STATIC) - set(ENABLE_STATIC_TRUE "") - set(ENABLE_STATIC_FALSE "#") - else() - set(ENABLE_STATIC_TRUE "#") - set(ENABLE_STATIC_FALSE "") - endif() - if(SDL_SHARED) - set(ENABLE_SHARED_TRUE "") - set(ENABLE_SHARED_FALSE "#") - else() - set(ENABLE_SHARED_TRUE "#") - set(ENABLE_SHARED_FALSE "") - endif() - - # Clean up the different lists - listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") - set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) - list(REMOVE_DUPLICATES SDL_STATIC_LIBS) - listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) - set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) - listtostr(SDL_LIBS _SDL_LIBS) - set(SDL_LIBS ${_SDL_LIBS}) - - # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") - # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") - - configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" - "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config") - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" - "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) -endif() - -##### Info output ##### -message(STATUS "") -message(STATUS "SDL2 was configured with the following options:") -message(STATUS "") -message(STATUS "Platform: ${CMAKE_SYSTEM}") -message(STATUS "64-bit: ${ARCH_64}") -message(STATUS "Compiler: ${CMAKE_C_COMPILER}") -message(STATUS "") -message(STATUS "Subsystems:") -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - message_bool_option(${_SUB} SDL_${_OPT}) -endforeach() -message(STATUS "") -message(STATUS "Options:") -list(SORT ALLOPTIONS) -foreach(_OPT ${ALLOPTIONS}) - # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters - # Get the padding - string(LENGTH ${_OPT} _OPTLEN) - math(EXPR _PADLEN "23 - ${_OPTLEN}") - string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) - message_tested_option(${_OPT} ${_PADDING}) -endforeach() -message(STATUS "") -message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") -message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") -message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") -message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") -message(STATUS "") -message(STATUS " Build Shared Library: ${SDL_SHARED}") -message(STATUS " Build Static Library: ${SDL_STATIC}") -message(STATUS "") -if(UNIX) - message(STATUS "If something was not detected, although the libraries") - message(STATUS "were installed, then make sure you have set the") - message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") - message(STATUS "") -endif() - -# Ensure that the extra cflags are used at compile time -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") - -# Always build SDLmain -add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) -set(_INSTALL_LIBS "SDL2main") - -if(SDL_SHARED) - add_library(SDL2 SHARED ${SOURCE_FILES}) - if(UNIX) - set_target_properties(SDL2 PROPERTIES - VERSION ${LT_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2-${LT_RELEASE}") - else() - set_target_properties(SDL2 PROPERTIES - VERSION ${SDL_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2") - endif() - set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) - target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) -endif() - -if(SDL_STATIC) 
- set (BUILD_SHARED_LIBS FALSE) - add_library(SDL2-static STATIC ${SOURCE_FILES}) - set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") - if(MSVC) - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") - endif() - # TODO: Win32 platforms keep the same suffix .lib for import and static - # libraries - do we need to consider this? - set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) - target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) -endif() - -##### Installation targets ##### -install(TARGETS ${_INSTALL_LIBS} - LIBRARY DESTINATION "lib${LIB_SUFFIX}" - ARCHIVE DESTINATION "lib${LIB_SUFFIX}") - -file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) -file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) -foreach(_FNAME ${BIN_INCLUDE_FILES}) - get_filename_component(_INCNAME ${_FNAME} NAME) - list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) -endforeach() -list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) -install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) - -if(NOT WINDOWS OR CYGWIN) - if(SDL_SHARED) - install(CODE " - execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink - \"libSDL2-2.0.so\" \"libSDL2.so\")") - install(FILES ${SDL2_BINARY_DIR}/libSDL2.so DESTINATION "lib${LIB_SUFFIX}") - endif() - if(FREEBSD) - # FreeBSD uses ${PREFIX}/libdata/pkgconfig - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") - else() - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc - DESTINATION "lib${LIB_SUFFIX}/pkgconfig") - endif() - install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) - # TODO: what about the .spec file? Is it only needed for RPM creation? - install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "share/aclocal") -else() - install(TARGETS SDL2 RUNTIME DESTINATION bin) -endif() - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt deleted file mode 100644 index bbad766..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sdl2/CMakeLists.txt +++ /dev/null @@ -1,1849 +0,0 @@ - - -cmake_minimum_required(VERSION 2.8.11) -project(SDL2 C) - -# !!! FIXME: this should probably do "MACOSX_RPATH ON" as a target property -# !!! FIXME: for the SDL2 shared library (so you get an -# !!! FIXME: install_name ("soname") of "@rpath/libSDL-whatever.dylib" -# !!! FIXME: instead of "/usr/local/lib/libSDL-whatever.dylib"), but I'm -# !!! FIXME: punting for now and leaving the existing behavior. Until this -# !!! FIXME: properly resolved, this line silences a warning in CMake 3.0+. -# !!! FIXME: remove it and this comment entirely once the problem is -# !!! FIXME: properly resolved. 
-#cmake_policy(SET CMP0042 OLD) - -include(CheckFunctionExists) -include(CheckLibraryExists) -include(CheckIncludeFiles) -include(CheckIncludeFile) -include(CheckSymbolExists) -include(CheckCSourceCompiles) -include(CheckCSourceRuns) -include(CheckCCompilerFlag) -include(CheckTypeSize) -include(CheckStructHasMember) -include(CMakeDependentOption) -include(FindPkgConfig) -include(GNUInstallDirs) -set(CMAKE_MODULE_PATH "${SDL2_SOURCE_DIR}/cmake") -include(${SDL2_SOURCE_DIR}/cmake/macros.cmake) -include(${SDL2_SOURCE_DIR}/cmake/sdlchecks.cmake) - -# General settings -# Edit include/SDL_version.h and change the version, then: -# SDL_MICRO_VERSION += 1; -# SDL_INTERFACE_AGE += 1; -# SDL_BINARY_AGE += 1; -# if any functions have been added, set SDL_INTERFACE_AGE to 0. -# if backwards compatibility has been broken, -# set SDL_BINARY_AGE and SDL_INTERFACE_AGE to 0. -set(SDL_MAJOR_VERSION 2) -set(SDL_MINOR_VERSION 0) -set(SDL_MICRO_VERSION 8) -set(SDL_INTERFACE_AGE 0) -set(SDL_BINARY_AGE 8) -set(SDL_VERSION "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}.${SDL_MICRO_VERSION}") - -# Set defaults preventing destination file conflicts -set(SDL_CMAKE_DEBUG_POSTFIX "d" - CACHE STRING "Name suffix for debug builds") - -mark_as_advanced(CMAKE_IMPORT_LIBRARY_SUFFIX SDL_CMAKE_DEBUG_POSTFIX) - -# Calculate a libtool-like version number -math(EXPR LT_CURRENT "${SDL_MICRO_VERSION} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_AGE "${SDL_BINARY_AGE} - ${SDL_INTERFACE_AGE}") -math(EXPR LT_MAJOR "${LT_CURRENT}- ${LT_AGE}") -set(LT_REVISION "${SDL_INTERFACE_AGE}") -set(LT_RELEASE "${SDL_MAJOR_VERSION}.${SDL_MINOR_VERSION}") -set(LT_VERSION "${LT_MAJOR}.${LT_AGE}.${LT_REVISION}") - -message(STATUS "${LT_VERSION} :: ${LT_AGE} :: ${LT_REVISION} :: ${LT_CURRENT} :: ${LT_RELEASE}") - -# General settings & flags -set(LIBRARY_OUTPUT_DIRECTORY "build") -# Check for 64 or 32 bit -set(SIZEOF_VOIDP ${CMAKE_SIZEOF_VOID_P}) -if(CMAKE_SIZEOF_VOID_P EQUAL 8) - set(ARCH_64 TRUE) - set(PROCESSOR_ARCH "x64") -else() - set(ARCH_64 FALSE) - set(PROCESSOR_ARCH "x86") -endif() -set(LIBNAME SDL2) -if(NOT LIBTYPE) - set(LIBTYPE SHARED) -endif() - -# Get the platform -if(WIN32) - if(NOT WINDOWS) - set(WINDOWS TRUE) - endif() -elseif(UNIX AND NOT APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Linux") - set(LINUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kFreeBSD.*") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kNetBSD.*|NetBSD.*") - set(NETBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "kOpenBSD.*|OpenBSD.*") - set(OPENBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*GNU.*") - set(GNU TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*BSDI.*") - set(BSDI TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "DragonFly.*|FreeBSD") - set(FREEBSD TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "SYSV5.*") - set(SYSV5 TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Solaris.*") - set(SOLARIS TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "HP-UX.*") - set(HPUX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "AIX.*") - set(AIX TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES "Minix.*") - set(MINIX TRUE) - endif() -elseif(APPLE) - if(CMAKE_SYSTEM_NAME MATCHES ".*Darwin.*") - set(DARWIN TRUE) - elseif(CMAKE_SYSTEM_NAME MATCHES ".*MacOS.*") - set(MACOSX TRUE) - endif() - # TODO: iOS? 
-elseif(CMAKE_SYSTEM_NAME MATCHES "BeOS.*") - message_error("BeOS support has been removed as of SDL 2.0.2.") -elseif(CMAKE_SYSTEM_NAME MATCHES "Haiku.*") - set(HAIKU TRUE) -endif() - -# Don't mistake osx for unix -if(UNIX AND NOT APPLE) - set(UNIX_SYS ON) -else() - set(UNIX_SYS OFF) -endif() - -if(UNIX OR APPLE) - set(UNIX_OR_MAC_SYS ON) -else() - set(UNIX_OR_MAC_SYS OFF) -endif() - -if (UNIX_OR_MAC_SYS AND NOT EMSCRIPTEN) # JavaScript does not yet have threading support, so disable pthreads when building for Emscripten. - set(SDL_PTHREADS_ENABLED_BY_DEFAULT ON) -else() - set(SDL_PTHREADS_ENABLED_BY_DEFAULT OFF) -endif() - -# Default option knobs -if(APPLE OR ARCH_64) - if(NOT "${CMAKE_OSX_ARCHITECTURES}" MATCHES "arm") - set(OPT_DEF_SSEMATH ON) - endif() -endif() -if(UNIX OR MINGW OR MSYS) - set(OPT_DEF_LIBC ON) -endif() - -# Compiler info -if(CMAKE_COMPILER_IS_GNUCC) - set(USE_GCC TRUE) - set(OPT_DEF_ASM TRUE) -elseif(CMAKE_C_COMPILER_ID MATCHES "Clang") - set(USE_CLANG TRUE) - set(OPT_DEF_ASM TRUE) -elseif(MSVC_VERSION GREATER 1400) # VisualStudio 8.0+ - set(OPT_DEF_ASM TRUE) - #set(CMAKE_C_FLAGS "/ZI /WX- / -else() - set(OPT_DEF_ASM FALSE) -endif() - -if(USE_GCC OR USE_CLANG) - set(OPT_DEF_GCC_ATOMICS ON) -endif() - -# Default flags, if not set otherwise -if("$ENV{CFLAGS}" STREQUAL "") - if(CMAKE_BUILD_TYPE STREQUAL "") - if(USE_GCC OR USE_CLANG) - set(CMAKE_C_FLAGS "-g -O3") - endif() - endif() -else() - set(CMAKE_C_FLAGS "$ENV{CFLAGS}") - list(APPEND EXTRA_CFLAGS "$ENV{CFLAGS}") -endif() -if(NOT ("$ENV{CFLAGS}" STREQUAL "")) # Hackish, but does the trick on Win32 - list(APPEND EXTRA_LDFLAGS "$ENV{LDFLAGS}") -endif() - -if(MSVC) - option(FORCE_STATIC_VCRT "Force /MT for static VC runtimes" OFF) - if(FORCE_STATIC_VCRT) - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) - if(${flag_var} MATCHES "/MD") - string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") - endif() - endforeach() - endif() - - # Make sure /RTC1 is disabled, otherwise it will use functions from the CRT - foreach(flag_var - CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE - CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO) - string(REGEX REPLACE "/RTC(su|[1su])" "" ${flag_var} "${${flag_var}}") - endforeach(flag_var) -endif() - -# Those are used for pkg-config and friends, so that the SDL2.pc, sdl2-config, -# etc. are created correctly. -set(SDL_LIBS "-lSDL2") -set(SDL_CFLAGS "") - -# Emscripten toolchain has a nonempty default value for this, and the checks -# in this file need to change that, so remember the original value, and -# restore back to that afterwards. For check_function_exists() to work in -# Emscripten, this value must be at its default value. 
-set(ORIG_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) - -if(CYGWIN) - # We build SDL on cygwin without the UNIX emulation layer - include_directories("-I/usr/include/mingw") - set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mno-cygwin") - check_c_source_compiles("int main(int argc, char **argv) {}" - HAVE_GCC_NO_CYGWIN) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_GCC_NO_CYGWIN) - list(APPEND EXTRA_LDFLAGS "-mno-cygwin") - list(APPEND SDL_LIBS "-mno-cygwin") - endif() - set(SDL_CFLAGS "${SDL_CFLAGS} -I/usr/include/mingw") -endif() - -add_definitions(-DUSING_GENERATED_CONFIG_H) -# General includes -include_directories(${SDL2_BINARY_DIR}/include ${SDL2_SOURCE_DIR}/include) -if(USE_GCC OR USE_CLANG) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -idirafter ${SDL2_SOURCE_DIR}/src/video/khronos") -else() - include_directories(${SDL2_SOURCE_DIR}/src/video/khronos) -endif() - -# All these ENABLED_BY_DEFAULT vars will default to ON if not specified, so -# you only need to have a platform override them if they are disabling. -set(OPT_DEF_ASM TRUE) -if(EMSCRIPTEN) - # Set up default values for the currently supported set of subsystems: - # Emscripten/Javascript does not have assembly support, a dynamic library - # loading architecture, low-level CPU inspection or multithreading. - set(OPT_DEF_ASM FALSE) - set(SDL_SHARED_ENABLED_BY_DEFAULT OFF) - set(SDL_ATOMIC_ENABLED_BY_DEFAULT OFF) - set(SDL_THREADS_ENABLED_BY_DEFAULT OFF) - set(SDL_LOADSO_ENABLED_BY_DEFAULT OFF) - set(SDL_CPUINFO_ENABLED_BY_DEFAULT OFF) - set(SDL_DLOPEN_ENABLED_BY_DEFAULT OFF) -endif() - -if (NOT DEFINED SDL_SHARED_ENABLED_BY_DEFAULT) - set(SDL_SHARED_ENABLED_BY_DEFAULT ON) -endif() - -set(SDL_SUBSYSTEMS - Atomic Audio Video Render Events Joystick Haptic Power Threads Timers - File Loadso CPUinfo Filesystem Dlopen) -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if (NOT DEFINED SDL_${_OPT}_ENABLED_BY_DEFAULT) - set(SDL_${_OPT}_ENABLED_BY_DEFAULT ON) - endif() - option(SDL_${_OPT} "Enable the ${_SUB} subsystem" ${SDL_${_OPT}_ENABLED_BY_DEFAULT}) -endforeach() - -option_string(ASSERTIONS "Enable internal sanity checks (auto/disabled/release/enabled/paranoid)" "auto") -#set_option(DEPENDENCY_TRACKING "Use gcc -MMD -MT dependency tracking" ON) -set_option(LIBC "Use the system C library" ${OPT_DEF_LIBC}) -set_option(GCC_ATOMICS "Use gcc builtin atomics" ${OPT_DEF_GCC_ATOMICS}) -set_option(ASSEMBLY "Enable assembly routines" ${OPT_DEF_ASM}) -set_option(SSEMATH "Allow GCC to use SSE floating point math" ${OPT_DEF_SSEMATH}) -set_option(MMX "Use MMX assembly routines" ${OPT_DEF_ASM}) -set_option(3DNOW "Use 3Dnow! 
MMX assembly routines" ${OPT_DEF_ASM}) -set_option(SSE "Use SSE assembly routines" ${OPT_DEF_ASM}) -set_option(SSE2 "Use SSE2 assembly routines" ${OPT_DEF_SSEMATH}) -set_option(SSE3 "Use SSE3 assembly routines" ${OPT_DEF_SSEMATH}) -set_option(ALTIVEC "Use Altivec assembly routines" ${OPT_DEF_ASM}) -set_option(DISKAUDIO "Support the disk writer audio driver" ON) -set_option(DUMMYAUDIO "Support the dummy audio driver" ON) -set_option(VIDEO_DIRECTFB "Use DirectFB video driver" OFF) -dep_option(DIRECTFB_SHARED "Dynamically load directfb support" ON "VIDEO_DIRECTFB" OFF) -set_option(VIDEO_DUMMY "Use dummy video driver" ON) -set_option(VIDEO_OPENGL "Include OpenGL support" ON) -set_option(VIDEO_OPENGLES "Include OpenGL ES support" ON) -set_option(PTHREADS "Use POSIX threads for multi-threading" ${SDL_PTHREADS_ENABLED_BY_DEFAULT}) -dep_option(PTHREADS_SEM "Use pthread semaphores" ON "PTHREADS" OFF) -set_option(SDL_DLOPEN "Use dlopen for shared object loading" ${SDL_DLOPEN_ENABLED_BY_DEFAULT}) -set_option(OSS "Support the OSS audio API" ${UNIX_SYS}) -set_option(ALSA "Support the ALSA audio API" ${UNIX_SYS}) -dep_option(ALSA_SHARED "Dynamically load ALSA audio support" ON "ALSA" OFF) -set_option(JACK "Support the JACK audio API" ${UNIX_SYS}) -dep_option(JACK_SHARED "Dynamically load JACK audio support" ON "JACK" OFF) -set_option(ESD "Support the Enlightened Sound Daemon" ${UNIX_SYS}) -dep_option(ESD_SHARED "Dynamically load ESD audio support" ON "ESD" OFF) -set_option(PULSEAUDIO "Use PulseAudio" ${UNIX_SYS}) -dep_option(PULSEAUDIO_SHARED "Dynamically load PulseAudio support" ON "PULSEAUDIO" OFF) -set_option(ARTS "Support the Analog Real Time Synthesizer" ${UNIX_SYS}) -dep_option(ARTS_SHARED "Dynamically load aRts audio support" ON "ARTS" OFF) -set_option(NAS "Support the NAS audio API" ${UNIX_SYS}) -set_option(NAS_SHARED "Dynamically load NAS audio API" ${UNIX_SYS}) -set_option(SNDIO "Support the sndio audio API" ${UNIX_SYS}) -set_option(FUSIONSOUND "Use FusionSound audio driver" OFF) -dep_option(FUSIONSOUND_SHARED "Dynamically load fusionsound audio support" ON "FUSIONSOUND" OFF) -set_option(LIBSAMPLERATE "Use libsamplerate for audio rate conversion" ${UNIX_SYS}) -dep_option(LIBSAMPLERATE_SHARED "Dynamically load libsamplerate" ON "LIBSAMPLERATE" OFF) -set_option(RPATH "Use an rpath when linking SDL" ${UNIX_SYS}) -set_option(CLOCK_GETTIME "Use clock_gettime() instead of gettimeofday()" OFF) -set_option(INPUT_TSLIB "Use the Touchscreen library for input" ${UNIX_SYS}) -set_option(VIDEO_X11 "Use X11 video driver" ${UNIX_SYS}) -set_option(VIDEO_WAYLAND "Use Wayland video driver" ${UNIX_SYS}) -dep_option(WAYLAND_SHARED "Dynamically load Wayland support" ON "VIDEO_WAYLAND" OFF) -dep_option(VIDEO_WAYLAND_QT_TOUCH "QtWayland server support for Wayland video driver" ON "VIDEO_WAYLAND" OFF) -set_option(VIDEO_MIR "Use Mir video driver" ${UNIX_SYS}) -dep_option(MIR_SHARED "Dynamically load Mir support" ON "VIDEO_MIR" OFF) -set_option(VIDEO_RPI "Use Raspberry Pi video driver" ${UNIX_SYS}) -dep_option(X11_SHARED "Dynamically load X11 support" ON "VIDEO_X11" OFF) -set(SDL_X11_OPTIONS Xcursor Xinerama XInput Xrandr Xscrnsaver XShape Xvm) -foreach(_SUB ${SDL_X11_OPTIONS}) - string(TOUPPER "VIDEO_X11_${_SUB}" _OPT) - dep_option(${_OPT} "Enable ${_SUB} support" ON "VIDEO_X11" OFF) -endforeach() -set_option(VIDEO_COCOA "Use Cocoa video driver" ${APPLE}) -set_option(DIRECTX "Use DirectX for Windows audio/video" ${WINDOWS}) -set_option(RENDER_D3D "Enable the Direct3D render driver" ${WINDOWS}) -set_option(VIDEO_VIVANTE 
"Use Vivante EGL video driver" ${UNIX_SYS}) -dep_option(VIDEO_VULKAN "Enable Vulkan support" ON "ANDROID OR APPLE OR LINUX OR WINDOWS" OFF) -set_option(VIDEO_KMSDRM "Use KMS DRM video driver" ${UNIX_SYS}) -dep_option(KMSDRM_SHARED "Dynamically load KMS DRM support" ON "VIDEO_KMSDRM" OFF) - -# TODO: We should (should we?) respect cmake's ${BUILD_SHARED_LIBS} flag here -# The options below are for compatibility to configure's default behaviour. -# set(SDL_SHARED ${SDL_SHARED_ENABLED_BY_DEFAULT} CACHE BOOL "Build a shared version of the library") -set(SDL_SHARED ON CACHE BOOL "Build a shared version of the library") -set(SDL_STATIC OFF CACHE BOOL "Build a static version of the library") - -dep_option(SDL_STATIC_PIC "Static version of the library should be built with Position Independent Code" OFF "SDL_STATIC" OFF) -set_option(SDL_TEST "Build the test directory" OFF) - - -# General source files -file(GLOB SOURCE_FILES - ${SDL2_SOURCE_DIR}/src/*.c - ${SDL2_SOURCE_DIR}/src/atomic/*.c - ${SDL2_SOURCE_DIR}/src/audio/*.c - ${SDL2_SOURCE_DIR}/src/cpuinfo/*.c - ${SDL2_SOURCE_DIR}/src/dynapi/*.c - ${SDL2_SOURCE_DIR}/src/events/*.c - ${SDL2_SOURCE_DIR}/src/file/*.c - ${SDL2_SOURCE_DIR}/src/libm/*.c - ${SDL2_SOURCE_DIR}/src/render/*.c - ${SDL2_SOURCE_DIR}/src/render/*/*.c - ${SDL2_SOURCE_DIR}/src/stdlib/*.c - ${SDL2_SOURCE_DIR}/src/thread/*.c - ${SDL2_SOURCE_DIR}/src/timer/*.c - ${SDL2_SOURCE_DIR}/src/video/*.c - ${SDL2_SOURCE_DIR}/src/video/yuv2rgb/*.c) - - -if(ASSERTIONS STREQUAL "auto") - # Do nada - use optimization settings to determine the assertion level -elseif(ASSERTIONS STREQUAL "disabled") - set(SDL_DEFAULT_ASSERT_LEVEL 0) -elseif(ASSERTIONS STREQUAL "release") - set(SDL_DEFAULT_ASSERT_LEVEL 1) -elseif(ASSERTIONS STREQUAL "enabled") - set(SDL_DEFAULT_ASSERT_LEVEL 2) -elseif(ASSERTIONS STREQUAL "paranoid") - set(SDL_DEFAULT_ASSERT_LEVEL 3) -else() - message_error("unknown assertion level") -endif() -set(HAVE_ASSERTIONS ${ASSERTIONS}) - -# Compiler option evaluation -if(USE_GCC OR USE_CLANG) - # Check for -Wall first, so later things can override pieces of it. 
- check_c_compiler_flag(-Wall HAVE_GCC_WALL) - if(HAVE_GCC_WALL) - list(APPEND EXTRA_CFLAGS "-Wall") - if(HAIKU) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-multichar") - endif() - endif() - - check_c_compiler_flag(-Wdeclaration-after-statement HAVE_GCC_WDECLARATION_AFTER_STATEMENT) - if(HAVE_GCC_WDECLARATION_AFTER_STATEMENT) - check_c_compiler_flag(-Werror=declaration-after-statement HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) - if(HAVE_GCC_WERROR_DECLARATION_AFTER_STATEMENT) - list(APPEND EXTRA_CFLAGS "-Werror=declaration-after-statement") - endif() - list(APPEND EXTRA_CFLAGS "-Wdeclaration-after-statement") - endif() - - if(DEPENDENCY_TRACKING) - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 3 - #error Dependency tracking requires GCC 3.0 or newer - #endif - int main(int argc, char **argv) { }" HAVE_DEPENDENCY_TRACKING) - endif() - - if(GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - void *x, *y, *z; - __sync_lock_test_and_set(&a, 4); - __sync_lock_test_and_set(&x, y); - __sync_fetch_and_add(&a, 1); - __sync_bool_compare_and_swap(&a, 5, 10); - __sync_bool_compare_and_swap(&x, y, z); }" HAVE_GCC_ATOMICS) - if(NOT HAVE_GCC_ATOMICS) - check_c_source_compiles("int main(int argc, char **argv) { - int a; - __sync_lock_test_and_set(&a, 1); - __sync_lock_release(&a); }" HAVE_GCC_SYNC_LOCK_TEST_AND_SET) - endif() - endif() - - set(CMAKE_REQUIRED_FLAGS "-mpreferred-stack-boundary=2") - check_c_source_compiles("int x = 0; int main(int argc, char **argv) {}" - HAVE_GCC_PREFERRED_STACK_BOUNDARY) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - set(CMAKE_REQUIRED_FLAGS "-fvisibility=hidden -Werror") - check_c_source_compiles(" - #if !defined(__GNUC__) || __GNUC__ < 4 - #error SDL only uses visibility attributes in GCC 4 or newer - #endif - int main(int argc, char **argv) {}" HAVE_GCC_FVISIBILITY) - if(HAVE_GCC_FVISIBILITY) - list(APPEND EXTRA_CFLAGS "-fvisibility=hidden") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - - check_c_compiler_flag(-Wshadow HAVE_GCC_WSHADOW) - if(HAVE_GCC_WSHADOW) - list(APPEND EXTRA_CFLAGS "-Wshadow") - endif() - - if(APPLE) - list(APPEND EXTRA_LDFLAGS "-Wl,-undefined,error") - else() - set(CMAKE_REQUIRED_FLAGS "-Wl,--no-undefined") - check_c_compiler_flag("" HAVE_NO_UNDEFINED) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_NO_UNDEFINED) - list(APPEND EXTRA_LDFLAGS "-Wl,--no-undefined") - endif() - endif() -endif() - -if(ASSEMBLY) - if(USE_GCC OR USE_CLANG) - set(SDL_ASSEMBLY_ROUTINES 1) - # TODO: Those all seem to be quite GCC specific - needs to be - # reworked for better compiler support - set(HAVE_ASSEMBLY TRUE) - if(MMX) - set(CMAKE_REQUIRED_FLAGS "-mmmx") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __MMX__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_MMX) - if(HAVE_MMX) - list(APPEND EXTRA_CFLAGS "-mmmx") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(3DNOW) - set(CMAKE_REQUIRED_FLAGS "-m3dnow") - check_c_source_compiles(" - #include - #ifndef __3dNOW__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { - void *p = 0; - _m_prefetch(p); - }" HAVE_3DNOW) - if(HAVE_3DNOW) - list(APPEND EXTRA_CFLAGS "-m3dnow") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE) - 
set(CMAKE_REQUIRED_FLAGS "-msse") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE) - if(HAVE_SSE) - list(APPEND EXTRA_CFLAGS "-msse") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE2) - set(CMAKE_REQUIRED_FLAGS "-msse2") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE2__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE2) - if(HAVE_SSE2) - list(APPEND EXTRA_CFLAGS "-msse2") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SSE3) - set(CMAKE_REQUIRED_FLAGS "-msse3") - check_c_source_compiles(" - #ifdef __MINGW32__ - #include <_mingw.h> - #ifdef __MINGW64_VERSION_MAJOR - #include - #else - #include - #endif - #else - #include - #endif - #ifndef __SSE3__ - #error Assembler CPP flag not enabled - #endif - int main(int argc, char **argv) { }" HAVE_SSE3) - if(HAVE_SSE3) - list(APPEND EXTRA_CFLAGS "-msse3") - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(NOT SSEMATH) - if(SSE OR SSE2 OR SSE3) - if(USE_GCC) - check_c_compiler_flag(-mfpmath=387 HAVE_FP_387) - if(HAVE_FP_387) - list(APPEND EXTRA_CFLAGS "-mfpmath=387") - endif() - endif() - set(HAVE_SSEMATH TRUE) - endif() - endif() - - check_include_file("immintrin.h" HAVE_IMMINTRIN_H) - - if(ALTIVEC) - set(CMAKE_REQUIRED_FLAGS "-maltivec") - check_c_source_compiles(" - #include - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC_H_HDR) - check_c_source_compiles(" - vector unsigned int vzero() { - return vec_splat_u32(0); - } - int main(int argc, char **argv) { }" HAVE_ALTIVEC) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_ALTIVEC OR HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC TRUE) # if only HAVE_ALTIVEC_H_HDR is set - list(APPEND EXTRA_CFLAGS "-maltivec") - set(SDL_ALTIVEC_BLITTERS 1) - if(HAVE_ALTIVEC_H_HDR) - set(HAVE_ALTIVEC_H 1) - endif() - endif() - endif() - elseif(MSVC_VERSION GREATER 1500) - # TODO: SDL_cpuinfo.h needs to support the user's configuration wish - # for MSVC - right now it is always activated - if(NOT ARCH_64) - set(HAVE_MMX TRUE) - set(HAVE_3DNOW TRUE) - endif() - set(HAVE_SSE TRUE) - set(HAVE_SSE2 TRUE) - set(HAVE_SSE3 TRUE) - set(SDL_ASSEMBLY_ROUTINES 1) - endif() -# TODO: -#else() -# if(USE_GCC OR USE_CLANG) -# list(APPEND EXTRA_CFLAGS "-mno-sse" "-mno-sse2" "-mno-sse3" "-mno-mmx") -# endif() -endif() - -# TODO: Can't deactivate on FreeBSD? w/o LIBC, SDL_stdinc.h can't define -# anything. -if(LIBC) - if(WINDOWS AND NOT MINGW) - set(HAVE_LIBC TRUE) - foreach(_HEADER stdio.h string.h wchar.h ctype.h math.h limits.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." 
"_" _HAVE_H ${_UPPER}) - set(${_HAVE_H} 1) - endforeach() - set(HAVE_SIGNAL_H 1) - foreach(_FN - malloc calloc realloc free qsort abs memset memcpy memmove memcmp - wcslen wcscmp - strlen _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _ultoa strtol strtoul strtoll strtod atoi atof strcmp strncmp - _stricmp _strnicmp sscanf - acos acosf asin asinf atan atanf atan2 atan2f ceil ceilf - copysign copysignf cos cosf fabs fabsf floor floorf fmod fmodf - log logf log10 log10f pow powf scalbn scalbnf sin sinf sqrt sqrtf tan tanf) - string(TOUPPER ${_FN} _UPPER) - set(HAVE_${_UPPER} 1) - endforeach() - if(NOT CYGWIN AND NOT MINGW) - set(HAVE_ALLOCA 1) - endif() - set(HAVE_M_PI 1) - add_definitions(-D_USE_MATH_DEFINES) # needed for M_PI - set(STDC_HEADERS 1) - else() - set(HAVE_LIBC TRUE) - check_include_file(sys/types.h HAVE_SYS_TYPES_H) - foreach(_HEADER - stdio.h stdlib.h stddef.h stdarg.h malloc.h memory.h string.h limits.h - strings.h wchar.h inttypes.h stdint.h ctype.h math.h iconv.h signal.h libunwind.h) - string(TOUPPER "HAVE_${_HEADER}" _UPPER) - string(REPLACE "." "_" _HAVE_H ${_UPPER}) - check_include_file("${_HEADER}" ${_HAVE_H}) - endforeach() - - check_include_files("dlfcn.h;stdint.h;stddef.h;inttypes.h;stdlib.h;strings.h;string.h;float.h" STDC_HEADERS) - check_type_size("size_t" SIZEOF_SIZE_T) - check_symbol_exists(M_PI math.h HAVE_M_PI) - # TODO: refine the mprotect check - check_c_source_compiles("#include - #include - int main() { }" HAVE_MPROTECT) - foreach(_FN - strtod malloc calloc realloc free getenv setenv putenv unsetenv - qsort abs bcopy memset memcpy memmove memcmp strlen strlcpy strlcat - _strrev _strupr _strlwr strchr strrchr strstr itoa _ltoa - _uitoa _ultoa strtol strtoul _i64toa _ui64toa strtoll strtoull - atoi atof strcmp strncmp _stricmp strcasecmp _strnicmp strncasecmp - vsscanf vsnprintf fopen64 fseeko fseeko64 sigaction setjmp - nanosleep sysconf sysctlbyname getauxval poll - ) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - - check_library_exists(m pow "" HAVE_LIBM) - if(HAVE_LIBM) - set(CMAKE_REQUIRED_LIBRARIES m) - foreach(_FN - atan atan2 ceil copysign cos cosf fabs floor log pow scalbn sin - sinf sqrt sqrtf tan tanf acos asin) - string(TOUPPER ${_FN} _UPPER) - set(_HAVEVAR "HAVE_${_UPPER}") - check_function_exists("${_FN}" ${_HAVEVAR}) - endforeach() - set(CMAKE_REQUIRED_LIBRARIES) - list(APPEND EXTRA_LIBS m) - endif() - - check_library_exists(iconv iconv_open "" HAVE_LIBICONV) - if(HAVE_LIBICONV) - list(APPEND EXTRA_LIBS iconv) - set(HAVE_ICONV 1) - endif() - - if(NOT APPLE) - check_include_file(alloca.h HAVE_ALLOCA_H) - check_function_exists(alloca HAVE_ALLOCA) - else() - set(HAVE_ALLOCA_H 1) - set(HAVE_ALLOCA 1) - endif() - - check_struct_has_member("struct sigaction" "sa_sigaction" "signal.h" HAVE_SA_SIGACTION) - endif() -else() - if(WINDOWS) - set(HAVE_STDARG_H 1) - set(HAVE_STDDEF_H 1) - endif() -endif() - - -# Enable/disable various subsystems of the SDL library -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - if(NOT SDL_${_OPT}) - set(SDL_${_OPT}_DISABLED 1) - endif() -endforeach() -if(SDL_JOYSTICK) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) -endif() -if(SDL_HAPTIC) - if(NOT SDL_JOYSTICK) - # Haptic requires some private functions from the joystick subsystem. 
- message_error("SDL_HAPTIC requires SDL_JOYSTICK, which is not enabled") - endif() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(SDL_POWER) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) -endif() -# TODO: in configure.in, the test for LOADSO and SDL_DLOPEN is a bit weird: -# if LOADSO is not wanted, SDL_LOADSO_DISABLED is set -# If however on Unix or APPLE dlopen() is detected via CheckDLOPEN(), -# SDL_LOADSO_DISABLED will not be set, regardless of the LOADSO settings - -# General SDL subsystem options, valid for all platforms -if(SDL_AUDIO) - # CheckDummyAudio/CheckDiskAudio - valid for all platforms - if(DUMMYAUDIO) - set(SDL_AUDIO_DRIVER_DUMMY 1) - file(GLOB DUMMYAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DUMMYAUDIO_SOURCES}) - set(HAVE_DUMMYAUDIO TRUE) - endif() - if(DISKAUDIO) - set(SDL_AUDIO_DRIVER_DISK 1) - file(GLOB DISKAUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/disk/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DISKAUDIO_SOURCES}) - set(HAVE_DISKAUDIO TRUE) - endif() -endif() - -if(SDL_DLOPEN) - # Relevant for Unix/Darwin only - if(UNIX OR APPLE) - CheckDLOPEN() - endif() -endif() - -if(SDL_VIDEO) - if(VIDEO_DUMMY) - set(SDL_VIDEO_DRIVER_DUMMY 1) - file(GLOB VIDEO_DUMMY_SOURCES ${SDL2_SOURCE_DIR}/src/video/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${VIDEO_DUMMY_SOURCES}) - set(HAVE_VIDEO_DUMMY TRUE) - set(HAVE_SDL_VIDEO TRUE) - endif() -endif() - -if(ANDROID) - file(GLOB ANDROID_CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_CORE_SOURCES}) - - # SDL_spinlock.c Needs to be compiled in ARM mode. - # There seems to be no better way currently to set the ARM mode. 
- # see: https://issuetracker.google.com/issues/62264618 - # Another option would be to set ARM mode to all compiled files - check_c_compiler_flag(-marm HAVE_ARM_MODE) - if(HAVE_ARM_MODE) - set_source_files_properties(${SDL2_SOURCE_DIR}/src/atomic/SDL_spinlock.c PROPERTIES COMPILE_FLAGS -marm) - endif() - - file(GLOB ANDROID_MAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/android/*.c) - set(SDLMAIN_SOURCES ${SDLMAIN_SOURCES} ${ANDROID_MAIN_SOURCES}) - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_ANDROID 1) - file(GLOB ANDROID_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_ANDROID 1) - file(GLOB ANDROID_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - if(SDL_HAPTIC) - set(SDL_HAPTIC_ANDROID 1) - file(GLOB ANDROID_HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_ANDROID 1) - file(GLOB ANDROID_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/android/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - if(SDL_LOADSO) - set(SDL_LOADSO_DLOPEN 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dlopen/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) - set(HAVE_SDL_LOADSO TRUE) - endif() - if(SDL_POWER) - set(SDL_POWER_ANDROID 1) - file(GLOB ANDROID_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_ANDROID 1) - file(GLOB ANDROID_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/android/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${ANDROID_VIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - # Core stuff - find_library(ANDROID_DL_LIBRARY dl) - find_library(ANDROID_LOG_LIBRARY log) - find_library(ANDROID_LIBRARY_LIBRARY android) - list(APPEND EXTRA_LIBS ${ANDROID_DL_LIBRARY} ${ANDROID_LOG_LIBRARY} ${ANDROID_LIBRARY_LIBRARY}) - add_definitions(-DGL_GLEXT_PROTOTYPES) - - #enable gles - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(HAVE_VIDEO_OPENGLES TRUE) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - - find_library(OpenGLES1_LIBRARY GLESv1_CM) - find_library(OpenGLES2_LIBRARY GLESv2) - list(APPEND EXTRA_LIBS ${OpenGLES1_LIBRARY} ${OpenGLES2_LIBRARY}) - endif() - - CHECK_C_SOURCE_COMPILES(" - #if defined(__ARM_ARCH) && __ARM_ARCH < 7 - #error Vulkan doesn't work on this configuration - #endif - int main() - { - return 0; - } - " VULKAN_PASSED_ANDROID_CHECKS) - if(NOT VULKAN_PASSED_ANDROID_CHECKS) - set(VIDEO_VULKAN OFF) - message(STATUS "Vulkan doesn't work on this configuration") - endif() - endif() - - CheckPTHREAD() - -endif() - -# Platform-specific options and settings -if(EMSCRIPTEN) - # Hide noisy warnings that intend to aid mostly during initial stages of porting a new - # project. Uncomment at will for verbose cross-compiling -I/../ path info. 
- add_definitions(-Wno-warn-absolute-paths) - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_EMSCRIPTEN 1) - file(GLOB EM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_EMSCRIPTEN 1) - file(GLOB EM_FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_EMSCRIPTEN 1) - file(GLOB EM_JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - if(SDL_POWER) - set(SDL_POWER_EMSCRIPTEN 1) - file(GLOB EM_POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - - if(CLOCK_GETTIME) - set(HAVE_CLOCK_GETTIME 1) - endif() - endif() - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_EMSCRIPTEN 1) - file(GLOB EM_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/emscripten/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${EM_VIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - #enable gles - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(HAVE_VIDEO_OPENGLES TRUE) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - endif() - endif() -elseif(UNIX AND NOT APPLE AND NOT ANDROID) - if(SDL_AUDIO) - if(SYSV5 OR SOLARIS OR HPUX) - set(SDL_AUDIO_DRIVER_SUNAUDIO 1) - file(GLOB SUN_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/sun/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${SUN_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(NETBSD) - set(SDL_AUDIO_DRIVER_NETBSD 1) - file(GLOB NETBSD_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/netbsd/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${NETBSD_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - elseif(AIX) - set(SDL_AUDIO_DRIVER_PAUDIO 1) - file(GLOB AIX_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/paudio/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${AIX_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - endif() - CheckOSS() - CheckALSA() - CheckJACK() - CheckPulseAudio() - CheckESD() - CheckARTS() - CheckNAS() - CheckSNDIO() - CheckFusionSound() - CheckLibSampleRate() - endif() - - if(SDL_VIDEO) - # Need to check for Raspberry PI first and add platform specific compiler flags, otherwise the test for GLES fails! 
- CheckRPI() - CheckX11() - CheckMir() - CheckDirectFB() - CheckOpenGLX11() - CheckOpenGLESX11() - CheckWayland() - CheckVivante() - CheckKMSDRM() - endif() - - if(UNIX) - file(GLOB CORE_UNIX_SOURCES ${SDL2_SOURCE_DIR}/src/core/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_UNIX_SOURCES}) - endif() - - if(LINUX) - check_c_source_compiles(" - #include - #ifndef EVIOCGNAME - #error EVIOCGNAME() ioctl not available - #endif - int main(int argc, char** argv) {}" HAVE_INPUT_EVENTS) - - check_c_source_compiles(" - #include - #include - - int main(int argc, char **argv) - { - struct kbentry kbe; - kbe.kb_table = KG_CTRL; - ioctl(0, KDGKBENT, &kbe); - }" HAVE_INPUT_KD) - - file(GLOB CORE_LINUX_SOURCES ${SDL2_SOURCE_DIR}/src/core/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_LINUX_SOURCES}) - - if(HAVE_INPUT_EVENTS) - set(SDL_INPUT_LINUXEV 1) - endif() - - if(SDL_HAPTIC AND HAVE_INPUT_EVENTS) - set(SDL_HAPTIC_LINUX 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - - if(HAVE_INPUT_KD) - set(SDL_INPUT_LINUXKD 1) - endif() - - check_include_file("libudev.h" HAVE_LIBUDEV_H) - - if(PKG_CONFIG_FOUND) - pkg_search_module(DBUS dbus-1 dbus) - if(DBUS_FOUND) - set(HAVE_DBUS_DBUS_H TRUE) - include_directories(${DBUS_INCLUDE_DIRS}) - list(APPEND EXTRA_LIBS ${DBUS_LIBRARIES}) - endif() - - pkg_search_module(IBUS ibus-1.0 ibus) - if(IBUS_FOUND) - set(HAVE_IBUS_IBUS_H TRUE) - include_directories(${IBUS_INCLUDE_DIRS}) - list(APPEND EXTRA_LIBS ${IBUS_LIBRARIES}) - endif() - endif() - - check_include_file("fcitx/frontend.h" HAVE_FCITX_FRONTEND_H) - endif() - - if(INPUT_TSLIB) - check_c_source_compiles(" - #include \"tslib.h\" - int main(int argc, char** argv) { }" HAVE_INPUT_TSLIB) - if(HAVE_INPUT_TSLIB) - set(SDL_INPUT_TSLIB 1) - list(APPEND EXTRA_LIBS ts) - endif() - endif() - - if(SDL_JOYSTICK) - CheckUSBHID() # seems to be BSD specific - limit the test to BSD only? 
- if(LINUX AND NOT ANDROID) - set(SDL_JOYSTICK_LINUX 1) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/linux/*.c ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - endif() - endif() - - CheckPTHREAD() - - if(CLOCK_GETTIME) - check_library_exists(rt clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - list(APPEND EXTRA_LIBS rt) - set(HAVE_CLOCK_GETTIME 1) - else() - check_library_exists(c clock_gettime "" FOUND_CLOCK_GETTIME) - if(FOUND_CLOCK_GETTIME) - set(HAVE_CLOCK_GETTIME 1) - endif() - endif() - endif() - - check_include_file(linux/version.h HAVE_LINUX_VERSION_H) - if(HAVE_LINUX_VERSION_H) - set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -DHAVE_LINUX_VERSION_H") - endif() - - if(SDL_POWER) - if(LINUX) - set(SDL_POWER_LINUX 1) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/linux/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - endif() - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_UNIX 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(RPATH) - set(SDL_RLD_FLAGS "") - if(BSDI OR FREEBSD OR LINUX OR NETBSD) - set(CMAKE_REQUIRED_FLAGS "-Wl,--enable-new-dtags") - check_c_compiler_flag("" HAVE_ENABLE_NEW_DTAGS) - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - if(HAVE_ENABLE_NEW_DTAGS) - set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir} -Wl,--enable-new-dtags") - else() - set(SDL_RLD_FLAGS "-Wl,-rpath,\${libdir}") - endif() - elseif(SOLARIS) - set(SDL_RLD_FLAGS "-R\${libdir}") - endif() - set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE) - set(HAVE_RPATH TRUE) - endif() - -elseif(WINDOWS) - find_program(WINDRES windres) - - check_c_source_compiles(" - #include - int main(int argc, char **argv) { }" HAVE_WIN32_CC) - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(MSVC) - # Prevent codegen that would use the VC runtime libraries. - set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/GS-") - if(NOT ARCH_64) - set_property(DIRECTORY . APPEND PROPERTY COMPILE_OPTIONS "/arch:SSE") - endif() - endif() - - # Check for DirectX - if(DIRECTX) - if(DEFINED MSVC_VERSION AND NOT ${MSVC_VERSION} LESS 1700) - set(USE_WINSDK_DIRECTX TRUE) - endif() - if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) - if("$ENV{DXSDK_DIR}" STREQUAL "") - message_error("DIRECTX requires the \$DXSDK_DIR environment variable to be set") - endif() - set(CMAKE_REQUIRED_FLAGS "/I\"$ENV{DXSDK_DIR}\\Include\"") - endif() - - if(HAVE_WIN32_CC) - # xinput.h may need windows.h, but doesn't include it itself. 
- check_c_source_compiles(" - #include - #include - int main(int argc, char **argv) { }" HAVE_XINPUT_H) - check_c_source_compiles(" - #include - #include - XINPUT_GAMEPAD_EX x1; - int main(int argc, char **argv) { }" HAVE_XINPUT_GAMEPAD_EX) - check_c_source_compiles(" - #include - #include - XINPUT_STATE_EX s1; - int main(int argc, char **argv) { }" HAVE_XINPUT_STATE_EX) - else() - check_include_file(xinput.h HAVE_XINPUT_H) - endif() - - check_include_file(d3d9.h HAVE_D3D_H) - check_include_file(d3d11_1.h HAVE_D3D11_H) - check_include_file(ddraw.h HAVE_DDRAW_H) - check_include_file(dsound.h HAVE_DSOUND_H) - check_include_file(dinput.h HAVE_DINPUT_H) - check_include_file(mmdeviceapi.h HAVE_MMDEVICEAPI_H) - check_include_file(audioclient.h HAVE_AUDIOCLIENT_H) - check_include_file(dxgi.h HAVE_DXGI_H) - if(HAVE_D3D_H OR HAVE_D3D11_H OR HAVE_DDRAW_H OR HAVE_DSOUND_H OR HAVE_DINPUT_H) - set(HAVE_DIRECTX TRUE) - if(NOT CMAKE_COMPILER_IS_MINGW AND NOT USE_WINSDK_DIRECTX) - # TODO: change $ENV{DXSDL_DIR} to get the path from the include checks - link_directories($ENV{DXSDK_DIR}\\lib\\${PROCESSOR_ARCH}) - include_directories($ENV{DXSDK_DIR}\\Include) - endif() - endif() - set(CMAKE_REQUIRED_FLAGS ${ORIG_CMAKE_REQUIRED_FLAGS}) - endif() - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_WINMM 1) - file(GLOB WINMM_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/winmm/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WINMM_AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - - if(HAVE_DSOUND_H) - set(SDL_AUDIO_DRIVER_DSOUND 1) - file(GLOB DSOUND_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/directsound/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${DSOUND_AUDIO_SOURCES}) - endif() - - if(HAVE_AUDIOCLIENT_H AND HAVE_MMDEVICEAPI_H) - set(SDL_AUDIO_DRIVER_WASAPI 1) - file(GLOB WASAPI_AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/wasapi/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WASAPI_AUDIO_SOURCES}) - endif() - endif() - - if(SDL_VIDEO) - # requires SDL_LOADSO on Windows (IME, DX, etc.) - if(NOT SDL_LOADSO) - message_error("SDL_VIDEO requires SDL_LOADSO, which is not enabled") - endif() - set(SDL_VIDEO_DRIVER_WINDOWS 1) - file(GLOB WIN_VIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${WIN_VIDEO_SOURCES}) - - if(RENDER_D3D AND HAVE_D3D_H) - set(SDL_VIDEO_RENDER_D3D 1) - set(HAVE_RENDER_D3D TRUE) - endif() - if(RENDER_D3D AND HAVE_D3D11_H) - set(SDL_VIDEO_RENDER_D3D11 1) - set(HAVE_RENDER_D3D TRUE) - endif() - set(HAVE_SDL_VIDEO TRUE) - endif() - - if(SDL_THREADS) - set(SDL_THREAD_WINDOWS 1) - set(SOURCE_FILES ${SOURCE_FILES} - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_sysmutex.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_syssem.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systhread.c - ${SDL2_SOURCE_DIR}/src/thread/windows/SDL_systls.c - ${SDL2_SOURCE_DIR}/src/thread/generic/SDL_syscond.c) - set(HAVE_SDL_THREADS TRUE) - endif() - - if(SDL_POWER) - set(SDL_POWER_WINDOWS 1) - set(SOURCE_FILES ${SOURCE_FILES} ${SDL2_SOURCE_DIR}/src/power/windows/SDL_syspower.c) - set(HAVE_SDL_POWER TRUE) - endif() - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_WINDOWS 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Libraries for Win32 native and MinGW - list(APPEND EXTRA_LIBS user32 gdi32 winmm imm32 ole32 oleaut32 version uuid) - - # TODO: in configure.in the check for timers is set on - # cygwin | mingw32* - does this include mingw32CE? 
- if(SDL_TIMERS) - set(SDL_TIMER_WINDOWS 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif() - - if(SDL_LOADSO) - set(SDL_LOADSO_WINDOWS 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) - set(HAVE_SDL_LOADSO TRUE) - endif() - - file(GLOB CORE_SOURCES ${SDL2_SOURCE_DIR}/src/core/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${CORE_SOURCES}) - - if(SDL_VIDEO) - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_WGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - set(HAVE_VIDEO_OPENGLES TRUE) - endif() - endif() - - if(SDL_JOYSTICK) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/windows/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - if(HAVE_DINPUT_H) - set(SDL_JOYSTICK_DINPUT 1) - list(APPEND EXTRA_LIBS dinput8) - if(CMAKE_COMPILER_IS_MINGW) - list(APPEND EXTRA_LIBS dxerr8) - elseif (NOT USE_WINSDK_DIRECTX) - list(APPEND EXTRA_LIBS dxerr) - endif() - endif() - if(HAVE_XINPUT_H) - set(SDL_JOYSTICK_XINPUT 1) - endif() - if(NOT HAVE_DINPUT_H AND NOT HAVE_XINPUT_H) - set(SDL_JOYSTICK_WINMM 1) - endif() - set(HAVE_SDL_JOYSTICK TRUE) - - if(SDL_HAPTIC) - if(HAVE_DINPUT_H OR HAVE_XINPUT_H) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/windows/*.c) - if(HAVE_DINPUT_H) - set(SDL_HAPTIC_DINPUT 1) - endif() - if(HAVE_XINPUT_H) - set(SDL_HAPTIC_XINPUT 1) - endif() - else() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SDL_HAPTIC_DUMMY 1) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - endif() - endif() - - file(GLOB VERSION_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.rc) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/windows/*.c) - if(MINGW OR CYGWIN) - list(APPEND EXTRA_LIBS mingw32) - list(APPEND EXTRA_LDFLAGS "-mwindows") - set(SDL_CFLAGS "${SDL_CFLAGS} -Dmain=SDL_main") - list(APPEND SDL_LIBS "-lmingw32" "-lSDL2main" "-mwindows") - endif() -elseif(APPLE) - # TODO: rework this all for proper MacOS X, iOS and Darwin support - - # We always need these libs on macOS at the moment. - # !!! FIXME: we need Carbon for some very old API calls in - # !!! FIXME: src/video/cocoa/SDL_cocoakeyboard.c, but we should figure out - # !!! FIXME: how to dump those. - if(NOT IOS) - set(SDL_FRAMEWORK_COCOA 1) - set(SDL_FRAMEWORK_CARBON 1) - endif() - - # Requires the darwin file implementation - if(SDL_FILE) - file(GLOB EXTRA_SOURCES ${SDL2_SOURCE_DIR}/src/file/cocoa/*.m) - set(SOURCE_FILES ${EXTRA_SOURCES} ${SOURCE_FILES}) - # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. - set_source_files_properties(${EXTRA_SOURCES} PROPERTIES LANGUAGE C) - set(HAVE_SDL_FILE TRUE) - # !!! FIXME: why is COREVIDEO inside this if() block? - set(SDL_FRAMEWORK_COREVIDEO 1) - else() - message_error("SDL_FILE must be enabled to build on MacOS X") - endif() - - if(SDL_AUDIO) - set(SDL_AUDIO_DRIVER_COREAUDIO 1) - file(GLOB AUDIO_SOURCES ${SDL2_SOURCE_DIR}/src/audio/coreaudio/*.m) - # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. 
- set_source_files_properties(${AUDIO_SOURCES} PROPERTIES LANGUAGE C) - set(SOURCE_FILES ${SOURCE_FILES} ${AUDIO_SOURCES}) - set(HAVE_SDL_AUDIO TRUE) - set(SDL_FRAMEWORK_COREAUDIO 1) - set(SDL_FRAMEWORK_AUDIOTOOLBOX 1) - endif() - - if(SDL_JOYSTICK) - set(SDL_JOYSTICK_IOKIT 1) - if (IOS) - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/iphoneos/*.m ${SDL2_SOURCE_DIR}/src/joystick/steam/*.c) - else() - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/darwin/*.c) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - set(HAVE_SDL_JOYSTICK TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - endif() - - if(SDL_HAPTIC) - set(SDL_HAPTIC_IOKIT 1) - if (IOS) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SDL_HAPTIC_DUMMY 1) - else() - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/darwin/*.c) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) - set(HAVE_SDL_HAPTIC TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - set(SDL_FRAMEWORK_FF 1) - if(NOT SDL_JOYSTICK) - message(FATAL_ERROR "SDL_HAPTIC requires SDL_JOYSTICK to be enabled") - endif() - endif() - - if(SDL_POWER) - set(SDL_POWER_MACOSX 1) - if (IOS) - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/uikit/*.m) - else() - file(GLOB POWER_SOURCES ${SDL2_SOURCE_DIR}/src/power/macosx/*.c) - endif() - set(SOURCE_FILES ${SOURCE_FILES} ${POWER_SOURCES}) - set(HAVE_SDL_POWER TRUE) - set(SDL_FRAMEWORK_IOKIT 1) - endif() - - if(SDL_TIMERS) - set(SDL_TIMER_UNIX 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/unix/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(SDL_FILESYSTEM) - set(SDL_FILESYSTEM_COCOA 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/cocoa/*.m) - # !!! FIXME: modern CMake doesn't need "LANGUAGE C" for Objective-C. - set_source_files_properties(${FILESYSTEM_SOURCES} PROPERTIES LANGUAGE C) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - endif() - - # Actually load the frameworks at the end so we don't duplicate include. - if(SDL_FRAMEWORK_COREVIDEO) - find_library(COREVIDEO CoreVideo) - list(APPEND EXTRA_LIBS ${COREVIDEO}) - endif() - if(SDL_FRAMEWORK_COCOA) - find_library(COCOA_LIBRARY Cocoa) - list(APPEND EXTRA_LIBS ${COCOA_LIBRARY}) - endif() - if(SDL_FRAMEWORK_IOKIT) - find_library(IOKIT IOKit) - list(APPEND EXTRA_LIBS ${IOKIT}) - endif() - if(SDL_FRAMEWORK_FF) - find_library(FORCEFEEDBACK ForceFeedback) - list(APPEND EXTRA_LIBS ${FORCEFEEDBACK}) - endif() - if(SDL_FRAMEWORK_CARBON) - find_library(CARBON_LIBRARY Carbon) - list(APPEND EXTRA_LIBS ${CARBON_LIBRARY}) - endif() - if(SDL_FRAMEWORK_COREAUDIO) - find_library(COREAUDIO CoreAudio) - list(APPEND EXTRA_LIBS ${COREAUDIO}) - endif() - if(SDL_FRAMEWORK_AUDIOTOOLBOX) - find_library(AUDIOTOOLBOX AudioToolbox) - list(APPEND EXTRA_LIBS ${AUDIOTOOLBOX}) - endif() - - # iOS hack needed - http://code.google.com/p/ios-cmake/ ? 
- if(SDL_VIDEO) - if (IOS) - set(SDL_VIDEO_DRIVER_UIKIT 1) - file(GLOB UIKITVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/uikit/*.m) - set(SOURCE_FILES ${SOURCE_FILES} ${UIKITVIDEO_SOURCES}) - else() - CheckCOCOA() - if(VIDEO_OPENGL) - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_CGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - - if(VIDEO_OPENGLES) - set(SDL_VIDEO_OPENGL_EGL 1) - set(SDL_VIDEO_OPENGL_ES2 1) - set(SDL_VIDEO_RENDER_OGL_ES2 1) - set(HAVE_VIDEO_OPENGLES TRUE) - endif() - endif() - endif() - - CheckPTHREAD() -elseif(HAIKU) - if(SDL_VIDEO) - set(SDL_VIDEO_DRIVER_HAIKU 1) - file(GLOB HAIKUVIDEO_SOURCES ${SDL2_SOURCE_DIR}/src/video/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAIKUVIDEO_SOURCES}) - set(HAVE_SDL_VIDEO TRUE) - - set(SDL_FILESYSTEM_HAIKU 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/haiku/*.cc) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) - set(HAVE_SDL_FILESYSTEM TRUE) - - if(SDL_TIMERS) - set(SDL_TIMER_HAIKU 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/haiku/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) - set(HAVE_SDL_TIMERS TRUE) - endif(SDL_TIMERS) - - if(VIDEO_OPENGL) - # TODO: Use FIND_PACKAGE(OpenGL) instead - set(SDL_VIDEO_OPENGL 1) - set(SDL_VIDEO_OPENGL_BGL 1) - set(SDL_VIDEO_RENDER_OGL 1) - list(APPEND EXTRA_LIBS GL) - set(HAVE_VIDEO_OPENGL TRUE) - endif() - endif() - - CheckPTHREAD() -endif() - -if(VIDEO_VULKAN) - set(SDL_VIDEO_VULKAN 1) -endif() - -# Dummies -# configure.in does it differently: -# if not have X -# if enable_X { SDL_X_DISABLED = 1 } -# [add dummy sources] -# so it always adds a dummy, without checking, if it was actually requested. -# This leads to missing internal references on building, since the -# src/X/*.c does not get included. 
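The comment above explains the dummy-source policy: when a subsystem has no working native backend, the blocks below compile the dummy implementation instead, so the library's internal references to that subsystem still resolve. A minimal sketch of that fallback, in Python purely for illustration (the helper and its arguments are hypothetical, not SDL code):

def pick_backend_sources(native_sources, dummy_sources):
    # Prefer the native backend detected earlier; otherwise fall back to the
    # dummy stubs so the subsystem's internal entry points still exist.
    return native_sources if native_sources else dummy_sources

# e.g. joystick support on a platform with no native driver:
sources = pick_backend_sources([], ['src/joystick/dummy/SDL_sysjoystick.c'])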
-if(NOT HAVE_SDL_JOYSTICK) - set(SDL_JOYSTICK_DISABLED 1) - if(SDL_JOYSTICK AND NOT APPLE) # results in unresolved symbols on OSX - - file(GLOB JOYSTICK_SOURCES ${SDL2_SOURCE_DIR}/src/joystick/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${JOYSTICK_SOURCES}) - endif() -endif() -if(NOT HAVE_SDL_HAPTIC) - set(SDL_HAPTIC_DISABLED 1) - file(GLOB HAPTIC_SOURCES ${SDL2_SOURCE_DIR}/src/haptic/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${HAPTIC_SOURCES}) -endif() -if(NOT HAVE_SDL_LOADSO) - set(SDL_LOADSO_DISABLED 1) - file(GLOB LOADSO_SOURCES ${SDL2_SOURCE_DIR}/src/loadso/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${LOADSO_SOURCES}) -endif() -if(NOT HAVE_SDL_FILESYSTEM) - set(SDL_FILESYSTEM_DISABLED 1) - file(GLOB FILESYSTEM_SOURCES ${SDL2_SOURCE_DIR}/src/filesystem/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${FILESYSTEM_SOURCES}) -endif() - -# We always need to have threads and timers around -if(NOT HAVE_SDL_THREADS) - set(SDL_THREADS_DISABLED 1) - file(GLOB THREADS_SOURCES ${SDL2_SOURCE_DIR}/src/thread/generic/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${THREADS_SOURCES}) -endif() -if(NOT HAVE_SDL_TIMERS) - set(SDL_TIMERS_DISABLED 1) - file(GLOB TIMER_SOURCES ${SDL2_SOURCE_DIR}/src/timer/dummy/*.c) - set(SOURCE_FILES ${SOURCE_FILES} ${TIMER_SOURCES}) -endif() - -if(NOT SDLMAIN_SOURCES) - file(GLOB SDLMAIN_SOURCES ${SDL2_SOURCE_DIR}/src/main/dummy/*.c) -endif() - -# Append the -MMD -MT flags -# if(DEPENDENCY_TRACKING) -# if(COMPILER_IS_GNUCC) -# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -MMD -MT \$@") -# endif() -# endif() - -configure_file("${SDL2_SOURCE_DIR}/include/SDL_config.h.cmake" - "${SDL2_BINARY_DIR}/include/SDL_config.h") - -# Prepare the flags and remove duplicates -if(EXTRA_LDFLAGS) - list(REMOVE_DUPLICATES EXTRA_LDFLAGS) -endif() -if(EXTRA_LIBS) - list(REMOVE_DUPLICATES EXTRA_LIBS) -endif() -if(EXTRA_CFLAGS) - list(REMOVE_DUPLICATES EXTRA_CFLAGS) -endif() -listtostr(EXTRA_CFLAGS _EXTRA_CFLAGS) -set(EXTRA_CFLAGS ${_EXTRA_CFLAGS}) - -# Compat helpers for the configuration files -if(NOT WINDOWS OR CYGWIN) - # TODO: we need a Windows script, too - execute_process(COMMAND sh ${SDL2_SOURCE_DIR}/build-scripts/updaterev.sh) - - set(prefix ${CMAKE_INSTALL_PREFIX}) - set(exec_prefix "\${prefix}") - set(libdir "\${exec_prefix}/lib${LIB_SUFFIX}") - set(bindir "\${exec_prefix}/bin") - set(includedir "\${prefix}/include") - if(SDL_STATIC) - set(ENABLE_STATIC_TRUE "") - set(ENABLE_STATIC_FALSE "#") - else() - set(ENABLE_STATIC_TRUE "#") - set(ENABLE_STATIC_FALSE "") - endif() - if(SDL_SHARED) - set(ENABLE_SHARED_TRUE "") - set(ENABLE_SHARED_FALSE "#") - else() - set(ENABLE_SHARED_TRUE "#") - set(ENABLE_SHARED_FALSE "") - endif() - - # Clean up the different lists - listtostr(EXTRA_LIBS _EXTRA_LIBS "-l") - set(SDL_STATIC_LIBS ${SDL_LIBS} ${EXTRA_LDFLAGS} ${_EXTRA_LIBS}) - list(REMOVE_DUPLICATES SDL_STATIC_LIBS) - listtostr(SDL_STATIC_LIBS _SDL_STATIC_LIBS) - set(SDL_STATIC_LIBS ${_SDL_STATIC_LIBS}) - listtostr(SDL_LIBS _SDL_LIBS) - set(SDL_LIBS ${_SDL_LIBS}) - - # MESSAGE(STATUS "SDL_LIBS: ${SDL_LIBS}") - # MESSAGE(STATUS "SDL_STATIC_LIBS: ${SDL_STATIC_LIBS}") - - configure_file("${SDL2_SOURCE_DIR}/sdl2.pc.in" - "${SDL2_BINARY_DIR}/sdl2.pc" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config") - configure_file("${SDL2_SOURCE_DIR}/sdl2-config.in" - "${SDL2_BINARY_DIR}/sdl2-config" @ONLY) - configure_file("${SDL2_SOURCE_DIR}/SDL2.spec.in" - "${SDL2_BINARY_DIR}/SDL2.spec" @ONLY) -endif() - -##### Info output ##### -message(STATUS "") -message(STATUS "SDL2 was 
configured with the following options:") -message(STATUS "") -message(STATUS "Platform: ${CMAKE_SYSTEM}") -message(STATUS "64-bit: ${ARCH_64}") -message(STATUS "Compiler: ${CMAKE_C_COMPILER}") -message(STATUS "") -message(STATUS "Subsystems:") -foreach(_SUB ${SDL_SUBSYSTEMS}) - string(TOUPPER ${_SUB} _OPT) - message_bool_option(${_SUB} SDL_${_OPT}) -endforeach() -message(STATUS "") -message(STATUS "Options:") -list(SORT ALLOPTIONS) -foreach(_OPT ${ALLOPTIONS}) - # Longest option is VIDEO_X11_XSCREENSAVER = 22 characters - # Get the padding - string(LENGTH ${_OPT} _OPTLEN) - math(EXPR _PADLEN "23 - ${_OPTLEN}") - string(RANDOM LENGTH ${_PADLEN} ALPHABET " " _PADDING) - message_tested_option(${_OPT} ${_PADDING}) -endforeach() -message(STATUS "") -message(STATUS " CFLAGS: ${CMAKE_C_FLAGS}") -message(STATUS " EXTRA_CFLAGS: ${EXTRA_CFLAGS}") -message(STATUS " EXTRA_LDFLAGS: ${EXTRA_LDFLAGS}") -message(STATUS " EXTRA_LIBS: ${EXTRA_LIBS}") -message(STATUS "") -message(STATUS " Build Shared Library: ${SDL_SHARED}") -message(STATUS " Build Static Library: ${SDL_STATIC}") -if(SDL_STATIC) - message(STATUS " Build Static Library with Position Independent Code: ${SDL_STATIC_PIC}") -endif() -message(STATUS "") -if(UNIX) - message(STATUS "If something was not detected, although the libraries") - message(STATUS "were installed, then make sure you have set the") - message(STATUS "CFLAGS and LDFLAGS environment variables correctly.") - message(STATUS "") -endif() - -# Ensure that the extra cflags are used at compile time -set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${EXTRA_CFLAGS}") - -# Always build SDLmain -add_library(SDL2main STATIC ${SDLMAIN_SOURCES}) -target_include_directories(SDL2main PUBLIC $) -set(_INSTALL_LIBS "SDL2main") -if (NOT ANDROID) - set_target_properties(SDL2main PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) -endif() - -if(SDL_SHARED) - add_library(SDL2 SHARED ${SOURCE_FILES} ${VERSION_SOURCES}) - if(APPLE) - set_target_properties(SDL2 PROPERTIES MACOSX_RPATH 1) - elseif(UNIX AND NOT ANDROID) - set_target_properties(SDL2 PROPERTIES - VERSION ${LT_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2-${LT_RELEASE}") - else() - set_target_properties(SDL2 PROPERTIES - VERSION ${SDL_VERSION} - SOVERSION ${LT_REVISION} - OUTPUT_NAME "SDL2") - endif() - if(MSVC AND NOT LIBC) - # Don't try to link with the default set of libraries. - set_target_properties(SDL2 PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") - set_target_properties(SDL2 PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") - set_target_properties(SDL2 PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") - endif() - set(_INSTALL_LIBS "SDL2" ${_INSTALL_LIBS}) - target_link_libraries(SDL2 ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) - target_include_directories(SDL2 PUBLIC $) - if (NOT ANDROID) - set_target_properties(SDL2 PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) - endif() -endif() - -if(SDL_STATIC) - set (BUILD_SHARED_LIBS FALSE) - add_library(SDL2-static STATIC ${SOURCE_FILES}) - if (NOT SDL_SHARED OR NOT WIN32) - set_target_properties(SDL2-static PROPERTIES OUTPUT_NAME "SDL2") - # Note: Apparently, OUTPUT_NAME must really be unique; even when - # CMAKE_IMPORT_LIBRARY_SUFFIX or the like are given. Otherwise - # the static build may race with the import lib and one will get - # clobbered, when the suffix is realized via subsequent rename. 
- endif() - set_target_properties(SDL2-static PROPERTIES POSITION_INDEPENDENT_CODE ${SDL_STATIC_PIC}) - if(MSVC AND NOT LIBC) - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_RELEASE "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES LINK_FLAGS_DEBUG "/NODEFAULTLIB") - set_target_properties(SDL2-static PROPERTIES STATIC_LIBRARY_FLAGS "/NODEFAULTLIB") - endif() - # TODO: Win32 platforms keep the same suffix .lib for import and static - # libraries - do we need to consider this? - set(_INSTALL_LIBS "SDL2-static" ${_INSTALL_LIBS}) - target_link_libraries(SDL2-static ${EXTRA_LIBS} ${EXTRA_LDFLAGS}) - target_include_directories(SDL2-static PUBLIC $) - if (NOT ANDROID) - set_target_properties(SDL2-static PROPERTIES DEBUG_POSTFIX ${SDL_CMAKE_DEBUG_POSTFIX}) - endif() -endif() - -##### Tests ##### - -if(SDL_TEST) - file(GLOB TEST_SOURCES ${SDL2_SOURCE_DIR}/src/test/*.c) - add_library(SDL2_test STATIC ${TEST_SOURCES}) - - add_subdirectory(test) -endif() - -##### Installation targets ##### -install(TARGETS ${_INSTALL_LIBS} EXPORT SDL2Targets - LIBRARY DESTINATION "lib${LIB_SUFFIX}" - ARCHIVE DESTINATION "lib${LIB_SUFFIX}" - RUNTIME DESTINATION bin) - -##### Export files ##### -if (APPLE) - set(PKG_PREFIX "SDL2.framework/Resources") -elseif (WINDOWS) - set(PKG_PREFIX "cmake") -else () - set(PKG_PREFIX "lib/cmake/SDL2") -endif () - -include(CMakePackageConfigHelpers) -write_basic_package_version_file("${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake" - VERSION ${SDL_VERSION} - COMPATIBILITY AnyNewerVersion -) - -install(EXPORT SDL2Targets - FILE SDL2Targets.cmake - NAMESPACE SDL2:: - DESTINATION ${PKG_PREFIX} -) -install( - FILES - ${CMAKE_CURRENT_SOURCE_DIR}/SDL2Config.cmake - ${CMAKE_BINARY_DIR}/SDL2ConfigVersion.cmake - DESTINATION ${PKG_PREFIX} - COMPONENT Devel -) - -file(GLOB INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/*.h) -file(GLOB BIN_INCLUDE_FILES ${SDL2_BINARY_DIR}/include/*.h) -foreach(_FNAME ${BIN_INCLUDE_FILES}) - get_filename_component(_INCNAME ${_FNAME} NAME) - list(REMOVE_ITEM INCLUDE_FILES ${SDL2_SOURCE_DIR}/include/${_INCNAME}) -endforeach() -list(APPEND INCLUDE_FILES ${BIN_INCLUDE_FILES}) -install(FILES ${INCLUDE_FILES} DESTINATION include/SDL2) - -if(NOT (WINDOWS OR CYGWIN)) - if(SDL_SHARED) - if (APPLE) - set(SOEXT "dylib") - else() - set(SOEXT "so") - endif() - if(NOT ANDROID) - install(CODE " - execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink - \"libSDL2-2.0.${SOEXT}\" \"libSDL2.${SOEXT}\")") - install(FILES ${SDL2_BINARY_DIR}/libSDL2.${SOEXT} DESTINATION "lib${LIB_SUFFIX}") - endif() - endif() - if(FREEBSD) - # FreeBSD uses ${PREFIX}/libdata/pkgconfig - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc DESTINATION "libdata/pkgconfig") - else() - install(FILES ${SDL2_BINARY_DIR}/sdl2.pc - DESTINATION "lib${LIB_SUFFIX}/pkgconfig") - endif() - install(PROGRAMS ${SDL2_BINARY_DIR}/sdl2-config DESTINATION bin) - # TODO: what about the .spec file? Is it only needed for RPM creation? 
- install(FILES "${SDL2_SOURCE_DIR}/sdl2.m4" DESTINATION "${CMAKE_INSTALL_FULL_DATAROOTDIR}/aclocal") -endif() - -##### Uninstall target ##### - -if(NOT TARGET uninstall) - configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in" - "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" - IMMEDIATE @ONLY) - - add_custom_target(uninstall - COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake) -endif() - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh deleted file mode 100644 index 6ad62d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/sync.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -#pattern=*.py CMakeLists.txt -pattern="utils.cmake common.yml CMakeLists.txt *.py" -other_dir=$1 - -for i in $(ls $pattern); do - if [ -f $other_dir/$i ]; then - diff $i $other_dir/$i > /dev/null || meld $i $other_dir/$i - fi -done - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py deleted file mode 100644 index aaad57e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/third_party.py +++ /dev/null @@ -1,1508 +0,0 @@ -import os -import sys -import utils -import logging -import traceback -import datetime -import hash_version -import copy -import fnmatch - - -class InvalidPlatform(Exception): - def __init__(self, plat): - self._plat = plat - def __str__(self): - return "Invalid platform detected: %s" % self._plat - - -class DontExistsFile(Exception): - def __init__(self, source_filename): - self._source_filename = source_filename - def __str__(self): - return 'Dont exists file %s' % self._source_filename - - -class FailPrepare(Exception): - def __init__(self, node): - self._node = node - def __str__(self): - return 'Failing preparing package: %s' % self._node.get_package_name() - - -class AmbiguationLibs(Exception): - def __init__(self, kind, package, build_mode): - self._kind = kind - self._package = package - self._build_mode = build_mode - def __str__(self): - return "Ambiguation in %s in %s. Mode: %s. 
Candidates:" % (self._kind, self._package, self._build_mode) - - -class NotFoundInDataset(Exception): - def __init__(self, msg): - self._msg = msg - def __str__(self): - return "%s" % self._msg - - -class FailThirdParty(Exception): - def __init__(self, msg): - self._msg = msg - def __str__(self): - return "%s" % self._msg - - -class Found(Exception): - pass - - -def prepare_cmakefiles(cmake_files): - if not os.path.isdir(cmake_files): - logging.error('Invalid cmake files: {}'.format(camkefiles)) - - -def get_identifier(mode): - env = os.environ.copy() - cmaki_pwd = env['CMAKI_PWD'] - if utils.is_windows(): - script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.exe') - else: - script_identifier = os.path.join(cmaki_pwd, 'bin', 'cmaki_identifier.sh') - if not os.path.isfile(script_identifier): - raise Exception("there is no {} script".format(script_identifier)) - env['CMAKI_INFO'] = mode - return list(utils.get_stdout(script_identifier, env=env))[0] - - -def search_fuzzy(data, fuzzy_key, fallback='default'): - for key in data: - if fnmatch.fnmatch(fuzzy_key, key): - return data[key] - else: - if fallback in data: - return data[fallback] - else: - logging.error("not found 'default' platform or %s" % fuzzy_key) - raise Exception("not found '{}'".format(fuzzy_key)) - - -if 'MODE' not in os.environ: - logging.warning('Using Debug by default. For explicit use, define environment var MODE') - os.environ['MODE'] = 'Debug' - -if 'CMAKI_INSTALL' not in os.environ: - logging.warning('Using CMAKI_INSTALL by default. For explicit use, define environment var CMAKI_INSTALL') - os.environ['CMAKI_INSTALL'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier', 'bin') - -if 'CMAKI_PWD' not in os.environ: - logging.warning('Using CMAKI_PWD by default. For explicit use, define environment var CMAKI_PWD') - os.environ['CMAKI_PWD'] = os.path.join( os.getcwd(), '..', 'cmaki_identifier') - - -# -# INMUTABLE GLOBALS -# - -CMAKELIB_URL='https://github.com/makiolo/cmaki.git' -prefered = {} -prefered['Debug'] = ['Debug', 'RelWithDebInfo', 'Release'] -prefered['RelWithDebInfo'] = ['RelWithDebInfo', 'Release', 'Debug'] -prefered['Release'] = ['Release', 'RelWithDebInfo', 'Debug'] -magic_invalid_file = '__not_found__' -exceptions_fail_group = (OSError, IOError, ) -exceptions_fail_program = (KeyboardInterrupt, ) -uncompress_strip_default = '.' -uncompress_prefix_default = '.' 
-priority_default = 50 -build_unittests_foldername = 'unittest' -# detect platform -platform = get_identifier('ALL') -arch = get_identifier('ARCH') -operative_system = get_identifier('OS') -somask_id = operative_system[0] -archs = {platform: arch} -platforms = [platform] -logging.info('Detecting platform from script like: {} / {}'.format(platform, arch)) - -alias_priority_name = { 10: 'minimal', - 20: 'tools', - 30: 'third_party' } -alias_priority_name_inverse = {v: k for k, v in alias_priority_name.items()} - - -def is_valid(package_name, mask): - return (mask.find(somask_id) != -1) and (package_name != 'dummy') - - -def is_blacklisted(blacklist_file, no_blacklist, package_name): - blacklisted = False - if os.path.exists(blacklist_file): - with open(blacklist_file, 'rt') as f: - for line in f.readlines(): - if line.strip() == package_name: - blacklisted = True - break - # --no-blacklist can annular effect of blacklist - if blacklisted and (package_name in no_blacklist): - blacklisted = False - return blacklisted - - -class ThirdParty: - def __init__(self, user_parameters, name, parameters): - self.user_parameters = user_parameters - self.name = name - self.parameters = parameters - self.depends = [] - self.exceptions = [] - self.interrupted = False - self.ret = 0 # Initial return code - self.fail_stage = "" - self.blacklisted = is_blacklisted(self.user_parameters.blacklist, self.user_parameters.no_blacklist, self.get_package_name()) - self.published_invalidation = False - - - def __hash__(self): - return hash((self.get_package_name(), self.get_priority(), self.get_mask())) - - - def __eq__(self, other): - return (self.get_package_name() == other.get_package_name()) and (self.get_priority() == other.get_priority()) and (self.get_mask() == other.get_mask()) - - - def __ne__(self, other): - return not self.__eq__(other) - - - def __repr__(self): - return "%s (%s)" % (self.get_package_name(), self.get_mask()) - - - def __str__(self): - return "%s (%s)" % (self.get_package_name(), self.get_mask()) - - - def get_uncompress_strip(self, pos = 0): - try: - if isinstance(self.parameters['uncompress_strip'], list): - return self.parameters['uncompress_strip'][pos] - else: - return self.parameters['uncompress_strip'] - except KeyError: - # default value - return uncompress_strip_default - - - def get_uncompress_prefix(self, pos = 0): - try: - if isinstance(self.parameters['uncompress_prefix'], list): - return self.parameters['uncompress_prefix'][pos] - else: - return self.parameters['uncompress_prefix'] - except KeyError: - # default value - return uncompress_prefix_default - - - def get_uncompress(self, pos = 0): - try: - if self.parameters['uncompress'] is not None: - if isinstance(self.parameters['uncompress'], list): - return self.parameters['uncompress'][pos].find(somask_id) != -1 - else: - return self.parameters['uncompress'].find(somask_id) != -1 - else: - return False - except KeyError: - # default value - return True - - - def get_depends_raw(self): - return self.depends - - - def get_depends(self): - try: - return self.parameters['depends'] - except KeyError: - # default value - return None - - - def get_generate_custom_script(self, source_dir): - path_build = self.get_path_custom_script(source_dir, name='.build') - build_content = self.get_build_script_content() - if build_content is not None: - with open(path_build, 'wt') as f: - f.write(build_content) - - - def get_path_custom_script(self, source_folder, name = 'build'): - if utils.is_windows(): - path_build = os.path.join(source_folder, 
name + '.cmd') - else: - path_build = os.path.join(source_folder, name + '.sh') - return path_build - - - def has_custom_script(self, source_folder): - script_custom = os.path.exists( self.get_path_custom_script(source_folder) ) - return (self.get_build_script_content() is not None) or script_custom - - - def get_build_script_content(self): - try: - if not utils.is_windows(): - return self.parameters['build'] - else: - return self.parameters['build_windows'] - except KeyError: - # default value - return None - - - def get_source(self): - try: - source = self.parameters['source'] - if source is not None: - if not isinstance(source, list): - return [source] - else: - return source - else: - return [] - except KeyError: - # default value - return [] - - - def get_source_filename(self, position=0): - try: - return self.parameters['source_filename'] - except KeyError: - # default value - source = self.get_source()[position] - filename = source.split('/')[-1] - return filename - - - def get_sources_all(self, position=0): - try: - return self.parameters['sources_all'] - except KeyError: - return False - - - def get_before_copy(self): - try: - return self.parameters['before_copy'] - except KeyError: - # default value - return [] - - - def get_short_path(self): - try: - return self.parameters['short_path'] - except KeyError: - # default value - return False - - - def has_library(self, platform_info): - package = self.get_package_name() - return (('static' in platform_info) and (package != 'dummy')) or (('dynamic' in platform_info) and (package != 'dummy')) - - - def needs(self, node): - if node.is_valid(): - self.depends.append(node) - - - def get_package_name(self): - return self.name - - - def get_package_name_norm(self): - package = self.get_package_name() - for c in '-\\/:*?"<>|': - package = package.replace(c, '_') - return package - - - def get_package_name_norm_upper(self): - package_norm = self.get_package_name_norm() - return package_norm.upper() - - - def set_version(self, newversion): - self.parameters['version'] = newversion - - - def get_version(self): - try: - version = self.parameters['version'] - if version is None: - return '0.0.0.0' - else: - return version - except KeyError: - if self.get_package_name() != 'dummy': - raise Exception('[%s] Version is a mandatory field.' 
% self.get_package_name()) - - - def get_version_manager(self): - try: - version = self.get_version() - if version == '0.0.0.0': - return self.parameters['version_manager'] - else: - # si tiene version -> no usar renombrado git - return None - except KeyError: - return None - - - def get_cmake_target(self): - try: - return self.parameters['cmake_target'] - except KeyError: - return 'install' - - - def get_post_install(self): - try: - return self.parameters['post_install'] - except KeyError: - return [] - - - def get_priority(self): - try: - return int(self.parameters['priority']) - except KeyError: - return priority_default - - - def is_packing(self): - try: - return self.parameters['packing'] - except KeyError: - # default value - return True - - - def get_branch(self): - try: - return self.parameters['branch'] - except KeyError: - # default value - return None - - - def get_build_modes(self): - build_modes = [] - try: - if 'MODE' in os.environ and (os.environ['MODE'] != 'UNDEFINED'): - build_modes.append(os.environ['MODE']) - else: - mode = self.parameters['mode'] - if mode.find('d') != -1: - build_modes.append('Debug') - if mode.find('i') != -1: - build_modes.append('RelWithDebInfo') - if mode.find('r') != -1: - build_modes.append('Release') - except KeyError: - # no mode provided - build_modes.append('Debug') - build_modes.append('RelWithDebInfo') - build_modes.append('Release') - return build_modes - - - def get_mask(self): - try: - return self.parameters['mask'] - except KeyError: - return somask_id - - - def is_valid(self): - if self.blacklisted: - if not self.published_invalidation: - logging.debug('%s is not built because is blacklisted in %s' % (self.get_package_name(), os.path.basename(self.user_parameters.blacklist))) - self.published_invalidation = True - return False - return is_valid(self.get_package_name(), self.get_mask()) - - - def resolver(self, resolved, seen): - seen.append(self) - for edge in self.depends: - if edge not in resolved: - if edge in seen: - raise Exception('Circular reference detected: %s and %s' % (self.get_package_name(), edge.name)) - edge.resolver(resolved, seen) - if self.is_valid(): - resolved.append(self) - seen.remove(self) - - - def get_targets(self): - try: - return self.parameters['targets'] - except KeyError: - # default value - return [] - - - def get_exclude_from_all(self): - try: - return self.parameters['exclude_from_all'] - except KeyError: - # default value - return False - - - def get_exclude_from_clean(self): - try: - return self.parameters['exclude_from_clean'] - except KeyError: - # default value - return False - - - def get_unittest(self): - try: - return self.parameters['unittest'] - except KeyError: - # default value - return None - - - def get_cmake_prefix(self): - try: - cmake_prefix = self.parameters['cmake_prefix'] - if cmake_prefix.endswith('CMakeLists.txt'): - return os.path.dirname(cmake_prefix) - return cmake_prefix - except KeyError: - # default value - return "." 
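resolver() above orders the whole third-party graph: a depth-first, post-order walk that appends a package only after everything it depends on, using the seen stack to detect circular references. A standalone model of the same pattern (hypothetical package names; the validity/mask filtering of the real method is omitted):

class Node:
    def __init__(self, name):
        self.name = name
        self.depends = []

    def needs(self, other):
        self.depends.append(other)

    def resolver(self, resolved, seen):
        seen.append(self)
        for edge in self.depends:
            if edge not in resolved:
                if edge in seen:
                    raise Exception('Circular reference detected: %s and %s'
                                    % (self.name, edge.name))
                edge.resolver(resolved, seen)
        resolved.append(self)   # appended only after all dependencies
        seen.remove(self)

zlib, libpng, app = Node('zlib'), Node('libpng'), Node('app')
libpng.needs(zlib)
app.needs(libpng)

resolved = []
app.resolver(resolved, [])
print([n.name for n in resolved])   # ['zlib', 'libpng', 'app'] -- dependencies first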
- - - def get_generator_targets(self, plat, _, compiler_cpp, ext_sta, ext_dyn): - - package = self.get_package_name_norm() - - for targets in self.get_targets(): - - for target_name in targets: - - platform_info = None - platform_extra = None - - target_info = targets[target_name] - if 'info' in target_info: - outputinfo = search_fuzzy(target_info['info'], plat) - if outputinfo is not None: - platform_info = copy.deepcopy( outputinfo ) - - if 'extra' in target_info: - outputinfo_extra = search_fuzzy(target_info['extra'], plat) - if outputinfo_extra is not None: - platform_extra = copy.deepcopy( outputinfo_extra ) - - if (platform_info is not None) and (platform_extra is not None): - platform_info = utils.smart_merge(platform_info, platform_extra) - - # variables for use in "info" and "extra" - platform_info = utils.apply_replaces_vars(platform_info, { - 'TARGET': target_name, - 'TARGET_UPPER': target_name.upper(), - 'PACKAGE': package, - 'PACKAGE_UPPER': package.upper(), - 'PLATFORM': plat, - 'COMPILER': os.path.basename(compiler_cpp), - 'EXT_DYN': ext_dyn, - 'EXT_STA': ext_sta, - 'ARCH': archs[plat], - }) - - if platform_info is None: - logging.error('No platform info in package %s, platform %s' % (package, plat)) - logging.error("%s" % targets) - sys.exit(1) - - yield (target_name, platform_info) - - - def have_any_in_target(self, plat, key, compiler_replace_maps): - any_static = False - for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): - for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): - if key in platform_info: - any_static = True - return any_static - - - def get_generate_find_package(self): - try: - return self.parameters['generate_find_package'] - except KeyError: - # default value - return True - - - def compiler_iterator(self, plat, compiler_replace_maps): - - plat_parms = search_fuzzy(self.parameters['platforms'], plat) - try: - generator = plat_parms['generator'] - except KeyError: - generator = None - - try: - compilers = plat_parms['compiler'] - except KeyError: - compilers = None - - # resolve map - compiler_replace_resolved = {} - for var, value in compiler_replace_maps.items(): - new_value = value - new_value = new_value.replace('$PLATFORM', plat) - compiler_replace_resolved[var] = new_value - compiler_replace_resolved['$ARCH'] = archs[plat] - compiler_replace_resolved['${ARCH}'] = archs[plat] - - # get compiler info - compiler = get_identifier('COMPILER') - - ext_dyn = plat_parms['ext_dyn'] - ext_sta = plat_parms['ext_sta'] - if compilers is None: - compilers = [('%s, %s' % (compiler, compiler))] - - for compiler in compilers: - compilers_tuple = compiler.split(',') - assert(len(compilers_tuple) == 2) - compiler_c = compilers_tuple[0].strip() - compiler_cpp = compilers_tuple[1].strip() - - compiler_c = utils.apply_replaces(compiler_c, compiler_replace_resolved) - compiler_cpp = utils.apply_replaces(compiler_cpp, compiler_replace_resolved) - - env_new = {} - env_modified = os.environ.copy() - - for env_iter in [env_modified, env_new]: - - env_iter['COMPILER'] = str(compiler) - env_iter['PLATFORM'] = str(plat) - env_iter['PACKAGE'] = str(self.get_package_name()) - env_iter['VERSION'] = str(self.get_version()) - env_iter['ARCH'] = str(archs[plat]) - - try: - environment = plat_parms['environment'] - - try: - environment_remove = environment['remove'] - for key, values in environment_remove.items(): - try: - oldpath = env_iter[key] - except KeyError: - 
oldpath = '' - uniq_values = set() - for v in values: - v = utils.apply_replaces(v, compiler_replace_resolved) - uniq_values.add(v) - for v in uniq_values: - oldpath = oldpath.replace(v, '') - env_iter[key] = oldpath - except KeyError: - pass - - # insert front with seprator = ":" - try: - environment_push_front = environment['push_front'] - for key, values in environment_push_front.items(): - try: - oldpath = env_iter[key] - except KeyError: - oldpath = '' - uniq_values = set() - for v in values: - v = utils.apply_replaces(v, compiler_replace_resolved) - uniq_values.add(v) - for v in uniq_values: - if len(oldpath) == 0: - separator = '' - else: - # -L / -I / -R use space - if v.startswith('-'): - separator = ' ' - else: - separator = ':' - oldpath = str('%s%s%s' % (v, separator, oldpath)) - env_iter[key] = oldpath - except KeyError: - pass - - # insert back with separator " " - try: - environment_flags = environment['flags'] - for key, values in environment_flags.items(): - try: - oldpath = env_iter[key] - except KeyError: - oldpath = '' - uniq_values = set() - for v in values: - v = utils.apply_replaces(v, compiler_replace_resolved) - uniq_values.add(v) - for v in uniq_values: - if len(oldpath) == 0: - separator = '' - else: - separator = ' ' - oldpath = str('%s%s%s' % (oldpath, separator, v)) - env_iter[key] = oldpath - except KeyError: - pass - - # insert new environment variables - try: - environment_assign = environment['assign'] - for key, value in environment_assign.items(): - value = utils.apply_replaces(value, compiler_replace_resolved) - env_iter[key] = value - except KeyError: - pass - - except KeyError: - pass - - yield (compiler_c, compiler_cpp, generator, ext_sta, ext_dyn, env_modified, env_new) - - - def remove_cmake3p(self, cmake3p_dir): - package_cmake3p = os.path.join(cmake3p_dir, self.get_base_folder()) - logging.debug('Removing cmake3p %s' % package_cmake3p) - if os.path.exists(package_cmake3p): - utils.tryremove_dir(package_cmake3p) - for dep in self.get_depends_raw(): - dep.remove_cmake3p(cmake3p_dir) - - - def get_base_folder(self): - package = self.get_package_name() - version = self.get_version() - return '%s-%s' % (package, version) - - - def get_workspace(self, plat): - package = self.get_package_name() - version = self.get_version() - return '%s-%s-%s' % (package, version, plat) - - - def get_build_directory(self, plat, build_mode): - package = self.get_package_name() - version = self.get_version() - if not self.get_short_path(): - return '.build_%s-%s-%s_%s' % (package, version, plat, build_mode) - else: - return '.bs_%s%s%s%s' % (package[:3], version[-1:], plat, build_mode) - - def get_binary_workspace(self, plat): - install_directory = os.path.join(self.user_parameters.prefix, self.get_workspace(plat)) - utils.trymkdir(install_directory) - return install_directory - - - def get_install_directory(self, plat): - install_directory = os.path.join(self.get_binary_workspace(plat), self.get_base_folder(), plat) - return install_directory - - - def get_download_directory(self): - package = self.get_package_name() - return '.download_%s' % package - - - def get_original_directory(self): - package = self.get_package_name() - return '.download_original_%s' % package - - - def apply_replace_maps(self, compiler_replace_maps): - package = self.get_package_name() - package_norm = self.get_package_name_norm() - to_package = os.path.abspath(package) - utils.trymkdir(to_package) - with utils.working_directory(to_package): - basedir = os.path.abspath('..') - 
compiler_replace_maps['$%s_BASE' % package_norm] = os.path.join(basedir, self.get_workspace('$PLATFORM'), self.get_base_folder()) - - - def generate_scripts_headers(self, compiler_replace_maps): - package = self.get_package_name() - package_norm = self.get_package_name_norm() - version = self.get_version() - to_package = os.path.abspath(package) - utils.trymkdir(to_package) - with utils.working_directory(to_package): - basedir = self.user_parameters.prefix - rootdir = self.user_parameters.rootdir - - # generate find.cmake - build_directory = self.get_build_directory(r"${CMAKI_PLATFORM}", r"${GLOBAL_BUILD_MODE}") - with open('find.cmake', 'wt') as f: - f.write("SET(%s_VERSION %s CACHE STRING \"Last version compiled ${PACKAGE}\" FORCE)\n" % (package_norm, version)) - f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}/include\" %s_INCLUDE)\n" % (basedir, package, version, package, version, package_norm)) - f.write("file(TO_NATIVE_PATH \"%s/%s-%s-${CMAKI_PLATFORM}/%s-%s/${CMAKI_PLATFORM}\" %s_LIBDIR)\n" % (basedir, package, version, package, version, package_norm)) - f.write("file(TO_NATIVE_PATH \"%s/%s\" %s_BUILD)\n" % (rootdir, build_directory, package_norm)) - f.write("SET(%s_INCLUDE ${%s_INCLUDE} CACHE STRING \"Include dir %s\" FORCE)\n" % (package_norm, package_norm, package)) - f.write("SET(%s_LIBDIR ${%s_LIBDIR} CACHE STRING \"Libs dir %s\" FORCE)\n" % (package_norm, package_norm, package)) - f.write("SET(%s_BUILD ${%s_BUILD} CACHE STRING \"Build dir %s\" FORCE)\n" % (package_norm, package_norm, package)) - - # genereate find.script / cmd - if utils.is_windows(): - build_directory = self.get_build_directory("%PLATFORM%", "%BUILD_MODE%") - with open('find.cmd', 'wt') as f: - f.write("set %s_VERSION=%s\n" % (package_norm, version)) - f.write("set %s_HOME=%s\%s-%s-%%PLATFORM%%\%s-%s\%%PLATFORM%%\n" % (package_norm, basedir, package, version, package, version)) - f.write("set %s_BASE=%s\%s-%s-%%PLATFORM%%\%s-%s\n" % (package_norm, basedir, package, version, package, version)) - f.write("set SELFHOME=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\%%PLATFORM%%\n" % (basedir)) - f.write("set SELFBASE=%s\%%PACKAGE%%-%%VERSION%%-%%PLATFORM%%\%%PACKAGE%%-%%VERSION%%\n" % (basedir)) - f.write("set %s_BUILD=%s\%s\n" % (package_norm, rootdir, build_directory)) - f.write(r"md %SELFHOME%") - f.write("\n") - else: - build_directory = self.get_build_directory("${PLATFORM}", "${BUILD_MODE}") - with open('find.script', 'wt') as f: - f.write("#!/bin/bash\n") - f.write("%s_VERSION=%s\n" % (package_norm, version)) - f.write("%s_HOME=%s/%s-%s-$PLATFORM/%s-%s/$PLATFORM\n" % (package_norm, basedir, package, version, package, version)) - f.write("%s_BASE=%s/%s-%s-$PLATFORM/%s-%s\n" % (package_norm, basedir, package, version, package, version)) - f.write("SELFHOME=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION/$PLATFORM\n" % (basedir)) - f.write("SELFBASE=%s/$PACKAGE-$VERSION-$PLATFORM/$PACKAGE-$VERSION\n" % (basedir)) - f.write("%s_BUILD=%s/%s\n" % (package_norm, rootdir, build_directory)) - f.write("mkdir -p $SELFHOME\n") - - - def remove_cmakefiles(self): - utils.tryremove('CMakeCache.txt') - utils.tryremove('cmake_install.cmake') - utils.tryremove('install_manifest.txt') - utils.tryremove_dir('CMakeFiles') - - - def remove_scripts_headers(self): - package = self.get_package_name() - to_package = os.path.abspath(package) - utils.trymkdir(to_package) - with utils.working_directory(to_package): - utils.tryremove('find.cmake') - utils.tryremove('find.script') - 
utils.tryremove('find.cmd') - utils.tryremove('.build.sh') - utils.tryremove('.build.cmd') - utils.tryremove_dir_empty(to_package) - - - def generate_3rdpartyversion(self, output_dir): - package = self.get_package_name() - package_norm_upper = self.get_package_name_norm_upper() - version = self.get_version() - packing = self.is_packing() - if not packing: - logging.debug("package %s, don't need 3rdpartyversion" % package) - return - thirdparty_path = os.path.join(output_dir, '3rdpartyversions') - utils.trymkdir(thirdparty_path) - with utils.working_directory(thirdparty_path): - with open('%s.cmake' % package, 'wt') as f: - f.write('SET(%s_REQUIRED_VERSION %s EXACT)\n' % (package_norm_upper, version)) - - - def _smart_uncompress(self, position, package_file_abs, uncompress_directory, destiny_directory, compiler_replace_maps): - uncompress = self.get_uncompress(position) - uncompress_strip = self.get_uncompress_strip(position) - uncompress_prefix = self.get_uncompress_prefix(position) - if uncompress: - if (uncompress_strip == uncompress_strip_default) and (uncompress_prefix == uncompress_prefix_default): - # case fast (don't need intermediate folder) - ok = utils.extract_file(package_file_abs, destiny_directory, self.get_first_environment(compiler_replace_maps)) - else: - source_with_strip = os.path.join(uncompress_directory, uncompress_strip) - destiny_with_prefix = os.path.join(destiny_directory, uncompress_prefix) - ok = utils.extract_file(package_file_abs, uncompress_directory, self.get_first_environment(compiler_replace_maps)) - utils.move_folder_recursive(source_with_strip, destiny_with_prefix) - utils.tryremove_dir(source_with_strip) - if not ok: - raise Exception('Invalid uncompressed package %s - %s' % (package, package_file_abs)) - - - def _prepare_third_party(self, position, url, build_directory, compiler_replace_maps): - package = self.get_package_name() - source_filename = self.get_source_filename(position) - uncompress_strip = self.get_uncompress_strip(position) - uncompress_prefix = self.get_uncompress_prefix(position) - uncompress = self.get_uncompress(position) - uncompress_directory = self.get_download_directory() - utils.trymkdir(uncompress_directory) - - logging.debug('source_filename = %s' % source_filename) - logging.debug('uncompress_strip = %s' % uncompress_strip) - logging.debug('uncompress_prefix = %s' % uncompress_prefix) - logging.debug('uncompress = %s' % uncompress) - - # resolve url vars - url = url.replace('$NPP_SERVER', os.environ['NPP_SERVER']) - - # files in svn - if(url.startswith('svn://')): - # strip is not implemmented with svn:// - utils.tryremove_dir( build_directory ) - logging.info('Download from svn: %s' % url) - self.safe_system( 'svn co %s %s' % (url, build_directory), compiler_replace_maps ) - # utils.tryremove_dir( os.path.join(build_directory, '.svn') ) - - elif(url.endswith('.git') or (url.find('github') != -1) or (url.find('bitbucket') != -1)) and not ( url.endswith('.zip') or url.endswith('.tar.gz') or url.endswith('.tar.bz2') or url.endswith('.tgz') or url.endswith('.py') ): - # strip is not implemmented with git:// - utils.tryremove_dir( build_directory ) - logging.info('Download from git: %s' % url) - branch = self.get_branch() - extra_cmd = '' - if branch is not None: - logging.info('clonning to branch %s' % branch) - extra_cmd = '%s' % branch - self.safe_system('git clone %s --depth=200 %s %s' % (extra_cmd, url, build_directory), compiler_replace_maps) - # self.safe_system('git clone %s %s' % (url, build_directory), 
compiler_replace_maps) - with utils.working_directory(build_directory): - # self.safe_system('git checkout {}'.format(extra_cmd), compiler_replace_maps) - self.safe_system('git submodule init', compiler_replace_maps) - self.safe_system('git submodule update', compiler_replace_maps) - # depends_file = self.user_parameters.depends - # if depends_file is not None: - # with utils.working_directory(build_directory): - # # leer el fichero de dependencias - # if os.path.exists(depends_file): - # data = utils.deserialize(depends_file) - # else: - # data = {} - # - # # obedecer, si trae algo util - # if package in data: - # logging.debug('data package version is %s' % data[package]) - # try: - # git_version = hash_version.to_git_version(build_directory, data[package]) - # logging.debug('data package in git version is %s' % git_version) - # logging.debug('updating to revision %s' % git_version) - # self.safe_system('git reset --hard %s' % git_version, compiler_replace_maps) - # except AssertionError: - # logging.info('using HEAD') - # - # # actualizar y reescribir - # revision = hash_version.get_last_version(build_directory) - # assert(len(revision) > 0) - # data[package] = revision - # utils.serialize(data, depends_file) - # else: - # logging.warning('not found depends file, using newest changeset') - - # file in http - elif ( url.startswith('http://') - or url.startswith('https://') - or url.endswith('.zip') - or url.endswith('.tar.gz') - or url.endswith('.tar.bz2') - or url.endswith('.tgz') - or url.endswith('.py') ): - - logging.info('Download from url: %s' % url) - # download to source_filename - package_file_abs = os.path.join(uncompress_directory, source_filename) - utils.download_from_url(url, package_file_abs) - if os.path.isfile(package_file_abs): - - # uncompress in download folder for after generate a patch with all changes - if not os.path.isdir( self.get_original_directory() ): - utils.trymkdir( self.get_original_directory() ) - logging.debug('preparing original uncompress') - # uncompress in original - self._smart_uncompress(position, package_file_abs, uncompress_directory, self.get_original_directory(), compiler_replace_maps) - else: - logging.debug('skipping original uncompress (already exists)') - - # uncompress in intermediate build directory - self._smart_uncompress(position, package_file_abs, uncompress_directory, build_directory, compiler_replace_maps) - - else: - raise DontExistsFile(source_filename) - - else: - raise Exception('Invalid source: %s - %s' % (package, url)) - - - def prepare_third_party(self, build_directory, compiler_replace_maps): - utils.trymkdir(build_directory) - package = self.get_package_name() - version = self.get_version() - sources_all = self.get_sources_all() - exceptions = [] - i = 0 - for source_url in self.get_source(): - if (source_url is None) or (len(source_url) <= 0) or (source_url == 'skip'): - logging.warning('[%s %s] Skipping preparation ...' % (package, version)) - else: - logging.warning('[%s %s] trying prepare from %s ...' 
% (package, version, source_url)) - try: - self._prepare_third_party(i, source_url, build_directory, compiler_replace_maps) - if not sources_all: - # sources_all = false ---> any source - # sources_all = Trie ----> all source - break - except exceptions_fail_group + exceptions_fail_program: - raise - except: - exceptions.append(sys.exc_info()) - i += 1 - if len(exceptions) > 0: - i = 0 - for exc_type, exc_value, exc_traceback in exceptions: - print ("---- Exception #%d / %d ----------" % (i+1, len(exceptions))) - traceback.print_exception(exc_type, exc_value, exc_traceback) - print ("----------------------------------") - i += 1 - raise FailPrepare(self) - - - def get_prefered_build_mode(self, prefered_build_mode_list): - build_modes = self.get_build_modes() - assert(len(prefered_build_mode_list) > 0) - prefered_build_mode = prefered_build_mode_list[0] - while (prefered_build_mode not in build_modes) and (len(prefered_build_mode_list)>0): - prefered_build_mode_list.pop(0) - if len(prefered_build_mode_list) > 0: - prefered_build_mode = prefered_build_mode_list[0] - return prefered_build_mode - - - def generate_cmake_condition(self, platforms, compiler_replace_maps): - target_uniques = set() - condition = '' - i = 0 - for plat in platforms: - for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, _, _ in self.compiler_iterator(plat, compiler_replace_maps): - for package, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): - package_lower = package.lower() - if (package_lower not in target_uniques) and (package_lower != 'dummy'): - target_uniques.add(package_lower) - if self.has_library(platform_info): - if i == 0: - condition += '(NOT TARGET %s)' % package_lower - else: - condition += ' OR (NOT TARGET %s)' % package_lower - i += 1 - return condition - - - def _search_library(self, rootdir, special_pattern): - ''' - 3 cases: - string - pattern as special string - list of strings - ''' - logging.debug('-- searching in {} with pattern: {}'.format(rootdir, special_pattern)) - - if special_pattern is None: - logging.debug('Failed searching lib in %s' % rootdir) - return False, None - - package = self.get_package_name() - if isinstance(special_pattern, list): - utils.verbose(self.user_parameters, 'Searching list %s' % special_pattern) - valid_ff = None - for ff in special_pattern: - valid, valid_ff = self._search_library(rootdir, utils.get_norm_path(ff)) - if valid: - break - return valid, valid_ff - - elif special_pattern.startswith('/') and special_pattern.endswith('/'): - pattern = special_pattern[1:-1] - utils.verbose(self.user_parameters, 'Searching rootdir %s, pattern %s' % (rootdir, pattern)) - files_found = utils.rec_glob(rootdir, pattern) - utils.verbose(self.user_parameters, 'Candidates %s' % files_found) - if len(files_found) == 1: - relfile = os.path.relpath(files_found[0], rootdir) - return True, utils.get_norm_path(relfile) - elif len(files_found) == 0: - msg = 'No library found in %s with pattern %s' % (rootdir, pattern) - logging.debug(msg) - return False, None - else: - msg = "Ambiguation in %s" % (package) - logging.debug(msg) - return False, None - else: - pathfull = os.path.join(rootdir, special_pattern) - utils.verbose(self.user_parameters, 'Checking file %s' % pathfull) - if os.path.exists(pathfull): - return True, utils.get_norm_path(special_pattern) - else: - return False, None - - - def search_library(self, workbase, dataset, kind, rootdir=None): - ''' - can throw exception - ''' - build_mode = 
self.get_prefered_build_mode(prefered[os.environ['MODE']])
-        if rootdir is None:
-            rootdir = workbase
-        utils.verbose(self.user_parameters, 'Searching rootdir %s' % (rootdir))
-        if (build_mode.lower() in dataset) and (kind in dataset[build_mode.lower()]):
-            special_pattern = dataset[build_mode.lower()][kind]
-            valid, valid_ff = self._search_library(rootdir, special_pattern)
-            if valid:
-                return valid_ff
-            else:
-                package = self.get_package_name()
-                raise AmbiguationLibs(kind, package, build_mode)
-        else:
-            raise NotFoundInDataset("Not found in dataset, searching %s - %s" % (build_mode.lower(), kind))
-
-
-    def search_library_noexcept(self, workbase, dataset, kind):
-        try:
-            rootdir = os.path.abspath(workbase)
-            finalpath = self.search_library(workbase, dataset, kind, rootdir)
-            utils.superverbose(self.user_parameters, '[01] path: %s' % finalpath)
-            return finalpath
-        except AmbiguationLibs:
-            finalpath = '%s.%s' % (magic_invalid_file, kind)
-            utils.superverbose(self.user_parameters, '[02] path: %s' % finalpath)
-            return finalpath
-        except NotFoundInDataset:
-            finalpath = '%s.%s' % (magic_invalid_file, kind)
-            utils.superverbose(self.user_parameters, '[03] path: %s' % finalpath)
-            return finalpath
-
-
-    def check_parts_exists(self, workbase, package, target, dataset, kindlibs, build_modes=None):
-        '''
-        Ensures that every part of the target exists; returns True if all parts exist, False otherwise.
-
-        workbase: base install directory
-        package: package name
-        target: target name
-        dataset: structure holding the search strategies
-            {"debug": {"part1": ["*.dll", "*d.dll"]}, "release": {"part1": ["*_release.dll"]}}
-        kindlibs: tuple of parts to verify; each tuple is (kind, mandatory)
-        build_modes: restrict the search to certain build modes
-        '''
-
-        all_ok = True
-        if build_modes is None:
-            build_modes = self.get_build_modes()
-        for build_mode in build_modes:
-            for kind, must in kindlibs:
-                try:
-                    part_fullpath = os.path.join(workbase, self.search_library_noexcept(workbase, dataset, kind))
-                    if not os.path.exists(part_fullpath):
-                        if must:
-                            logging.error("[%s] Didn't find %s in %s. Mode: %s. Path: %s. Dataset: %s" % (package, kind, target, build_mode, part_fullpath, dataset))
-                            all_ok = False
-                        else:
-                            msg = "[%s] Didn't find %s in %s. Mode: %s. 
Path: %s" % (package, kind, target, build_mode, part_fullpath) - if build_mode != 'Release': - logging.warning(msg) - else: - logging.debug(msg) - except NotFoundInDataset as e: - if must: - logging.error("[ERROR] [NOT FOUND] [%s] %s" % (package, e)) - all_ok = False - return all_ok - - - def is_invalid_lib(self, libpath): - return (libpath is None) or (utils.get_filename_no_ext(os.path.basename(libpath)) == magic_invalid_file) - - - def generate_cmakefiles(self, platforms, folder_output, compiler_replace_maps): - errors = 0 - packing = self.is_packing() - if not packing: - logging.warning("package: %s don't need generate cmakefiles" % self.get_package_name()) - return errors - oldcwd = os.getcwd() - utils.trymkdir(folder_output) - with utils.working_directory(folder_output): - package = self.get_package_name() - package_lower = package.lower() - package_upper = package.upper() - with open('%s-config.cmake' % package_lower, 'wt') as f: - f.write('''CMAKE_POLICY(PUSH) -CMAKE_POLICY(VERSION 3.0) -cmake_minimum_required(VERSION 3.0) -cmake_policy(SET CMP0011 NEW) - ''') - - condition = self.generate_cmake_condition(platforms, compiler_replace_maps) - if len(condition) > 0: - f.write('\nif(%s)\n' % condition) - - f.write('''\ninclude(${CMAKI_PATH}/facts/facts.cmake) -cmaki_download_package() -file(TO_NATIVE_PATH "${_DIR}" %s_HOME) -file(TO_NATIVE_PATH "${_DIR}/${CMAKI_PLATFORM}" %s_PREFIX) -set(%s_HOME "${%s_HOME}" PARENT_SCOPE) -set(%s_PREFIX "${%s_PREFIX}" PARENT_SCOPE) -include(${_MY_DIR}/${CMAKI_PLATFORM}.cmake) - ''' % (package_upper, package_upper, package_upper, package_upper, package_upper, package_upper)) - - if len(condition) > 0: - f.write('\nendif()\n') - - f.write('\nCMAKE_POLICY(POP)') - - with open('%s-config-version.cmake' % package_lower, 'wt') as f: - f.write('''\ -cmake_minimum_required(VERSION 3.0) -cmake_policy(SET CMP0011 NEW) -include(${CMAKI_PATH}/facts/facts.cmake) -cmaki_package_version_check() - ''') - - for plat in platforms: - - workspace = self.get_workspace(plat) - base_folder = self.get_base_folder() - - for compiler_c, compiler_cpp, _, ext_sta, ext_dyn, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): - - with open('%s.cmake' % (plat), 'wt') as f: - - install_3rdparty_dependencies = True - - includes_set = [] - definitions_set = [] - system_depends_set = [] - depends_set = set() - - for target, platform_info in self.get_generator_targets(plat, compiler_c, compiler_cpp, ext_sta, ext_dyn): - - target_lower = target.lower() - target_upper = target.upper() - - if self.has_library(platform_info) and (target != 'dummy'): - f.write('if(NOT TARGET %s)\n\n' % target_lower) - - try: - add_3rdparty_dependencies = platform_info['add_3rdparty_dependencies'] - except KeyError: - add_3rdparty_dependencies = True - - try: - lib_provided = platform_info['lib_provided'] - except KeyError: - lib_provided = True - - if 'include' in platform_info: - include = platform_info['include'] - for d in include: - includes_set.append(d) - - # rename to definitions - if 'definitions' in platform_info: - definitions = platform_info['definitions'] - if definitions is not None: - for d in definitions: - definitions_set.append(d) - - if 'system_depends' in platform_info: - system_depends = platform_info['system_depends'] - if system_depends is not None: - for sd in system_depends: - system_depends_set.append(sd) - - if 'targets_paths' in self.parameters: - targets_paths = self.parameters['targets_paths'] - if targets_paths is not None: - for key, value in 
targets_paths.items(): - f.write('file(TO_NATIVE_PATH "%s" %s)\n' % (value, key)) - - # work_base = os.path.join(oldcwd, workspace, base_folder, plat) - work_base = self.get_install_directory(plat) - - if ('executable' in platform_info) and (target != 'dummy'): - # a target in mode executable, dont need install - install_3rdparty_dependencies = False - - if 'use_run_with_libs' in platform_info: - if utils.is_windows(): - f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.cmd" %s_LAUNCHER)\n' % target_upper) - else: - f.write('file(TO_NATIVE_PATH "${_MY_DIR}/../../run_with_libs.sh" %s_LAUNCHER)\n' % target_upper) - - executable = platform_info['executable'] - if not self.check_parts_exists(work_base, package, target, executable, [('bin', True)], build_modes=['Release']): - errors += 1 - release_bin = self.search_library_noexcept(work_base, executable, 'bin') - - for suffix in ['', '_EXECUTABLE']: - if 'use_run_with_libs' in platform_info: - f.write('set(%s%s "${%s_LAUNCHER}" "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, target_upper, plat, utils.get_norm_path(release_bin, native=False))) - else: - f.write('set(%s%s "${_DIR}/%s/%s" PARENT_SCOPE)\n' % (target_upper, suffix, plat, utils.get_norm_path(release_bin, native=False))) - f.write('file(TO_NATIVE_PATH "${%s%s}" %s%s)\n' % (target_upper, suffix, target_upper, suffix)) - f.write('\n') - - if ('dynamic' in platform_info) and (target != 'dummy'): - - dynamic = platform_info['dynamic'] - - # add depend - if add_3rdparty_dependencies: - f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) - - if utils.is_windows(): - if not self.check_parts_exists(work_base, package, target, dynamic, [('dll', True), ('lib', lib_provided), ('pdb', False)]): - errors += 1 - - debug_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - release_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - relwithdebinfo_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - minsizerel_dll = self.search_library_noexcept(work_base, dynamic, 'dll') - - debug_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - release_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - relwithdebinfo_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - minsizerel_lib = self.search_library_noexcept(work_base, dynamic, 'lib') - - try: - relwithdebinfo_pdb = self.search_library(work_base, dynamic, 'pdb') - except Exception as e: - logging.debug('exception searching lib: %s' % e) - relwithdebinfo_pdb = None - - try: - debug_pdb = self.search_library(work_base, dynamic, 'pdb') - except Exception as e: - logging.debug('exception searching lib: %s' % e) - debug_pdb = None - - f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) - f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) - f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) - - # dll - f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_dll, native=False))) - f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_dll, native=False))) - f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_dll, native=False))) - f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_dll, native=False))) - f.write('\n') - - # lib - if not self.is_invalid_lib(debug_lib): - 
f.write('\tIMPORTED_IMPLIB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) - if not self.is_invalid_lib(release_lib): - f.write('\tIMPORTED_IMPLIB_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) - if not self.is_invalid_lib(relwithdebinfo_lib): - f.write('\tIMPORTED_IMPLIB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) - if not self.is_invalid_lib(minsizerel_lib): - f.write('\tIMPORTED_IMPLIB_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) - f.write('\n') - - # pdb - if not self.is_invalid_lib(debug_pdb): - f.write('\tIMPORTED_PDB_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_pdb, native=False))) - - if not self.is_invalid_lib(relwithdebinfo_pdb): - f.write('\tIMPORTED_PDB_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_pdb, native=False))) - - f.write(')\n') - else: - - if not self.check_parts_exists(work_base, package, target, dynamic, [('so', True)]): - errors += 1 - - debug_so = self.search_library_noexcept(work_base, dynamic, 'so') - release_so = self.search_library_noexcept(work_base, dynamic, 'so') - relwithdebinfo_so = self.search_library_noexcept(work_base, dynamic, 'so') - minsizerel_so = self.search_library_noexcept(work_base, dynamic, 'so') - - try: - debug_so_full = os.path.join(oldcwd, work_base, debug_so) - debug_soname = utils.get_soname(debug_so_full, env=env_modified) - logging.debug('detected soname in debug library: {}'.format(debug_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - debug_soname = None - - try: - release_so_full = os.path.join(oldcwd, work_base, release_so) - release_soname = utils.get_soname(release_so_full, env=env_modified) - logging.debug('detected soname in release library: {}'.format(release_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - release_soname = None - - try: - relwithdebinfo_so_full = os.path.join(oldcwd, work_base, relwithdebinfo_so) - relwithdebinfo_soname = utils.get_soname(relwithdebinfo_so_full, env=env_modified) - logging.debug('detected soname in relwithdebinfo library: {}'.format(relwithdebinfo_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - relwithdebinfo_soname = None - - try: - minsizerel_so_full = os.path.join(oldcwd, work_base, minsizerel_so) - minsizerel_soname = utils.get_soname(minsizerel_so_full, env=env_modified) - logging.debug('detected soname in minsizerel library: {}'.format(minsizerel_soname)) - except Exception as e: - logging.debug('exception searching lib: %s' % e) - minsizerel_soname = None - - f.write('ADD_LIBRARY(%s SHARED IMPORTED)\n' % target_lower) - f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) - f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) - - # so - f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_so, native=False))) - f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_so, native=False))) - f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_so, native=False))) - f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_so, native=False))) - f.write('\n') - - # soname - if (debug_soname is not None) and 
os.path.exists( os.path.join(os.path.dirname(debug_so_full), debug_soname) ): - f.write('\tIMPORTED_SONAME_DEBUG "%s"\n' % utils.get_norm_path(debug_soname, native=False)) - - if (release_soname is not None) and os.path.exists( os.path.join(os.path.dirname(release_so_full), release_soname) ): - f.write('\tIMPORTED_SONAME_RELEASE "%s"\n' % utils.get_norm_path(release_soname, native=False)) - - if (relwithdebinfo_soname is not None) and os.path.exists( os.path.join(os.path.dirname(relwithdebinfo_so_full), relwithdebinfo_soname) ): - f.write('\tIMPORTED_SONAME_RELWITHDEBINFO "%s"\n' % utils.get_norm_path(relwithdebinfo_soname, native=False)) - - if (minsizerel_soname is not None) and os.path.exists( os.path.join(os.path.dirname(minsizerel_so_full), minsizerel_soname) ): - f.write('\tIMPORTED_SONAME_MINSIZEREL "%s"\n' % utils.get_norm_path(minsizerel_soname, native=False)) - - f.write(')\n') - - if ('static' in platform_info) and (target != 'dummy'): - - static = platform_info['static'] - - if not self.check_parts_exists(work_base, package, target, static, [('lib', True)]): - errors += 1 - - debug_lib = self.search_library_noexcept(work_base, static, 'lib') - release_lib = self.search_library_noexcept(work_base, static, 'lib') - relwithdebinfo_lib = self.search_library_noexcept(work_base, static, 'lib') - minsizerel_lib = self.search_library_noexcept(work_base, static, 'lib') - - if add_3rdparty_dependencies: - # register target - f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, target_lower)) - - f.write('ADD_LIBRARY(%s STATIC IMPORTED)\n' % target_lower) - f.write('SET_PROPERTY(TARGET %s APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)\n' % target_lower) - f.write('SET_TARGET_PROPERTIES(%s PROPERTIES\n' % target_lower) - - # lib - f.write('\tIMPORTED_LOCATION_DEBUG "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(debug_lib, native=False))) - f.write('\tIMPORTED_LOCATION_RELEASE "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(release_lib, native=False))) - f.write('\tIMPORTED_LOCATION_RELWITHDEBINFO "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(relwithdebinfo_lib, native=False))) - f.write('\tIMPORTED_LOCATION_MINSIZEREL "${_DIR}/%s/%s"\n' % (plat, utils.get_norm_path(minsizerel_lib, native=False))) - - f.write(')\n') - - if install_3rdparty_dependencies and (target != 'dummy'): - f.write('cmaki_install_3rdparty(%s)\n' % target_lower) - f.write('\n') - - if self.has_library(platform_info) and (target != 'dummy'): - f.write('endif()\n\n') - - # print includes - if len(includes_set) > 0: - for d in list(set(includes_set)): - f.write('list(APPEND %s_INCLUDE_DIRS ${_DIR}/%s)\n' % (package_upper, d)) - - f.write('\n') - - if len(definitions_set) > 0: - for d in list(set(definitions_set)): - f.write('add_definitions(%s)\n' % d) - f.write('\n') - - if len(system_depends_set) > 0: - f.write('# begin system depends\n') - for sd in list(set(system_depends_set)): - f.write('list(APPEND %s_LIBRARIES %s)\n' % (package_upper, sd)) - f.write('# end system depends\n') - - # if self.get_generate_find_package(): - # f.write('# Depends of %s (%s)\n' % (self.get_package_name(), self.get_version())) - # for dep in self.get_depends_raw(): - # package_name = dep.get_package_name() - # if package_name not in depends_set: - # if dep.have_any_in_target(plat, 'dynamic', compiler_replace_maps): - # f.write('cmaki_find_package(%s)\n' % (package_name)) - # else: - # f.write('# cmaki_find_package(%s) # static package\n' % (package_name)) - # depends_set.add(package_name) - # 
f.write('\n') - - logging.info('----------------------------------------------------') - if self.user_parameters.fast: - logging.debug('skipping for because is in fast mode: "generate_cmakefiles"') - break - - return errors - - - def show_environment_vars(self, env_modified): - package = self.get_package_name() - logging.debug('------- begin print environment variables for compile %s ---------' % package) - for key, value in sorted(env_modified.items()): - logging.debug("%s=%s" % (key, value)) - logging.debug('------- end print environment variables for compile %s -----------' % package) - - - def get_first_environment(self, compiler_replace_maps): - for plat in platforms: - for _, _, _, _, _, env_modified, _ in self.compiler_iterator(plat, compiler_replace_maps): - return env_modified - return os.environ.copy() - - - def safe_system(self, cmd, compiler_replace_maps): - return utils.safe_system(cmd, env=self.get_first_environment(compiler_replace_maps)) - - - def remove_packages(self): - # remove packages before - for plat in platforms: - prefix_package = os.path.join(self.user_parameters.prefix, '%s.tar.gz' % self.get_workspace(plat)) - prefix_package_cmake = os.path.join(self.user_parameters.prefix, '%s-cmakelib-%s.tar.gz' % (self.get_base_folder(), sys.platform)) - prefix_folder_cmake = os.path.join(self.user_parameters.third_party_dir, self.get_base_folder()) - logging.info("preremoving package %s" % prefix_package) - logging.info("preremoving package cmakefiles %s" % prefix_package_cmake) - logging.info("preremoving folder cmakefiles %s" % prefix_folder_cmake) - utils.tryremove(prefix_package) - utils.tryremove(prefix_package_cmake) - utils.tryremove_dir(prefix_folder_cmake) - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt deleted file mode 100644 index a7a3475..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/unittest/CMakeLists.txt +++ /dev/null @@ -1,30 +0,0 @@ -PROJECT(UNITEST_CMAKI_GENERATOR_${CMAKI_PLATFORM}_${CMAKE_BUILD_TYPE} CXX) -cmake_minimum_required(VERSION 3.0) - -include(cmaki) - -get_filename_component(BASEDIR "${CMAKE_CURRENT_LIST_FILE}" PATH) -set(CMAKE_INSTALL_PREFIX ${CMAKE_CURRENT_BINARY_DIR}) -set(EXECUTABLE_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") -set(LIBRARY_OUTPUT_PATH "${CMAKE_INSTALL_PREFIX}") - -foreach(PACKAGE_ITER ${FIND_PACKAGES}) - string(TOUPPER ${PACKAGE_ITER} PACKAGE_UPPER) - string(REGEX REPLACE "-" "_" PACKAGE_UPPER ${PACKAGE_UPPER}) - include("${DEPENDS_PATH}/3rdpartyversions/${PACKAGE_ITER}.cmake") - message("find_package in test: ${PACKAGE_UPPER}, version: ${${PACKAGE_UPPER}_REQUIRED_VERSION}") - cmaki_find_package(${PACKAGE_ITER} ${${PACKAGE_UPPER}_REQUIRED_VERSION}) -endforeach() -message("include dirs: ${CMAKI_INCLUDE_DIRS}") -message("libs to link in test: ${CMAKI_LIBRARIES}") - -foreach(INCLUDE_DIR ${CMAKI_INCLUDE_DIRS}) - include_directories(${INCLUDE_DIR}) -endforeach() -add_executable(test_${CMAKI_PLATFORM} ${UNITTEST_PATH}) -target_link_libraries(test_${CMAKI_PLATFORM} ${CMAKI_LIBRARIES}) -install(TARGETS test_${CMAKI_PLATFORM} DESTINATION "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") - -enable_testing() -add_test(NAME test_cmake_${CMAKI_PLATFORM} COMMAND test_${CMAKI_PLATFORM} WORKING_DIRECTORY "${CMAKE_INSTALL_PREFIX}/${CMAKE_BUILD_TYPE}") - diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py deleted file mode 100644 index 034813c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import logging -import utils -from third_party import platforms - - -def upload(node, parameters, compiler_replace_maps): - - if parameters.server is None: - logging.warning('parameter --server is mandatory for upload, skipping upload') - else: - # pack tar.gz binaries - for plat in platforms: - prefix_package = os.path.join(parameters.prefix, '%s.tar.gz' % node.get_workspace(plat)) - if not os.path.isfile(prefix_package): - logging.error('error dont exitsts: {}'.format(prefix_package)) - return False - command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package) - node.ret += abs(utils.safe_system(command)) - - if node.ret != 0: - return False - - # pack cmakefiles - if not parameters.no_packing_cmakefiles: - for plat in platforms: - base_folder = node.get_base_folder() - prefix_package_cmake = os.path.join(parameters.prefix, '%s-%s-cmake.tar.gz' % (base_folder, plat)) - if not os.path.isfile(prefix_package_cmake): - logging.error('error dont exitsts: {}'.format(prefix_package_cmake)) - return False - command = "python upload_package.py --url=%s/upload.php --filename=%s" % (parameters.server, prefix_package_cmake) - node.ret += abs(utils.safe_system(command)) - - return True diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py deleted file mode 100644 index 1d57c34..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/upload_package.py +++ /dev/null @@ -1,48 +0,0 @@ -import os -import sys -import logging -# import urllib2 -import argparse -import logging -# import poster -import requests - -logger = logging.getLogger(__name__) - -if __name__ == '__main__': - parser = argparse.ArgumentParser() - parser.add_argument('--url', required=True, dest='url', help='url') - parser.add_argument('--filename', required=True, dest='filename', help='filename') - parser.add_argument('--field', dest='field', help='field name', default='uploaded') - parameters = parser.parse_args() - - if not os.path.exists(parameters.filename): - logging.error('dont exists %s' % parameters.filename) - sys.exit(1) - - with open(parameters.filename, 'rb') as f: - try: - response = requests.post(parameters.url, files={parameters.field: f}) - if response.status_code == 200: - sys.exit(0) - else: - logger.error('Error uploading file {} to {}'.format(parameters.filename, parameters.url)) - sys.exit(0) - except Exception as e: - logger.error('Exception uploading file {} to {}'.format(parameters.filename, parameters.url)) - sys.exit(0) - - # # Register the streaming http handlers with urllib2 - # poster.streaminghttp.register_openers() - # - # with open(parameters.filename, "rb") as f: - # datagen, headers = poster.encode.multipart_encode({parameters.field: f}) - # # Create the Request object - # request = urllib2.Request(parameters.url, datagen, headers) - # # Actually do the request, and get the response - # handler = urllib2.urlopen(request) - # logging.info( handler.read() ) - # if handler.getcode() == 
200: - # sys.exit(0) - # else: - # sys.exit(1) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py deleted file mode 100644 index 767d218..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_generator/utils.py +++ /dev/null @@ -1,531 +0,0 @@ -import os -import re -import sys -import shutil -import logging -import glob -import subprocess -import tarfile -import zipfile -import time -import contextlib -import hashlib -import yaml -import json -import errno -import multiprocessing -import fnmatch -from requests import get # to make GET request -from distutils.spawn import find_executable -try: - import bz2 - python_has_bz2 = True -except ImportError: - logging.debug('python module bz2 built-in is not available') - python_has_bz2 = False - - -class NotFoundProgram(Exception): - def __init__(self, msg): - self._msg = msg - def __repr__(self): - return "%s" % self._msg - - -def is_windows(): - return sys.platform.startswith("win") - - -def smart_merge(dict1, dict2): - assert(dict1 is not None) - assert(dict2 is not None) - for key, value in dict2.items(): - if isinstance(value, dict): - try: - dict1[key].update(value) - except KeyError: - dict1[key] = value - elif isinstance(value, list): - try: - dict1[key] += value - except KeyError: - dict1[key] = value - else: - dict1[key] = value - return dict1 - - -def apply_replaces(element, dictionary): - if isinstance(element, dict): - new = {} - for k,v in element.items(): - new[k] = apply_replaces(v, dictionary) - return new - elif isinstance(element, list): - new = [] - for e in element: - new.append( apply_replaces(e, dictionary) ) - return new - elif isinstance(element, bool): - return element - elif element is not None: - new_element = str(element) - for k,v in dictionary.items(): - # find in original, not in replaced - if str(element).find(k) != -1: - new_element = new_element.replace(k, v) - return new_element - else: - return None - - -def apply_replaces_vars(element, dictionary): - newdict = {} - for k,v in dictionary.items(): - newdict['$%s' % k] = v - newdict['${%s}' % k] = v - return apply_replaces(element, newdict) - - -def tryremove(filename): - try: - logging.debug('Removing file %s' % (filename)) - os.remove(filename) - except OSError: - pass - - -def _tryremove_dir(directory): - i = 0 - tries = 3 - while os.path.isdir(directory): - try: - shutil.rmtree(directory) - if not os.path.exists(directory): - i = tries + 1 - except OSError: - logging.debug('Fail removing %s. 
Retry %d/%d' % (directory, i + 1, tries)) - if i < tries: - time.sleep(1) - else: - raise Exception("Fail removing %s" % os.path.abspath(directory)) - finally: - i += 1 - - -def tryremove_dir(source): - logging.debug('Removing directory %s' % (source)) - if sys.platform.startswith('win'): - if os.path.isdir(source) and safe_system('rd /s /q %s' % source) != 0: - raise Exception('Fail removing %s' % source) - else: - _tryremove_dir(source) - - -def tryremove_dir_empty(source): - try: - os.rmdir(source) - except OSError as ex: - if ex.errno != errno.ENOTEMPTY: - logging.debug('Removing empty directory %s' % (source)) - - -def download_from_url(url, file_name): - with open(file_name, "wb") as file: - response = get(url) - file.write(response.content) - - -def setup_logging(level, logname): - format_console_log = '%(asctime)s %(levelname)-7s %(message)s' - format_date = '%H-%M:%S' - dirlog = os.path.dirname(logname) - if dirlog != '': - trymkdir(dirlog) - logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - if(len(logging.root.handlers) == 1): - logging.root.removeHandler( logging.root.handlers[0] ) - handler = logging.StreamHandler() - handler.setLevel(level) - handler.setFormatter(logging.Formatter(format_console_log, format_date)) - logger.addHandler(handler) - handler2 = logging.FileHandler(logname) - handler2.setLevel(logging.DEBUG) - handler2.setFormatter(logging.Formatter(format_console_log, format_date)) - logger.addHandler(handler2) - - -def prompt_yes_no(default = False): - # raw_input returns the empty string for "enter" - yes = set(['yes','y', 'ye', '']) - no = set(['no','n']) - - choice = raw_input().lower() - if choice in yes: - return True - elif choice in no: - return False - else: - sys.stdout.write("Please respond with 'yes' or 'no'") - return default - - -def show_element(element, deep = 0): - if isinstance(element, dict): - for k,v in element.items(): - logging.info("%s<%s>" % ('\t'*deep, k)) - show_element(v, deep + 1) - elif isinstance(element, list): - for e in element: - show_element(e, deep + 1) - else: - logging.info('%s%s' % ('\t'*deep, element)) - - - -def rec_glob(rootdir, pattern): - - # logging.info('---> {} [START]'.format(rootdir)) - result = [] - for root, dirs, files in os.walk(rootdir): - # logging.info('---> {}'.format(root)) - for file in files: - # logging.info('---> {}'.format(file)) - if fnmatch.fnmatch(file, pattern): - # logging.info('---> {} [MATCH]'.format(file)) - result.append(os.path.join(root, file)) - return result - - -def trymkdir(directory): - if not os.path.exists( directory ): - os.makedirs( directory ) - - -def move_folder_recursive(source, destiny): - if not os.path.exists(source): - raise Exception('Error in move_folder_recursive: source not exists: %s' % source) - logging.debug('move recursive from {} to {}'.format(source, destiny)) - for archive in os.listdir(source): - # ignore some stuff - if archive.startswith('.git') or archive.startswith('.svn'): - continue - archive2 = os.path.join(source, archive) - destiny2 = os.path.join(destiny, archive) - if(os.path.isdir(archive2)): - move_folder_recursive(archive2, destiny2) - else: - if os.path.isfile(destiny2): - logging.debug('Replacing file %s' % destiny2) - tryremove(destiny2) - # try create destiny directory - trymkdir( os.path.dirname(destiny2) ) - # move file - shutil.move(archive2, destiny2) - - -def copy_folder_recursive(source, destiny): - if not os.path.exists(source): - raise Exception('Error in copy_folder_recursive: source not exists: %s' % source) - for archive 
in os.listdir(source): - # ignore some stuff - if archive.startswith('.git') or archive.startswith('.svn'): - continue - archive2 = os.path.join(source, archive) - destiny2 = os.path.join(destiny, archive) - if(os.path.isdir(archive2)): - copy_folder_recursive(archive2, destiny2) - else: - if os.path.isfile(destiny2): - logging.debug('Replacing file %s' % destiny2) - tryremove(destiny2) - # try create destiny directory - trymkdir( os.path.dirname(destiny2) ) - # copy file (and stat) - shutil.copy2(archive2, destiny2) - - -def extract_file(path, to_directory, environment): - - # convert to absolute - logging.debug('Extract file %s' % path) - path = os.path.abspath(path) - - if path.endswith('.zip'): - opener, mode = zipfile.ZipFile, 'r' - # elif path.endswith('.tar.gz') or path.endswith('.tgz'): - # opener, mode = tarfile.open, 'r:gz' - elif path.endswith('.tar.gz') or path.endswith('.tgz'): - # python have problems with big .tar.gz in linux -_- - if is_windows(): - with working_directory(to_directory): - logging.debug('Using cmake -E tar for package: %s' % path) - ret = safe_system('cmake -E tar zxvf %s' % path, env=environment) - ok = (ret == 0) - # be careful, early return - return ok - else: - with working_directory(to_directory): - logging.debug('Using system tar for package: %s' % path) - ret = safe_system('tar zxvf %s' % path, env=environment) - ok = (ret == 0) - # be careful, early return - return ok - elif path.endswith('.tar.bz2') or path.endswith('.tbz'): - # python have problems with big .tar.bz2 in windows - if is_windows(): - with working_directory(to_directory): - logging.debug('Using cmake -E tar for package: %s' % path) - ret = safe_system('cmake -E tar xvf %s' % path, env=environment) - ok = (ret == 0) - # be careful, early return - return ok - else: - if python_has_bz2: - opener, mode = tarfile.open, 'r:bz2' - else: - logging.warning('Not using python-bz2 module for uncompress: %s in %s' % (path, to_directory)) - with working_directory(to_directory): - logging.debug('Using bunzip2 and tar for package: %s' % path) - ret = safe_system('bunzip2 -c %s | tar xvf -' % path, env=environment) - ok = (ret == 0) - - # be careful, early return - return ok - elif path.endswith('.tar.xz'): - # needd "xz" - with working_directory(to_directory): - ret = safe_system('tar xpvf %s' % path, env=environment) - ok = (ret == 0) - return ok - else: - raise ValueError("Could not extract `%s` as no appropriate extractor is found" % path) - - # create directory if not exists - trymkdir(to_directory) - with working_directory(to_directory): - file = opener(path, mode) - try: - file.extractall() - finally: - file.close() - return True - - -# Copy Paste from run_tests (handler.py) -def detect_ncpus(): - return multiprocessing.cpu_count() - - -def get_norm_path(pathfile, native=True): - if native and is_windows(): - return pathfile.replace('/', '\\') - else: - return pathfile.replace('\\', '/') - - -def get_filename_no_ext(filename): - return os.path.splitext(filename)[0] - - -def get_soname(libfile, env=os.environ.copy()): - - if is_windows(): - logging.error('get_soname is not supported in windows') - return - - cmd = ['objdump', "-p", libfile] - for line in get_stdout(cmd, env, 'objdump'): - if line.find('SONAME') != -1: - return line.split()[1] - raise Exception('No soname detected in %s' % libfile) - - -def get_needed(libfile, env=os.environ.copy()): - - if is_windows(): - logging.error('get_needed is not supported in windows') - return - - cmd = ['objdump', "-p", libfile] - for line in 
get_stdout(cmd, env, 'objdump'): - if line.find('NEEDED') != -1: - yield line.split()[1] - - -def get_real_home(): - if sys.platform.startswith("sun"): - # problems launching subshell in solaris - return os.environ['HOME'] - elif sys.platform.startswith("linux"): - cmd = "REAL_HOME=$(cd $HOME && pwd -P) && echo $REAL_HOME" - for line in get_stdout(cmd): - return line - return os.environ['HOME'] - else: - return os.path.expanduser('~') - - -@contextlib.contextmanager -def working_directory(path): - prev_cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(prev_cwd) - - -def walklevel(some_dir, level=1): - ''' - os.walk() with max level - ''' - some_dir = some_dir.rstrip(os.path.sep) - if not os.path.isdir(some_dir): - logging.error('%s is not folder' % some_dir) - sys.exit(1) - - num_sep = some_dir.count(os.path.sep) - for root, dirs, files in os.walk(some_dir): - yield root, dirs, files - num_sep_this = root.count(os.path.sep) - if num_sep + level <= num_sep_this: - del dirs[:] - - -def get_revision_svn(repo, path_svn='svn', env=os.environ.copy()): - ''' - This command need svn in PATH - ''' - if os.path.exists(repo): - with working_directory(repo): - env_copy = env.copy() - svn_bin = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'bin')) - svn_lib = os.path.abspath(os.path.join(os.path.dirname(path_svn), '..', 'lib')) - env_copy['PATH'] = "%s:%s" % (svn_bin, env_copy['PATH']) - env_copy['LD_LIBRARY_PATH'] = "%s:%s" % (svn_lib, env_copy['LD_LIBRARY_PATH']) - cmd = "%s info" % path_svn - p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines=True, env=env_copy) - data, err = p.communicate() - - # clean stdout - data = [line.strip() for line in data.split('\n') if line.strip()] - - for line in data: - separator = 'Last Changed Rev: ' - if line.startswith(separator): - return int(line[len(separator):]) - else: - separator = 'Revisi.n del .ltimo cambio: ' - if re.match(separator, line) is not None: - return int(line[len(separator):]) - return -1 - - -def verbose(parameters, msg): - if parameters.verbose > 0: - logging.info(msg) - - -def superverbose(parameters, msg): - if parameters.verbose > 1: - logging.info(msg) - - -def hyperverbose(parameters, msg): - if parameters.verbose > 2: - logging.info(msg) - - -def md5sum(filename, blocksize=65536): - hash = hashlib.md5() - with open(filename, "rb") as f: - for block in iter(lambda: f.read(blocksize), b""): - hash.update(block) - return hash.hexdigest() - - -def serialize(pythonDict, fileName): - serialize_json(pythonDict, fileName) - - -def deserialize(fileName): - return deserialize_json(fileName) - - -def serialize_yaml(pythonDict, fileName): - serialiedData = yaml.dump(pythonDict, default_flow_style=True) - with open(fileName, 'wt') as f: - f.write(serialiedData) - - -def deserialize_yaml(fileName): - with open(fileName, 'rt') as f: - stringData = f.read() - return yaml.load(stringData) - - -def serialize_json(pythonDict, fileName): - serialiedData = json.dumps(pythonDict) - with open(fileName, 'wt') as f: - f.write(serialiedData) - - -def deserialize_json(fileName): - with open(fileName, 'rt') as f: - stringData = f.read() - return json.loads(stringData) - - -def get_stdout(cmd, env=os.environ.copy(), program_required=None): - if isinstance(cmd, list): - cmd = ' '.join(cmd) - # logging.debug('launch cmd: %s' % cmd) - - # search executable - ok = True - if program_required is not None: - ok = find_executable(program_required, env['PATH']) - if ok: - p = 
subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) - data, err = p.communicate() - data = [line.strip() for line in data.split('\n') if line.strip()] - for line in data: - # logging.debug('[out cmd] %s' % line) - yield line - else: - raise NotFoundProgram('Not found program %s, for execute: %s' % (program_required, cmd)) - - -def safe_system(cmd, env=None): - if env is None: - env = os.environ.copy() - logging.debug("exec command: %s" % cmd) - - if 'CMAKI_PRINT' in env: - try: - return subprocess.call('{}'.format(cmd), env=env, shell=True) - except OSError as e: - logging.warning(str(e)) - return -1 - else: - p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines=True, env=env) - data, err = p.communicate() - data = [line for line in data.split('\n')] - if p.returncode != 0: - logging.error("begin@output: %s" % cmd) - for line in data: - if p.returncode != 0: - logging.warning(line) - else: - logging.debug(line) - if p.returncode != 0: - logging.error("end@output: %s" % cmd) - return p.returncode - - -if __name__ == '__main__': - print(rec_glob('.', '*.yml')) - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml deleted file mode 100644 index cf179bc..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: c -services: docker -os: linux -env: - - IMAGE=linux-x64 - # - IMAGE=windows-x86 - - IMAGE=windows-x64 - # - IMAGE=linux-x86 - - IMAGE=android-arm -# - IMAGE=browser-asmjs -script: - - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/docker.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt deleted file mode 100644 index 5cd8b41..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/CMakeLists.txt +++ /dev/null @@ -1,6 +0,0 @@ -project(cmaki_identifier_project CXX) -cmake_minimum_required(VERSION 3.0) -set(CMAKE_CXX_STANDARD 14) -include_directories(boostorg_predef/include) -enable_testing() -add_subdirectory(tests) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md deleted file mode 100644 index e49baa2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# identify your platform - -gcc 4.9 / clang 3.6: [![Build Status](https://travis-ci.org/makiolo/cmaki_identifier.svg?branch=master)](https://travis-ci.org/makiolo/cmaki_identifier) - -MSVC 2015: [![Build status](https://ci.appveyor.com/api/projects/status/tljl8xip6m8joi86?svg=true)](https://ci.appveyor.com/project/makiolo/cmaki-identifier) - -## travis: -- linux_64_glibc_2.19-gcc_4-debug -- linux_64_glibc_2.19-gcc_4-release -- linux_64_glibc_2.19-clang_3-debug -- linux_64_glibc_2.19-clang_3-release -- macos_64-clang_7-debug -- macos_64-clang_7-release - -## appveyor: -- windows_32-msvc_2015-debug -- windows_32-msvc_2015-release -- windows_64-msvc_2015-debug -- windows_64-msvc_2015-release diff 
--git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh deleted file mode 100644 index ebffa54..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_emulator.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# if [ $# -e 0 ]; then -# echo $0: [ERROR], usage: ./cmaki_emulator.sh -# exit 1 -# fi - -export DIRPROGRAM="$( cd "$( dirname "$1" )" >/dev/null && pwd )" -export BASENAMEPROGRAM=$(basename "$1") -export CMAKI_PWD="${CMAKI_PWD:-$(pwd)}" -export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" -export LD_LIBRARY_PATH=$(pwd):$LD_LIBRARY_PATH - -if [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x86" ]]; then - cd ${DIRPROGRAM} - wine ./$BASENAMEPROGRAM "${@:2}" -elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/windows-x64" ]]; then - cd ${DIRPROGRAM} - wine ./$BASENAMEPROGRAM "${@:2}" -elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/android-arm" ]]; then - cd ${DIRPROGRAM} - unset LD_LIBRARY_PATH - qemu-arm -L /usr/arm-linux-gnueabi ./$BASENAMEPROGRAM "${@:2}" -elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv6" ]]; then - cd ${DIRPROGRAM} - qemu-arm ./$BASENAMEPROGRAM "${@:2}" -elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/linux-armv7" ]]; then - cd ${DIRPROGRAM} - qemu-arm ./$BASENAMEPROGRAM "${@:2}" -elif [[ "$DEFAULT_DOCKCROSS_IMAGE" = "makiolo/browser-asmjs" ]]; then - cd ${DIRPROGRAM} - nodejs ./$BASENAMEPROGRAM "${@:2}" -else - $CMAKI_EMULATOR "$1" "${@:2}" -fi - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake deleted file mode 100644 index 7a50cc9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(PLATFORM "") -set(dirscript ${CMAKE_CURRENT_LIST_DIR}) -IF(WIN32) - set(executable cmaki_identifier.exe) -else() - set(executable cmaki_identifier.sh) -endif() -execute_process(COMMAND ${dirscript}/${executable} - OUTPUT_VARIABLE PLATFORM - OUTPUT_STRIP_TRAILING_WHITESPACE) -MESSAGE("${PLATFORM}") - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh deleted file mode 100755 index 371107b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/cmaki_identifier.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -export DIRSCRIPT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_PWD="${CMAKI_PWD:-$DIRSCRIPT}/.." 
-export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export CMAKI_EMULATOR="${CMAKI_EMULATOR:-}" - -if [ -f "cmaki_identifier.exe" ]; then - $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier.exe -else - $DIRSCRIPT/cmaki_emulator.sh $CMAKI_INSTALL/cmaki_identifier -fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do deleted file mode 100644 index 4452ece..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/npm-do +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -function npm-do { (PATH=$(npm bin):$PATH; eval $@;) } -# set -x PATH ./node_modules/.bin $PATH diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json deleted file mode 100644 index ecdd629..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "cmaki_identifier", - "version": "1.0.0", - "description": "identify your platform", - "scripts": { - "clean": "cmaki clean", - "setup": "cmaki setup", - "compile": "cmaki compile", - "install": "cmaki setup && cmaki compile", - "test": "cmaki test", - "upload": "cmaki upload" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/makiolo/cmaki_identifier.git" - }, - "keywords": [ - "c++", - "identifier" - ], - "author": "Ricardo Marmolejo García", - "license": "MIT", - "bugs": { - "url": "https://github.com/makiolo/cmaki_identifier/issues" - }, - "homepage": "https://github.com/makiolo/cmaki_identifier#readme", - "devDependencies": { - "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd deleted file mode 100644 index 36bd277..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd +++ /dev/null @@ -1,7 +0,0 @@ -@echo off -if exist "boostorg_predef" ( - rmdir /s /q boostorg_predef -) -git clone -q https://github.com/boostorg/predef.git boostorg_predef - -..\cmaki_scripts\setup.cmd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh deleted file mode 100644 index 4e1af5c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/setup.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -if [ -d "boostorg_predef" ]; then - rm -Rf boostorg_predef -fi -git clone -q https://github.com/boostorg/predef.git boostorg_predef - -../cmaki_scripts/setup.sh diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt deleted file mode 100644 index b806a9b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt +++ /dev/null @@ -1,33 +0,0 @@ -if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") -endif() - 
-add_executable(cmaki_identifier cmaki_identifier.cpp) - -install(TARGETS cmaki_identifier DESTINATION $ENV{CMAKI_INSTALL}) -install(FILES ../cmaki_identifier.cmake DESTINATION $ENV{CMAKI_INSTALL}) -install(PROGRAMS ../cmaki_identifier.sh DESTINATION $ENV{CMAKI_INSTALL}) -install(PROGRAMS ../cmaki_emulator.sh DESTINATION $ENV{CMAKI_INSTALL}) -add_test( - NAME all - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -add_test( - NAME os - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -add_test( - NAME arch - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -add_test( - NAME compiler - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -set_tests_properties(os PROPERTIES ENVIRONMENT "CMAKI_INFO=OS") -set_tests_properties(arch PROPERTIES ENVIRONMENT "CMAKI_INFO=ARCH") -set_tests_properties(compiler PROPERTIES ENVIRONMENT "CMAKI_INFO=COMPILER") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp deleted file mode 100644 index 6cb91e7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp +++ /dev/null @@ -1,345 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#ifdef __EMSCRIPTEN__ -#include -#endif - -#define STR_HELPER(x) #x -#define STR(x) STR_HELPER(x) - -#ifdef _WIN32 - -// problems with variadic in windows -std::string get_environment(const char* varname, const char* default_) -{ - char* varname_str = getenv(varname); - std::string value_str; - if(varname_str == NULL) - value_str = default_; - else - value_str = varname_str; - return value_str; -} - -#else - -template -std::string get_environment(T default_) -{ - return default_; -} - -template -std::string get_environment(T varname, Args ... others) -{ - char* varname_str = getenv(varname); - std::string value_str; - if(varname_str == NULL) - value_str = get_environment(others...); - else - value_str = varname_str; - return value_str; -} - -#endif - -int main() -{ -#ifdef __EMSCRIPTEN__ - #define OPERATIVE_SYSTEM "javascript" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_WINDOWS - #define OPERATIVE_SYSTEM "windows" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_ANDROID - #define OPERATIVE_SYSTEM "android" - #define OPERATIVE_RESTRICTION "_api_" STR(__ANDROID_API__) -#elif BOOST_OS_LINUX - #define OPERATIVE_SYSTEM "linux" - #ifdef __GLIBC__ - #define OPERATIVE_RESTRICTION "_glibc_" STR(__GLIBC__) "." 
STR(__GLIBC_MINOR__) - #else - #define OPERATIVE_RESTRICTION "" - #endif -#elif BOOST_OS_MACOS - #define OPERATIVE_SYSTEM "macos" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_AIX - #define OPERATIVE_SYSTEM "aix" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_AMIGAOS - #define OPERATIVE_SYSTEM "amigaos" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_BEOS - #define OPERATIVE_SYSTEM "beos" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_BSD - #if BOOST_OS_BSD_DRAGONFLY - #define OPERATIVE_SYSTEM "dragonfly_bsd" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_FREE - #define OPERATIVE_SYSTEM "freebsd" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_BSDI - #define OPERATIVE_SYSTEM "bsdios" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_NET - #define OPERATIVE_SYSTEM "netbsd" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_OPEN - #define OPERATIVE_SYSTEM "openbsd" - #define OPERATIVE_RESTRICTION "" - #endif -#elif BOOST_OS_CYGWIN - #define OPERATIVE_SYSTEM "cygwin" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_HPUX - #define OPERATIVE_SYSTEM "hpux" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_IRIX - #define OPERATIVE_SYSTEM "irix" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_OS400 - #define OPERATIVE_SYSTEM "os400" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_QNX - #define OPERATIVE_SYSTEM "qnx" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_SOLARIS - #define OPERATIVE_SYSTEM "solaris" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_UNIX - #define OPERATIVE_SYSTEM "unix" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_SVR4 - #define OPERATIVE_SYSTEM "svr4" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_VMS - #define OPERATIVE_SYSTEM "vms" - #define OPERATIVE_RESTRICTION "" -#else - #define OPERATIVE_SYSTEM "unknown_so" - #define OPERATIVE_RESTRICTION "" -#endif - -#if BOOST_ARCH_X86 - #if BOOST_ARCH_X86_32 - #define ARCHITECTURE "32" - #elif BOOST_ARCH_X86_64 - #define ARCHITECTURE "64" - #else - #define ARCHITECTURE "unknown_arch" - #endif -#elif BOOST_ARCH_ARM - #define ARCHITECTURE "arm" -#elif BOOST_ARCH_ALPHA - #define ARCHITECTURE "alpha" -#elif BOOST_ARCH_BLACKFIN - #define ARCHITECTURE "blackfin" -#elif BOOST_ARCH_CONVEX - #define ARCHITECTURE "convex" -#elif BOOST_ARCH_IA64 - #define ARCHITECTURE "ia64" -#elif BOOST_ARCH_M68K - #define ARCHITECTURE "m68k" -#elif BOOST_ARCH_MIPS - #define ARCHITECTURE "mips" -#elif BOOST_ARCH_PARISK - #define ARCHITECTURE "parisk" -#elif BOOST_ARCH_PPC - #define ARCHITECTURE "ppc" -#elif BOOST_ARCH_PYRAMID - #define ARCHITECTURE "pyramid" -#elif BOOST_ARCH_RS6000 - #define ARCHITECTURE "rs6000" -#elif BOOST_ARCH_SPARC - #define ARCHITECTURE "sparc" -#elif BOOST_ARCH_SH - #define ARCHITECTURE "sh" -#elif BOOST_ARCH_SYS370 - #define ARCHITECTURE "sys370" -#elif BOOST_ARCH_SYS390 - #define ARCHITECTURE "sys390" -#elif BOOST_ARCH_Z - #define ARCHITECTURE "z" -#else - #define ARCHITECTURE "unknown_arch" -#endif - -#if BOOST_COMP_MSVC - #define COMPILER "msvc" - #if _MSC_VER == 1911 - #define COMPILER_RESTRICTION "_2017" - #elif _MSC_VER == 1910 - #define COMPILER_RESTRICTION "_2017" - #elif _MSC_VER == 1900 - #define COMPILER_RESTRICTION "_2015" - #elif _MSC_VER == 1800 - #define COMPILER_RESTRICTION "_2013" - #elif _MSC_VER == 1700 - #define COMPILER_RESTRICTION "_2012" - #elif _MSC_VER == 1600 - #define COMPILER_RESTRICTION "_2010" - #elif _MSC_VER == 1500 - #define COMPILER_RESTRICTION "_2008" - #elif _MSC_VER == 1400 - #define COMPILER_RESTRICTION "_2005" - #elif 
_MSC_VER == 1310 - #define COMPILER_RESTRICTION "_2003" - #else - #define COMPILER_RESTRICTION "_msc_ver_" STR(_MSC_VER) - #endif -#elif BOOST_COMP_GNUC - #define COMPILER "gcc" - #define COMPILER_RESTRICTION "_" STR(__GNUC__) -#elif BOOST_COMP_CLANG - #define COMPILER "clang" - #define COMPILER_RESTRICTION "_" STR(__clang_major__) -#elif BOOST_COMP_BORLAND - #define COMPILER "borland" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_COMO - #define COMPILER "comeau" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_DEC - #define COMPILER "dec" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_DIAB - #define COMPILER "diab" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_DMC - #define COMPILER "dmc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_SYSC - #define COMPILER "sysc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_EDG - #define COMPILER "edg" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_PATH - #define COMPILER "path" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_GCCXML - #define COMPILER "gccxml" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_GHS - #define COMPILER "ghs" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_HPACC - #define COMPILER "hpacc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_IAR - #define COMPILER "iar" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_IBM - #define COMPILER "ibm" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_INTEL - #define COMPILER "intel" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_KCC - #define COMPILER "kcc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_LLVM - #define COMPILER "llvm" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_HIGHC - #define COMPILER "highc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_MWERKS - #define COMPILER "mwerks" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_MRI - #define COMPILER "mri" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_MPW - #define COMPILER "mrw" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_PALM - #define COMPILER "palm" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_PGI - #define COMPILER "pgi" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_SGI - #define COMPILER "sgi" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_SUNPRO - #define COMPILER "sunpro" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_TENDRA - #define COMPILER "tendra" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_WATCOM - #define COMPILER "watcom" - #define COMPILER_RESTRICTION "" -#else - #define COMPILER "unknown_compiler" - #define COMPILER_RESTRICTION "" -#endif - - // structure (3 chunks joined with "-"): - // 1. platform (2 or 3 chunks joined with "_") - // 1.1. operative system (string but forbidden "_" and "-") - // 1.2. architecture (string but forbidden "_" and "-") - // 1.3. (optional) operative system restriction (is explanation and version joined with "_") - // 1.3.1. what is this restriction (string but forbidden "_" and "-") - // 1.3.2. version (1-4 chunks joined with ".") - // 2. compiler (1 or 2 chunks joined with "_") - // 2.1. compiler (string but forbidden "_" and "-") - // 2.2. (optional) compiler restriction (is version) - // 2.2.1. version (1-4 chunks joined with ".") - // 3. build mode (1 or 2 chunks joined with "_") - // 3.1. build_mode (string but forbidden "_" and "-") - // 3.2. 
(optional) build mode restrictions - - std::string build_mode = get_environment("MODE", "Debug"); - std::string cmaki_entropy = get_environment("CMAKI_ENTROPY", ""); - std::string cmaki_info = get_environment("CMAKI_INFO", "ALL"); - - std::transform(build_mode.begin(), build_mode.end(), build_mode.begin(), ::tolower); - std::transform(cmaki_entropy.begin(), cmaki_entropy.end(), cmaki_entropy.begin(), ::tolower); - - // TODO: mas consultas - // Arquitectura, sólo el numero: 32 o 64 - // Compilador: COMPILER + COMPILER_RESTRICTION - // Todo: OPERATIVE_SYSTEM + "_" + ARCHITECTURE + OPERATIVE_RESTRICTION + "-" + COMPILER + COMPILER_RESTRICTION + "-" + build_mode + cmaki_entropy - if(cmaki_info == "OS") - { - std::cout << OPERATIVE_SYSTEM - << std::endl; - } - else if(cmaki_info == "ARCH") - { - std::cout << ARCHITECTURE - << std::endl; - } - else if(cmaki_info == "COMPILER") - { - std::cout << COMPILER - << COMPILER_RESTRICTION - << std::endl; - } - else // if(cmaki_info == "ALL") - { - std::cout << OPERATIVE_SYSTEM - << "_" << ARCHITECTURE - << OPERATIVE_RESTRICTION - << "-" << COMPILER - << COMPILER_RESTRICTION - << "-" << build_mode; - if(cmaki_entropy.length() > 0) - { - std::cout << "-" << cmaki_entropy; - } - std::cout << std::endl; - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml deleted file mode 100644 index 44de95c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: c -services: docker -os: linux -script: - - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE deleted file mode 100644 index 53546c1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md deleted file mode 100644 index e227c42..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# cmaki_scripts -scripts for cmaki: compile, tests, upload .... - -# windows problems -``` -$ set PATH=%CD%\node_modules\cmaki_scripts;%PATH% -$ echo %PATHEXT% -.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC;.PY;.JS; -``` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd deleted file mode 100644 index 72202c8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd +++ /dev/null @@ -1,15 +0,0 @@ -@echo off -IF EXIST node_modules\cmaki ( - echo . -) else ( - md node_modules\cmaki - cd node_modules && git clone -q https://github.com/makiolo/cmaki.git && cd .. - cd node_modules/cmaki && rm -Rf .git && cd ..\.. -) -IF EXIST node_modules\cmaki_generator ( - echo . -) else ( - md node_modules\cmaki_generator - cd node_modules && git clone -q https://github.com/makiolo/cmaki_generator.git && cd .. - cd node_modules/cmaki_generator && rm -Rf .git && cd ..\.. -) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd deleted file mode 100644 index 0a2db63..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd +++ /dev/null @@ -1,40 +0,0 @@ -@echo off - -echo [0/3] preinstall -set PATH=%CMAKI_PWD%\node_modules\cmaki_scripts;%PATH% -env | sort - -powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/cmaki_depends.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" -if %errorlevel% neq 0 exit /b %errorlevel% - -if exist package.json ( - - echo [1/3] prepare - :: call ncu -u - npm cache clean --force - - echo [2/3] compile - npm install - if %errorlevel% neq 0 exit /b %errorlevel% - - echo [3/3] run tests - npm test - if %errorlevel% neq 0 exit /b %errorlevel% - -) else ( - - echo [1/3] prepare - if exist node_modules\cmaki_scripts (rmdir /s /q node_modules\cmaki_scripts) - powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/bootstrap.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" - if %errorlevel% neq 0 exit /b %errorlevel% - - echo [2/3] compile - call node_modules\cmaki_scripts\install.cmd - if %errorlevel% neq 0 exit /b %errorlevel% - - echo [3/3] run tests - call node_modules\cmaki_scripts\test.cmd - if %errorlevel% neq 0 exit /b %errorlevel% - -) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh deleted file mode 100644 index 066caae..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/ci.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash -set -e - -export 
NPP_CACHE="${NPP_CACHE:-FALSE}" - -env | sort - -if [[ -d "bin" ]]; then - rm -Rf bin -fi - -if [[ -d "artifacts" ]]; then - rm -Rf artifacts -fi - -if [[ -d "node_modules" ]]; then - rm -Rf node_modules -fi - -if [ -f "artifacts.json" ]; then - rm artifacts.json -fi - -if [ -f "package.json" ]; then - - echo [1/2] compile - npm install - - echo [2/2] run tests - npm test -else - echo [1/2] compile - ./node_modules/cmaki_scripts/setup.sh && ./node_modules/cmaki_scripts/compile.sh - - echo [2/2] run tests - ./node_modules/cmaki_scripts/test.sh -fi - -if [ -f "cmaki.yml" ]; then - echo [3/3] upload artifact - if [ -f "package.json" ]; then - npm run upload - else - ./node_modules/cmaki_scripts/upload.sh - fi -fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd deleted file mode 100644 index 5f83632..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd +++ /dev/null @@ -1,3 +0,0 @@ -@echo off -rd /s /q artifacts 2> NUL -rd /s /q coverage 2> NUL diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh deleted file mode 100755 index b204603..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/clean.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export CC="${CC:-gcc}" -export MODE="${MODE:-Debug}" -export COMPILER_BASENAME=$(basename ${CC}) - -if [ -d $COMPILER_BASENAME/$MODE ]; then - rm -Rf $COMPILER_BASENAME/$MODE -fi -if [ "$NPP_CACHE" == "FALSE" ]; then - rm -Rf artifacts 2> /dev/null -fi -rm -Rf coverage 2> /dev/null -rm -Rf gcc 2> /dev/null -rm -Rf clang 2> /dev/null - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd deleted file mode 100644 index 674bfd5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd +++ /dev/null @@ -1,22 +0,0 @@ -@ECHO OFF -SET DIRWORK=%~dp0 - -IF NOT EXIST "%NODE%" ( - IF DEFINED NODEHOME ( - IF EXIST "%NODEHOME%\node.exe" ( - SET NODE="%NODEHOME%\node.exe" - ) ELSE ( - ECHO Error: Missing node.exe from node home: "%NODEHOME%" - ) - ) ELSE ( - IF EXIST "C:\Program Files\nodejs\node.exe" ( - ECHO WARNING: Defaulting NODE configuration - SET NODE=C:\Program Files\nodejs\node.exe - SET NODEHOME=C:\Program Files\nodejs - ) ELSE ( - ECHO ERROR: NODE configuration unavailable! 
- ) - ) -) - -"%NODE%" %DIRWORK%\cmaki.js %* diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js deleted file mode 100755 index e204fd7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env node - -var os = require('os') -var fs = require('fs'); -var path = require('path') -var shelljs = require('shelljs'); -var is_win = (os.platform() === 'win32'); - -if(!process.env.CMAKI_PWD) -{ - if (fs.existsSync(path.join("..", "..", "node_modules", "npm-mas-mas"))) { - shelljs.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); - process.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); - } else { - shelljs.env['CMAKI_PWD'] = path.join(process.cwd()); - process.env['CMAKI_PWD'] = path.join(process.cwd()); - } -} -else -{ - shelljs.env['CMAKI_PWD'] = process.env['CMAKI_PWD']; -} - -if(!process.env.CMAKI_INSTALL) -{ - shelljs.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); - process.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); -} -else -{ - shelljs.env['CMAKI_INSTALL'] = process.env['CMAKI_INSTALL']; -} - -if(!process.env.NPP_SERVER) -{ - shelljs.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' - process.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' -} -else -{ - shelljs.env['NPP_SERVER'] = process.env['NPP_SERVER']; -} - -if(!process.env.NPP_CACHE) -{ - shelljs.env['NPP_CACHE'] = 'TRUE' - process.env['NPP_CACHE'] = 'TRUE' -} -else -{ - shelljs.env['NPP_CACHE'] = process.env['NPP_CACHE']; -} - -if(is_win) -{ - cmaki_identifier = 'cmaki_identifier.cmd' -} -else -{ - cmaki_identifier = 'cmaki_identifier.sh' -} - - -// no check in cmaki_identifier for avoid recursion -if( process.cwd().replace(/\\/g, "/").search("/cmaki_identifier") == -1 ) -{ - if(!fs.existsSync( path.join( process.env['CMAKI_INSTALL'], cmaki_identifier) )) - { - dir_identifier = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_identifier'); - - backup1 = shelljs.env['CMAKI_PWD']; - backup2 = process.env['CMAKI_PWD']; - - shelljs.env['CMAKI_PWD'] = dir_identifier; - process.env['CMAKI_PWD'] = dir_identifier; - - shelljs.cd( dir_identifier ); - - if (shelljs.exec('npm install').code !== 0) { - shelljs.echo('Error detecting compiler (compiling cmaki_identifier ...)'); - shelljs.exit(1); - } - - shelljs.env['CMAKI_PWD'] = backup1; - process.env['CMAKI_PWD'] = backup2; - } -} - -if(!process.env.MODE) -{ - shelljs.env['MODE'] = 'Debug'; - process.env['MODE'] = 'Debug'; -} -else -{ - shelljs.env['MODE'] = process.env['MODE']; -} - -function trim(s) -{ - return ( s || '' ).replace( /^\s+|\s+$/g, '' ); -} - -var environment_vars = []; -next_is_environment_var = false; -process.argv.forEach(function(val, index, array) -{ - if(next_is_environment_var) - { - environment_vars.push(val); - } - next_is_environment_var = (val == '-e'); -}); -environment_vars.forEach(function(val, index, array) -{ - var chunks = val.split("="); - if( chunks.length == 2 ) - { - shelljs.env[chunks[0]] = chunks[1]; - process.env[chunks[0]] = chunks[1]; - } - else - { - console.log("Error in -e with value: " + val); - } -}); - -//////////////////////////////////////////////////////////////////////////////// -// change cwd -shelljs.cd( process.env['CMAKI_PWD'] ); -//////////////////////////////////////////////////////////////////////////////// - - -var 
dir_script; -var script = process.argv[2]; -if (is_win) -{ - if(fs.existsSync(path.join(process.cwd(), script+".cmd"))) - { - dir_script = process.cwd(); - } - else - { - dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); - } -} -else -{ - if(fs.existsSync(path.join(process.cwd(), script+".sh"))) - { - dir_script = process.cwd(); - } - else - { - dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); - } -} - -if (is_win) -{ - script_execute = path.join(dir_script, script+".cmd"); - exists = fs.existsSync(script_execute); - caller_execute = "cmd /c "; - script_execute = script_execute.replace(/\//g, "\\"); -} -else -{ - script_execute = path.join(dir_script, script+".sh"); - exists = fs.existsSync(script_execute); - caller_execute = "bash "; - script_execute = script_execute.replace(/\\/g, "/"); -} - -console.log("Execute: " + caller_execute + script_execute); - -if(exists) -{ - var child = shelljs.exec(caller_execute + script_execute, {async:true, silent:true}, function(err, stdout, stderr) { - process.exit(err); - }); - child.stdout.on('data', function(data) { - console.log(trim(data)); - }); - child.stderr.on('data', function(data) { - console.log(trim(data)); - }); -} -else -{ - console.log("[error] dont exits: " + script_execute); - process.exit(1); -} - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd deleted file mode 100644 index 2b6cea5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd +++ /dev/null @@ -1,7 +0,0 @@ -@echo off - -pip install pyyaml -if %errorlevel% neq 0 exit /b %errorlevel% - -pip install poster -if %errorlevel% neq 0 exit /b %errorlevel% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh deleted file mode 100644 index e52dc93..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -if [[ "$OSTYPE" =~ ^linux ]]; then - curl -sL https://deb.nodesource.com/setup_8.x | sudo bash - - sudo apt install -y nodejs - sudo npm install -g npm - - # echo 'export PATH=$HOME/local/bin:$PATH' >> ~/.bashrc - # . ~/.bashrc - # mkdir ~/local - # mkdir ~/node-latest-install - # cd ~/node-latest-install - # curl http://nodejs.org/dist/node-latest.tar.gz | tar xz --strip-components=1 - # ./configure --prefix=~/local - # make install # ok, fine, this step probably takes more than 30 seconds... 
- # curl https://www.npmjs.org/install.sh | sh - # cd - - - sudo apt install -y lcov - sudo apt install -y cppcheck - sudo apt install -y libxaw7-dev # for OIS - sudo apt install -y libgl1-mesa-dev # flow glew - sudo apt install -y freeglut3 freeglut3-dev # for glu (needed for bullet2) - - # cmake 3.5 precompiled - DEPS_DIR=$(pwd)/deps - if [[ -d "$DEPS_DIR" ]]; then - rm -Rf $DEPS_DIR - fi - CMAKE_FILE=cmake-3.5.2-Linux-x86_64.tar.gz - CMAKE_URL=http://www.cmake.org/files/v3.5/${CMAKE_FILE} - wget ${CMAKE_URL} --quiet --no-check-certificate - mkdir -p cmake - tar -xzf ${CMAKE_FILE} -C cmake --strip-components 1 - mv cmake ${DEPS_DIR} - export PATH=${DEPS_DIR}/cmake/bin:${PATH} - cmake --version -else - /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" - brew update - brew doctor - export PATH="/usr/local/bin:$PATH" - brew install node - brew install cmake - brew install lcov - brew install cppcheck -fi -pip install --user pyyaml -pip install --user poster -pip install --user codecov diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd deleted file mode 100644 index 178869f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd +++ /dev/null @@ -1,14 +0,0 @@ -@echo off - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -echo running in mode %MODE% ... -cd %MODE% -cmake --build . --config %MODE% --target install -set lasterror=%errorlevel% -cd .. -exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh deleted file mode 100755 index 084a6ef..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/compile.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_TARGET="${CMAKI_TARGET:-install}" -export COMPILER_BASENAME=$(basename ${CC}) - -echo "running in mode $MODE ... ($COMPILER_BASENAME)" -cd $COMPILER_BASENAME/$MODE - -# CORES=$(grep -c ^processor /proc/cpuinfo) -CORES=12 -cmake --build . --config $MODE --target $CMAKI_TARGET -- -j$CORES -k VERBOSE=1 || cmake --build . --config $MODE --target $CMAKI_TARGET -- -j1 VERBOSE=1 -code=$? -exit $code diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd deleted file mode 100644 index ae010cb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd +++ /dev/null @@ -1,28 +0,0 @@ -@echo off - -if DEFINED COMPILER ( - echo Using COMPILER: %COMPILER% -) else ( - set COMPILER="Visual Studio" - echo Env var COMPILER is not defined. Using by default: %COMPILER% -) - -if DEFINED COMPILER_VERSION ( - echo Using COMPILER_VERSION: %COMPILER_VERSION% -) else ( - set COMPILER_VERSION=16 - echo Env var COMPILER_VERSION is not defined. Using by default: %COMPILER_VERSION% -) - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -if "%NPP_CI%" == "FALSE" ( - conan install . 
--build missing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% -) - -conan create . npm-mas-mas/testing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% -tf None - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh deleted file mode 100644 index 8e84f01..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -e - -export MODE="${MODE:-Debug}" -export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" - -if [ "$NPP_CI" == "FALSE" ]; then - conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -fi - -conan create . npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh deleted file mode 100755 index 2b760f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/docker.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -export IMAGE="${IMAGE:-linux-x64}" -export MODE="${MODE:-Debug}" -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export PACKAGE="${PACKAGE:-undefined}" - -docker run --rm makiolo/$IMAGE > ./dockcross-$IMAGE -sed -e "s#DEFAULT_DOCKCROSS_IMAGE=dockcross/$IMAGE#DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE#g" dockcross-$IMAGE > makiolo-$IMAGE -chmod +x ./makiolo-$IMAGE -if [ "$PACKAGE" == "undefined" ]; then - # CI - ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh | bash' -else - # build package - ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE -e PACKAGE=$PACKAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.sh | CMAKI_INSTALL=$(pwd)/bin bash' -fi -error=$? 
- -# clean container -docker rmi -f makiolo/$IMAGE - -exit $error diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd deleted file mode 100644 index 7b70325..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd +++ /dev/null @@ -1,6 +0,0 @@ -@echo off -git checkout -b tmp -git checkout master -git merge master -git pull - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh deleted file mode 100755 index 48c48f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -set -e -git checkout -b tmp -git checkout master -git merge master -git pull - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh deleted file mode 100755 index ec6e0f3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/init.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh -PWD="`dirname \"$0\"`" - -cp -v $PWD/init/.travis.yml . -git add .travis.yml - -cp -v $PWD/init/appveyor.yml . -git add appveyor.yml - -cp -v $PWD/init/.clang-format . -git add .clang-format - -cp -v $PWD/init/.gitignore . -git add .gitignore - -cp -v $PWD/init/cmaki.yml . -git add cmaki.yml - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd deleted file mode 100644 index 3366ec8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd +++ /dev/null @@ -1,30 +0,0 @@ -@echo off - -:: IF DEFINED CMAKI_PWD ( -:: set CMAKI_PWD=%CMAKI_PWD% -:: ) else ( -:: set CMAKI_PWD=%CD% -:: ) -:: -:: IF DEFINED CMAKI_INSTALL ( -:: set CMAKI_INSTALL=%CMAKI_INSTALL% -:: ) else ( -:: set CMAKI_INSTALL=%CMAKI_PWD%/bin -:: ) - -IF DEFINED MODE ( - set MODE=%MODE% -) else ( - set MODE=Debug -) - -IF DEFINED YMLFILE ( - build --yaml=%YMLFILE% -d -) else ( - IF DEFINED PACKAGE ( - build %PACKAGE% -d - ) else ( - echo Error: must define env var YMLFILE or PACKAGE - ) -) - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh deleted file mode 100755 index a0fd049..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -export MODE="${MODE:-Debug}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export PACKAGE="${PACKAGE:-undefined}" -export YMLFILE="${YMLFILE:-undefined}" - -if [ "$YMLFILE" == "undefined" ]; then - if [ "$PACKAGE" == "undefined" ]; then - echo Error: must define env var YMLFILE or PACKAGE - else - echo building $PACKAGE ... - ./build $PACKAGE --no-back-yaml --no-run-tests -d - fi -else - echo building from yaml file: ${YMLFILE} ... 
- ./build --yaml=${YMLFILE} --no-run-tests -d -fi diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd deleted file mode 100644 index 87c7d0c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd +++ /dev/null @@ -1,3 +0,0 @@ -@echo off -git push && npm publish - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh deleted file mode 100755 index c74a96f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/publish.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -git push && npm publish - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh deleted file mode 100755 index 97884f3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/replace.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -MV="git mv" - -if [[ $3 == "run" ]]; -then - # do sed implace - run=" -i" -else - run="" -fi - -command="ag -w $1 -l --ignore artifacts --ignore node_modules --ignore gcc --ignore clang --ignore bin" -command_search_files="$command | grep -e $1.cpp$ -e $1.h$" -command_search_files_count="$command_search_files | xargs -I{} grep -h -e ^#include {} | grep -h $1 | wc -l" -count=$(eval $command_search_files_count) - -if [[ $count -gt 0 ]]; -then - echo "se renonbrara los siguientes ficheros (utilizando $MV):" - for file in $(eval $command_search_files); - do - destiny=$(echo $file | sed "s/\<$1\>/$2/g") - if [[ $3 == "run" ]]; - then - echo run: $MV $file $destiny - $MV $file $destiny - else - echo dry-run: $MV $file $destiny - fi - done -else - echo "No es necesario renombrar ficheros" -fi - -if [[ $3 == "run" ]]; -then - # echo run: "$command | xargs sed "s/\<$1\>/$2/g" $run" - echo replacing ... 
-else - echo replace in dry-run -fi -eval $command | xargs -I{} sed "s@\<$1\>@$2@g" $run {} - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd deleted file mode 100644 index 2acc40d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/run.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -call node_modules\cmaki\setup.cmd -call node_modules\cmaki\compile.cmd -call node_modules\cmaki\test.cmd - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh deleted file mode 100755 index 0e436b4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/search.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -ag -w --cpp $1 --ignore cmaki --ignore depends --ignore build --ignore cmaki_generator --ignore baul - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd deleted file mode 100644 index 8ac63c5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd +++ /dev/null @@ -1,64 +0,0 @@ -@echo off - -setlocal enableextensions - - -:: export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -:: export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" - -if DEFINED COMPILER ( - echo Using COMPILER: %COMPILER% -) else ( - set COMPILER="Visual Studio" - echo Env var COMPILER is not defined. Using by default: %COMPILER% -) - -if DEFINED COMPILER_VERSION ( - echo Using COMPILER_VERSION: %COMPILER_VERSION% -) else ( - set COMPILER_VERSION=16 - echo Env var COMPILER_VERSION is not defined. Using by default: %COMPILER_VERSION% -) - -if DEFINED GENERATOR ( - echo Using Visual Studio generator: %GENERATOR% -) else ( - set GENERATOR=Visual Studio 16 2019 - echo Env var GENERATOR is not defined. Using by default: %GENERATOR% -) - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -if "%Platform%" == "x86" ( - set ARCH=x86 -) else ( - set GENERATOR=%GENERATOR% Win64 - set ARCH=x86_64 -) - -echo running in mode %COMPILER% %COMPILER_VERSION% %ARCH% %MODE% ... 
-if exist %MODE% (rmdir /s /q %MODE%) -md %MODE% - -:: setup -cd %MODE% - -conan install %CMAKI_PWD% --build never -s build_type=%MODE% -s arch=%ARCH% -s arch_build=%ARCH% -s compiler=%COMPILER% -s compiler.version=%COMPILER_VERSION% - -IF DEFINED Configuration ( - IF DEFINED Platform ( - cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -G"%GENERATOR%" -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% - ) ELSE ( - cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% - ) -) ELSE ( - cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% -) - -set lasterror=%errorlevel% -cd %CMAKI_PWD% -exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh deleted file mode 100755 index 404e5a9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/setup.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash - -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export COMPILER="${COMPILER:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" -export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" -export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export CMAKI_GENERATOR="${CMAKI_GENERATOR:-Unix Makefiles}" -export COVERAGE="${COVERAGE:-FALSE}" -export TESTS_VALGRIND="${TESTS_VALGRIND:-FALSE}" -export COMPILER_BASENAME=$(basename ${CC}) -export CMAKE_TOOLCHAIN_FILE="${CMAKE_TOOLCHAIN_FILE:-"no cross compile"}" -export BUILD_DIR="${BUILD_DIR:-${COMPILER_BASENAME}/${MODE}}" - -if [ "$CMAKE_TOOLCHAIN_FILE" == "no cross compile" ]; then - export CMAKE_TOOLCHAIN_FILE_FILEPATH="" -else - export CMAKE_TOOLCHAIN_FILE_FILEPATH=" -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" -fi - -echo "running in mode ${MODE} ... ($COMPILER_BASENAME) (${CC} / ${CXX})" - -# setup -if [ ! -d ${BUILD_DIR} ]; then - mkdir -p ${BUILD_DIR} -fi -echo BUILD_DIR=${BUILD_DIR} -cd ${BUILD_DIR} - -if [ -f "CMakeCache.txt" ]; then - rm CMakeCache.txt -fi - -export WITH_CONAN=0 -if [ -f "$CMAKI_PWD/conanfile.txt" ] || [ -f "$CMAKI_PWD/conanfile.py" ]; then - - if [ "$NPP_CI" == "FALSE" ]; then - conan install $CMAKI_PWD --build missing -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} - fi - - echo conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} - if ! conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION}; then - echo Error conan - exit 1 - fi - export WITH_CONAN=1 -fi - -cmake $CMAKI_PWD ${CMAKE_TOOLCHAIN_FILE_FILEPATH} -DCMAKE_MODULE_PATH=${CMAKI_PWD}/node_modules/npm-mas-mas/cmaki -DCMAKE_INSTALL_PREFIX=${CMAKI_INSTALL} -DCMAKE_BUILD_TYPE=${MODE} -DFIRST_ERROR=1 -G"${CMAKI_GENERATOR}" -DCMAKE_C_COMPILER="${CC}" -DCMAKE_CXX_COMPILER="${CXX}" -DNPP_CACHE=${NPP_CACHE} -DCOVERAGE=${COVERAGE} -DTESTS_VALGRIND=${TESTS_VALGRIND} -DWITH_CONAN=${WITH_CONAN} -code=$? 
-exit ${code} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd deleted file mode 100644 index 33ee4fa..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.cmd +++ /dev/null @@ -1,15 +0,0 @@ -@echo off - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -echo running in mode %MODE% ... -cd %MODE% -ctest . --no-compress-output --output-on-failure -T Test -C %MODE% -V -set lasterror=%errorlevel% -cd .. - -if %lasterror% neq 0 exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh deleted file mode 100755 index 30ddf60..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/test.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export NOCODECOV="${NOCODECOV:-FALSE}" -export COVERAGE="${COVERAGE:-FALSE}" -export CPPCHECK="${CPPCHECK:-FALSE}" -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export COMPILER_BASENAME=$(basename ${CC}) - -echo "running in mode $MODE ... ($COMPILER_BASENAME)" -mkdir -p $COMPILER_BASENAME/$MODE -cd $COMPILER_BASENAME/$MODE - -# tests -ctest . --no-compress-output --output-on-failure -T Test -C $MODE -V -code=$? - -# posttests -if [ "$COVERAGE" == "TRUE" ]; then - if [[ "$CC" == "gcc" ]]; then - if [[ "$MODE" == "Debug" ]]; then - find ../.. -name "*.gcno" -o -name "*.gcda" - lcov -c -i -d ../.. -o coverage.base - # aggregate coverage - lcov -c -d ../.. -o coverage.run - # merge pre & run - lcov -d ../.. -a coverage.base -a coverage.run -o coverage.info - lcov -r coverage.info '/usr/*' -o coverage.info - lcov -r coverage.info 'tests/*' -o coverage.info - lcov -r coverage.info 'gtest/*' -o coverage.info - lcov -r coverage.info 'gmock/*' -o coverage.info - lcov -r coverage.info 'node_modules/*' -o coverage.info - # lcov -l coverage.info - genhtml --no-branch-coverage -o ../../coverage/ coverage.info - if [ "$NOCODECOV" == "FALSE" ]; then - bash <(curl -s https://codecov.io/bash) || echo "Codecov did not collect coverage reports" - fi - rm -f coverage.base coverage.run coverage.info - fi - fi -fi - -if [ "$CPPCHECK" == "TRUE" ]; then - if [[ "$CC" == "gcc" ]]; then - if [[ "$MODE" == "Debug" ]]; then - cppcheck -i ../../node_modules -i ../../$COMPILER_BASENAME --inconclusive --check-config --max-configs=10 --enable=all -UDEBUG --inline-suppr ../.. 
- fi - fi -fi - -exit $code diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd deleted file mode 100644 index 74063e4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd +++ /dev/null @@ -1,29 +0,0 @@ -@echo off - -IF DEFINED CMAKI_PWD ( - set CMAKI_PWD=%CMAKI_PWD% -) else ( - set CMAKI_PWD=%CD% -) - -IF DEFINED CMAKI_INSTALL ( - set CMAKI_INSTALL=%CMAKI_INSTALL% -) else ( - set CMAKI_INSTALL=%CMAKI_PWD%/bin -) - -IF DEFINED MODE ( - set MODE=%MODE% -) else ( - set MODE=Debug -) - -set YMLFILE=%CMAKI_PWD%/cmaki.yml - -:: warning, TODO: detectar si hay cambios locales y avisar -git diff %CMAKI_PWD% - -cd %CMAKI_PWD%/node_modules/cmaki_generator -curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.cmd > __make_artifact.cmd -call __make_artifact.cmd -del __make_artifact.cmd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh deleted file mode 100755 index a088a9e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -e - -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export YMLFILE=$CMAKI_PWD/cmaki.yml - -git diff $CMAKI_PWD -cd $CMAKI_PWD/node_modules/npm-mas-mas/cmaki_generator -../cmaki_scripts/make_artifact.sh - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd deleted file mode 100644 index 7d4bb06..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off - -# upload package -conan upload '*' -r npm-mas-mas --all -c - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh deleted file mode 100644 index f62d19d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -e - -# upload package -conan upload '*' -r npm-mas-mas --all -c - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml deleted file mode 100644 index 8c0ae81..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker-compose.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: '3' -services: - linux64: - build: - context: . - dockerfile: ./docker/Dockerfile.linux-x64 - environment: - - NPP_SERVER=http://servfactor/cpp - command: make clean build - volumes: - - .:/work - - windows64: - build: - context: . - dockerfile: ./docker/Dockerfile.windows-x64 - environment: - - NPP_SERVER=http://servfactor/cpp - command: make clean build - volumes: - - .:/work - - android64: - build: - context: . 
- dockerfile: ./docker/Dockerfile.android-arm64 - environment: - - NPP_SERVER=http://servfactor/cpp - command: make clean build - volumes: - - .:/work - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 deleted file mode 100644 index e5b726a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 +++ /dev/null @@ -1,9 +0,0 @@ -FROM dockcross/android-arm64 -ENV PYTHONUNBUFFERED 1 -RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - -RUN apt install -y nodejs -RUN npm install -g npm -WORKDIR /work -ADD requirements.txt /work -RUN pip install -r requirements.txt - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 deleted file mode 100644 index 4a132bd..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 +++ /dev/null @@ -1,16 +0,0 @@ -FROM dockcross/linux-x64 -ENV PYTHONUNBUFFERED 1 -RUN echo 'deb http://ftp.us.debian.org/debian testing main contrib non-free' > /etc/apt/sources.list.d/gcc.testing.list -RUN apt-get update -RUN apt-get install -y -t testing g++ -RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - -RUN apt install -y nodejs -RUN npm install -g npm -RUN apt install -y libgl1-mesa-dev -RUN apt install -y libx11-dev -RUN apt install -y python3-pip -WORKDIR /work -RUN pip3 install conan==1.6.1 -RUN pip3 install pyyaml==3.13 -RUN pip3 install requests==2.19.1 - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 deleted file mode 100644 index d30d465..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 +++ /dev/null @@ -1,9 +0,0 @@ -FROM dockcross/windows-x64 -ENV PYTHONUNBUFFERED 1 -RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - -RUN apt install -y nodejs -RUN npm install -g npm -WORKDIR /work -ADD requirements.txt /work -RUN pip install -r requirements.txt - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh deleted file mode 100755 index 122cdaf..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/docker/entrypoint.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!//bin/bash - -export MODE="${MODE:-Debug}" -export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" - -if [ "$(uname)" == "Darwin" ]; then - # mac - export COMPILER=apple-clang COMPILER_VERSION=10.0 COMPILER_LIBCXX=libc++ -fi - -# compile 3rd parties -# conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -# conan create . 
npm-mas-mas/testing --build $PACKAGE -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None -# conan upload $PACKAGE/*@npm-mas-mas/testing -r npm-mas-mas --all -c - -# compile only $PACKAGE -conan create . npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None -conan upload *@npm-mas-mas/testing -r npm-mas-mas --all -c - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json deleted file mode 100644 index 72449de..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "npm-mas-mas", - "version": "0.0.1", - "description": "npm extension for use packing system with C++", - "bin": { - "cmaki": "./cmaki_scripts/cmaki.js" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/makiolo/npm-mas-mas.git" - }, - "keywords": [ - "cmake", - "c++", - "artifacts" - ], - "author": "Ricardo Marmolejo García", - "license": "MIT", - "bugs": { - "url": "https://github.com/makiolo/npm-mas-mas/issues" - }, - "homepage": "https://github.com/makiolo/npm-mas-mas#readme", - "dependencies": { - "shelljs": ">=0.8.5" - } -} - - - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile deleted file mode 100644 index 883467b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM nimmis/apache-php5 - -MAINTAINER Ricardo Marmolejo García - -RUN echo "upload_max_filesize=800M" >> /etc/php5/apache2/php.ini -RUN echo "post_max_size=800M" >> /etc/php5/apache2/php.ini -RUN echo "max_input_time=300" >> /etc/php5/apache2/php.ini -RUN echo "max_execution_time=300" >> /etc/php5/apache2/php.ini -RUN echo "error_reporting = E_ALL" >> /etc/php5/apache2/php.ini -RUN echo "display_errors = On" >> /etc/php5/apache2/php.ini - -WORKDIR /var/www/html/cpp -RUN mkdir -p /var/www/html/packages -RUN chmod -R 777 /var/www/html/packages - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile deleted file mode 100644 index efbcbeb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -all: - docker-compose up -d --build - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md deleted file mode 100644 index 31554b7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/NOTES.md +++ /dev/null @@ -1,4 +0,0 @@ -need edit php.ini: - -upload_max_filesize = 500M -post_max_size = 500M diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md deleted file mode 100644 index 7f52707..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# servfactor -- default artifacts path is 
$(pwd)/packages (can use symbolic links) -- chmod o+w packages/ -- chmod o+w packages/stats.txt - -# php.ini -- upload_max_filesize=800M -- post_max_size=800M -- max_input_time=300 -- max_execution_time=300 diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml deleted file mode 100644 index 9d85ed5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/docker-compose.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: '3' -services: - servfactor: - build: . - volumes: - - .:/var/www/html/cpp - - ./packages:/var/www/html/packages - ports: - - "8080:80" - restart: always - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php deleted file mode 100755 index 6f536d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/download.php +++ /dev/null @@ -1,58 +0,0 @@ - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php deleted file mode 100755 index 6881558..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/index.php +++ /dev/null @@ -1,227 +0,0 @@ -= 1024 && $i < ( count( $types ) -1 ); $bytes /= 1024, $i++ ); - return( round( $bytes, 2 ) . " " . $types[$i] ); -} - -if(!$quiet_mode) -{ - -/* get disk space free (in bytes) */ -$df = disk_free_space($packages_dir); -/* and get disk space total (in bytes) */ -$dt = disk_total_space($packages_dir); -/* now we calculate the disk space used (in bytes) */ -$du = $dt - $df; -/* percentage of disk used - this will be used to also set the width % of the progress bar */ -$dp = sprintf('%.2f',($du / $dt) * 100); - -/* and we formate the size from bytes to MB, GB, etc. */ -$df = formatSize($df); -$du = formatSize($du); -$dt = formatSize($dt); - -?> - - - - -
-
% Disk Used
-
-
- - - -
-
-format("c")); - } -} -arsort($arr); -$arr = array_keys($arr); - -if(!$quiet_mode) -{ - if($dp > 95) - { - for ($i = 1; $i <= 10; $i++) { - $last_file = array_pop($arr); - if(u::ends_with($last_file, "-cmake.tar.gz")) - { - $big_file = str_replace("-cmake.tar.gz", ".tar.gz", $last_file); - if(!unlink($dir . $last_file)) - { - echo "error removing ".$last_file."
"; - } - else - { - echo "removed ".$last_file."
"; - } - - if(!unlink($dir . $big_file)) - { - echo "error removing ".$dir.$big_file."
"; - } - else - { - echo "removed ".$dir.$big_file."
"; - } - break; - } - } - } -} - -foreach($arr as $file) -{ - // bug si el package tiene "-" - if(u::ends_with($file, "-cmake.tar.gz")) - { - // $substance = $file; - - preg_match('/([\w-]+)-([0-9\.]+)-([\w-\.]+)-cmake.tar.gz/', $file, $matches); - $package = $matches[1]; - $version = $matches[2]; - $platform = $matches[3]; - - // $substance = substr($substance, 0, strrpos($substance, "-")); - // $platform = substr($substance, strrpos($substance, "-")+1); - // $substance = substr($substance, 0, strrpos($substance, "-")); - // $version = substr($substance, strrpos($substance, "-")+1); - // $substance = substr($substance, 0, strrpos($substance, "-")); - // $package = $substance; - if(!isset($_REQUEST['platform']) || ($_REQUEST['platform'] == $platform)) - { - $hits_info = get_hits($data, $file); - $hits = $hits_info[0]; - $last_download = $hits_info[1]; - if($last_download === NULL) - { - if(!$quiet_mode) - { - $formatted = "never downloaded"; - } - else - { - $formatted = "---"; - } - } - else - { - if(!$quiet_mode) - { - $formatted = $last_download->format("d-m-Y H:i"); - } - else - { - $formatted = $last_download->format("c"); - } - } - if(!$quiet_mode) - { - echo "" . $package ." (" . $version . ") "; - if($hits > 0) - { - echo "$platform (".$hits." hits, last use: ".$formatted.")"; - } - else - { - echo "$platform (".$hits." hits)"; - } - echo "
"; - } - else - { - print $package.";".$version.";".$platform.";"."download.php?file=".$file.";".$hits.";".$formatted."\n"; - } - } - } -} - -?> - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md deleted file mode 100644 index 734fc3d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/packages/README.md +++ /dev/null @@ -1,2 +0,0 @@ -packages dir - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php deleted file mode 100644 index 1ab9900..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/stats.php +++ /dev/null @@ -1,68 +0,0 @@ -
"; - }; - $f = fopen($stats, 'r'); - $data = fread($f, filesize($stats)); - $data = unserialize($data); - fclose($f); - } - else - { - $data = array(); - } - - return $data; -} - -function inc_stats($data, $key) -{ - $key = basename($key); - - if(array_key_exists($key, $data)) - { - $data[$key][0] = $data[$key][0] + 1; - $data[$key][1] = new DateTime('NOW'); - } - else - { - $data[$key] = array(1, new DateTime('NOW')); - } - return $data; -} - -function get_hits($data, $key) -{ - $key = basename($key); - - if(array_key_exists($key, $data)) - { - return $data[$key]; - } - else - { - return array(0, NULL); - } -} - -function write_stats($data) -{ - global $stats; - $f = fopen($stats, 'w'); - $data = serialize($data); - fwrite($f, $data); - fclose($f); -} - -?> - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php deleted file mode 100644 index f57bc22..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/upload.php +++ /dev/null @@ -1,76 +0,0 @@ -" . $artifacts . ""; - -if(!is_writable($artifacts)) -{ - echo "I don't have permission
"; - exit(1); -} - -$uploaded_file = $artifacts . "/" . basename($_FILES['uploaded']['name']); - -// if(isset($_FILES['uploaded']) && file_exists($uploaded_file)) -// { -// echo "file: ".$uploaded_file." already esxists!"; -// exit(1); -// } - -if (move_uploaded_file($_FILES['uploaded']['tmp_name'], $uploaded_file)) -{ - echo "El fichero es valido y se subio con exito: ". $uploaded_file .".\n"; -} -else -{ -?> -
- Enviar este fichero: - -
-
- - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php deleted file mode 100755 index ac69f78..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-mas-mas/servfactor/util.php +++ /dev/null @@ -1,2584 +0,0 @@ - - * @link http://github.com/brandonwamboldt/utilphp/ Official Documentation - */ -class util -{ - /** - * A constant representing the number of seconds in a minute, for - * making code more verbose - * - * @var integer - */ - const SECONDS_IN_A_MINUTE = 60; - - /** - * A constant representing the number of seconds in an hour, for making - * code more verbose - * - * @var integer - */ - const SECONDS_IN_A_HOUR = 3600; - const SECONDS_IN_AN_HOUR = 3600; - - /** - * A constant representing the number of seconds in a day, for making - * code more verbose - * - * @var integer - */ - const SECONDS_IN_A_DAY = 86400; - - /** - * A constant representing the number of seconds in a week, for making - * code more verbose - * - * @var integer - */ - const SECONDS_IN_A_WEEK = 604800; - - /** - * A constant representing the number of seconds in a month (30 days), - * for making code more verbose - * - * @var integer - */ - const SECONDS_IN_A_MONTH = 2592000; - - /** - * A constant representing the number of seconds in a year (365 days), - * for making code more verbose - * - * @var integer - */ - const SECONDS_IN_A_YEAR = 31536000; - - /** - * URL constants as defined in the PHP Manual under "Constants usable with - * http_build_url()". - * - * @see http://us2.php.net/manual/en/http.constants.php#http.constants.url - */ - const HTTP_URL_REPLACE = 1; - const HTTP_URL_JOIN_PATH = 2; - const HTTP_URL_JOIN_QUERY = 4; - const HTTP_URL_STRIP_USER = 8; - const HTTP_URL_STRIP_PASS = 16; - const HTTP_URL_STRIP_AUTH = 32; - const HTTP_URL_STRIP_PORT = 64; - const HTTP_URL_STRIP_PATH = 128; - const HTTP_URL_STRIP_QUERY = 256; - const HTTP_URL_STRIP_FRAGMENT = 512; - const HTTP_URL_STRIP_ALL = 1024; - - /** - * A collapse icon, using in the dump_var function to allow collapsing - * an array or object - * - * @var string - */ - public static $icon_collapse = 
'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFNzFDNDQyNEMyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFNzFDNDQyM0MyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NDlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PuF4AWkAAAA2UExURU9t2DBStczM/1h16DNmzHiW7iNFrypMvrnD52yJ4ezs7Onp6ejo6P///+Tk5GSG7D9h5SRGq0Q2K74AAAA/SURBVHjaLMhZDsAgDANRY3ZISnP/y1ZWeV+jAeuRSky6cKL4ryDdSggP8UC7r6GvR1YHxjazPQDmVzI/AQYAnFQDdVSJ80EAAAAASUVORK5CYII='; - - /** - * A collapse icon, using in the dump_var function to allow collapsing - * an array or object - * - * @var string - */ - public static $icon_expand = 'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFQzZERTJDNEMyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFQzZERTJDM0MyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3MzlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PkmDvWIAAABIUExURU9t2MzM/3iW7ubm59/f5urq85mZzOvr6////9ra38zMzObm5rfB8FZz5myJ4SNFrypMvjBStTNmzOvr+mSG7OXl8T9h5SRGq/OfqCEAAABKSURBVHjaFMlbEoAwCEPRULXF2jdW9r9T4czcyUdA4XWB0IgdNSybxU9amMzHzDlPKKu7Fd1e6+wY195jW0ARYZECxPq5Gn8BBgCr0gQmxpjKAwAAAABJRU5ErkJggg=='; - - private static $hasArray = false; - - /** - * Map of special non-ASCII characters and suitable ASCII replacement - * characters. 
- * - * Part of the URLify.php Project - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - public static $maps = array( - 'de' => array(/* German */ - 'Ä' => 'Ae', 'Ö' => 'Oe', 'Ü' => 'Ue', 'ä' => 'ae', 'ö' => 'oe', 'ü' => 'ue', 'ß' => 'ss', - 'ẞ' => 'SS' - ), - 'latin' => array( - 'À' => 'A', 'Á' => 'A', 'Â' => 'A', 'Ã' => 'A', 'Ä' => 'A', 'Å' => 'A','Ă' => 'A', 'Æ' => 'AE', 'Ç' => - 'C', 'È' => 'E', 'É' => 'E', 'Ê' => 'E', 'Ë' => 'E', 'Ì' => 'I', 'Í' => 'I', 'Î' => 'I', - 'Ï' => 'I', 'Ð' => 'D', 'Ñ' => 'N', 'Ò' => 'O', 'Ó' => 'O', 'Ô' => 'O', 'Õ' => 'O', 'Ö' => - 'O', 'Ő' => 'O', 'Ø' => 'O','Ș' => 'S','Ț' => 'T', 'Ù' => 'U', 'Ú' => 'U', 'Û' => 'U', 'Ü' => 'U', 'Ű' => 'U', - 'Ý' => 'Y', 'Þ' => 'TH', 'ß' => 'ss', 'à' => 'a', 'á' => 'a', 'â' => 'a', 'ã' => 'a', 'ä' => - 'a', 'å' => 'a', 'ă' => 'a', 'æ' => 'ae', 'ç' => 'c', 'è' => 'e', 'é' => 'e', 'ê' => 'e', 'ë' => 'e', - 'ì' => 'i', 'í' => 'i', 'î' => 'i', 'ï' => 'i', 'ð' => 'd', 'ñ' => 'n', 'ò' => 'o', 'ó' => - 'o', 'ô' => 'o', 'õ' => 'o', 'ö' => 'o', 'ő' => 'o', 'ø' => 'o', 'ș' => 's', 'ț' => 't', 'ù' => 'u', 'ú' => 'u', - 'û' => 'u', 'ü' => 'u', 'ű' => 'u', 'ý' => 'y', 'þ' => 'th', 'ÿ' => 'y' - ), - 'latin_symbols' => array( - '©' => '(c)', - '®' => '(r)' - ), - 'el' => array(/* Greek */ - 'α' => 'a', 'β' => 'b', 'γ' => 'g', 'δ' => 'd', 'ε' => 'e', 'ζ' => 'z', 'η' => 'h', 'θ' => '8', - 'ι' => 'i', 'κ' => 'k', 'λ' => 'l', 'μ' => 'm', 'ν' => 'n', 'ξ' => '3', 'ο' => 'o', 'π' => 'p', - 'ρ' => 'r', 'σ' => 's', 'τ' => 't', 'υ' => 'y', 'φ' => 'f', 'χ' => 'x', 'ψ' => 'ps', 'ω' => 'w', - 'ά' => 'a', 'έ' => 'e', 'ί' => 'i', 'ό' => 'o', 'ύ' => 'y', 'ή' => 'h', 'ώ' => 'w', 'ς' => 's', - 'ϊ' => 'i', 'ΰ' => 'y', 'ϋ' => 'y', 'ΐ' => 'i', - 'Α' => 'A', 'Β' => 'B', 'Γ' => 'G', 'Δ' => 'D', 'Ε' => 'E', 'Ζ' => 'Z', 'Η' => 'H', 'Θ' => '8', - 'Ι' => 'I', 'Κ' => 'K', 'Λ' => 'L', 'Μ' => 'M', 'Ν' => 'N', 'Ξ' => '3', 'Ο' => 'O', 'Π' => 'P', - 'Ρ' => 'R', 'Σ' => 'S', 'Τ' => 'T', 'Υ' => 'Y', 'Φ' => 'F', 'Χ' => 'X', 'Ψ' => 'PS', 'Ω' => 'W', - 'Ά' => 'A', 'Έ' => 'E', 'Ί' => 'I', 'Ό' => 'O', 'Ύ' => 'Y', 'Ή' => 'H', 'Ώ' => 'W', 'Ϊ' => 'I', - 'Ϋ' => 'Y' - ), - 'tr' => array(/* Turkish */ - 'ş' => 's', 'Ş' => 'S', 'ı' => 'i', 'İ' => 'I', 'ç' => 'c', 'Ç' => 'C', 'ü' => 'u', 'Ü' => 'U', - 'ö' => 'o', 'Ö' => 'O', 'ğ' => 'g', 'Ğ' => 'G' - ), - 'ru' => array(/* Russian */ - 'а' => 'a', 'б' => 'b', 'в' => 'v', 'г' => 'g', 'д' => 'd', 'е' => 'e', 'ё' => 'yo', 'ж' => 'zh', - 'з' => 'z', 'и' => 'i', 'й' => 'j', 'к' => 'k', 'л' => 'l', 'м' => 'm', 'н' => 'n', 'о' => 'o', - 'п' => 'p', 'р' => 'r', 'с' => 's', 'т' => 't', 'у' => 'u', 'ф' => 'f', 'х' => 'h', 'ц' => 'c', - 'ч' => 'ch', 'ш' => 'sh', 'щ' => 'sh', 'ъ' => '', 'ы' => 'y', 'ь' => '', 'э' => 'e', 'ю' => 'yu', - 'я' => 'ya', - 'А' => 'A', 'Б' => 'B', 'В' => 'V', 'Г' => 'G', 'Д' => 'D', 'Е' => 'E', 'Ё' => 'Yo', 'Ж' => 'Zh', - 'З' => 'Z', 'И' => 'I', 'Й' => 'J', 'К' => 'K', 'Л' => 'L', 'М' => 'M', 'Н' => 'N', 'О' => 'O', - 'П' => 'P', 'Р' => 'R', 'С' => 'S', 'Т' => 'T', 'У' => 'U', 'Ф' => 'F', 'Х' => 'H', 'Ц' => 'C', - 'Ч' => 'Ch', 'Ш' => 'Sh', 'Щ' => 'Sh', 'Ъ' => '', 'Ы' => 'Y', 'Ь' => '', 'Э' => 'E', 'Ю' => 'Yu', - 'Я' => 'Ya', - '№' => '' - ), - 'uk' => array(/* Ukrainian */ - 'Є' => 'Ye', 'І' => 'I', 'Ї' => 'Yi', 'Ґ' => 'G', 'є' => 'ye', 'і' => 'i', 'ї' => 'yi', 'ґ' => 'g' - ), - 'cs' => array(/* Czech */ - 'č' => 'c', 'ď' => 'd', 'ě' => 'e', 'ň' => 'n', 'ř' => 'r', 'š' => 's', 'ť' => 't', 'ů' => 'u', - 'ž' => 'z', 'Č' => 'C', 'Ď' => 'D', 'Ě' => 'E', 'Ň' => 'N', 'Ř' => 'R', 'Š' => 'S', 'Ť' => 'T', 
- 'Ů' => 'U', 'Ž' => 'Z' - ), - 'pl' => array(/* Polish */ - 'ą' => 'a', 'ć' => 'c', 'ę' => 'e', 'ł' => 'l', 'ń' => 'n', 'ó' => 'o', 'ś' => 's', 'ź' => 'z', - 'ż' => 'z', 'Ą' => 'A', 'Ć' => 'C', 'Ę' => 'e', 'Ł' => 'L', 'Ń' => 'N', 'Ó' => 'O', 'Ś' => 'S', - 'Ź' => 'Z', 'Ż' => 'Z' - ), - 'ro' => array(/* Romanian */ - 'ă' => 'a', 'â' => 'a', 'î' => 'i', 'ș' => 's', 'ț' => 't', 'Ţ' => 'T', 'ţ' => 't' - ), - 'lv' => array(/* Latvian */ - 'ā' => 'a', 'č' => 'c', 'ē' => 'e', 'ģ' => 'g', 'ī' => 'i', 'ķ' => 'k', 'ļ' => 'l', 'ņ' => 'n', - 'š' => 's', 'ū' => 'u', 'ž' => 'z', 'Ā' => 'A', 'Č' => 'C', 'Ē' => 'E', 'Ģ' => 'G', 'Ī' => 'i', - 'Ķ' => 'k', 'Ļ' => 'L', 'Ņ' => 'N', 'Š' => 'S', 'Ū' => 'u', 'Ž' => 'Z' - ), - 'lt' => array(/* Lithuanian */ - 'ą' => 'a', 'č' => 'c', 'ę' => 'e', 'ė' => 'e', 'į' => 'i', 'š' => 's', 'ų' => 'u', 'ū' => 'u', 'ž' => 'z', - 'Ą' => 'A', 'Č' => 'C', 'Ę' => 'E', 'Ė' => 'E', 'Į' => 'I', 'Š' => 'S', 'Ų' => 'U', 'Ū' => 'U', 'Ž' => 'Z' - ), - 'vn' => array(/* Vietnamese */ - 'Á' => 'A', 'À' => 'A', 'Ả' => 'A', 'Ã' => 'A', 'Ạ' => 'A', 'Ă' => 'A', 'Ắ' => 'A', 'Ằ' => 'A', 'Ẳ' => 'A', 'Ẵ' => 'A', 'Ặ' => 'A', 'Â' => 'A', 'Ấ' => 'A', 'Ầ' => 'A', 'Ẩ' => 'A', 'Ẫ' => 'A', 'Ậ' => 'A', - 'á' => 'a', 'à' => 'a', 'ả' => 'a', 'ã' => 'a', 'ạ' => 'a', 'ă' => 'a', 'ắ' => 'a', 'ằ' => 'a', 'ẳ' => 'a', 'ẵ' => 'a', 'ặ' => 'a', 'â' => 'a', 'ấ' => 'a', 'ầ' => 'a', 'ẩ' => 'a', 'ẫ' => 'a', 'ậ' => 'a', - 'É' => 'E', 'È' => 'E', 'Ẻ' => 'E', 'Ẽ' => 'E', 'Ẹ' => 'E', 'Ê' => 'E', 'Ế' => 'E', 'Ề' => 'E', 'Ể' => 'E', 'Ễ' => 'E', 'Ệ' => 'E', - 'é' => 'e', 'è' => 'e', 'ẻ' => 'e', 'ẽ' => 'e', 'ẹ' => 'e', 'ê' => 'e', 'ế' => 'e', 'ề' => 'e', 'ể' => 'e', 'ễ' => 'e', 'ệ' => 'e', - 'Í' => 'I', 'Ì' => 'I', 'Ỉ' => 'I', 'Ĩ' => 'I', 'Ị' => 'I', 'í' => 'i', 'ì' => 'i', 'ỉ' => 'i', 'ĩ' => 'i', 'ị' => 'i', - 'Ó' => 'O', 'Ò' => 'O', 'Ỏ' => 'O', 'Õ' => 'O', 'Ọ' => 'O', 'Ô' => 'O', 'Ố' => 'O', 'Ồ' => 'O', 'Ổ' => 'O', 'Ỗ' => 'O', 'Ộ' => 'O', 'Ơ' => 'O', 'Ớ' => 'O', 'Ờ' => 'O', 'Ở' => 'O', 'Ỡ' => 'O', 'Ợ' => 'O', - 'ó' => 'o', 'ò' => 'o', 'ỏ' => 'o', 'õ' => 'o', 'ọ' => 'o', 'ô' => 'o', 'ố' => 'o', 'ồ' => 'o', 'ổ' => 'o', 'ỗ' => 'o', 'ộ' => 'o', 'ơ' => 'o', 'ớ' => 'o', 'ờ' => 'o', 'ở' => 'o', 'ỡ' => 'o', 'ợ' => 'o', - 'Ú' => 'U', 'Ù' => 'U', 'Ủ' => 'U', 'Ũ' => 'U', 'Ụ' => 'U', 'Ư' => 'U', 'Ứ' => 'U', 'Ừ' => 'U', 'Ử' => 'U', 'Ữ' => 'U', 'Ự' => 'U', - 'ú' => 'u', 'ù' => 'u', 'ủ' => 'u', 'ũ' => 'u', 'ụ' => 'u', 'ư' => 'u', 'ứ' => 'u', 'ừ' => 'u', 'ử' => 'u', 'ữ' => 'u', 'ự' => 'u', - 'Ý' => 'Y', 'Ỳ' => 'Y', 'Ỷ' => 'Y', 'Ỹ' => 'Y', 'Ỵ' => 'Y', 'ý' => 'y', 'ỳ' => 'y', 'ỷ' => 'y', 'ỹ' => 'y', 'ỵ' => 'y', - 'Đ' => 'D', 'đ' => 'd' - ), - 'ar' => array(/* Arabic */ - 'أ' => 'a', 'ب' => 'b', 'ت' => 't', 'ث' => 'th', 'ج' => 'g', 'ح' => 'h', 'خ' => 'kh', 'د' => 'd', - 'ذ' => 'th', 'ر' => 'r', 'ز' => 'z', 'س' => 's', 'ش' => 'sh', 'ص' => 's', 'ض' => 'd', 'ط' => 't', - 'ظ' => 'th', 'ع' => 'aa', 'غ' => 'gh', 'ف' => 'f', 'ق' => 'k', 'ك' => 'k', 'ل' => 'l', 'م' => 'm', - 'ن' => 'n', 'ه' => 'h', 'و' => 'o', 'ي' => 'y' - ), - 'sr' => array(/* Serbian */ - 'ђ' => 'dj', 'ј' => 'j', 'љ' => 'lj', 'њ' => 'nj', 'ћ' => 'c', 'џ' => 'dz', 'đ' => 'dj', - 'Ђ' => 'Dj', 'Ј' => 'j', 'Љ' => 'Lj', 'Њ' => 'Nj', 'Ћ' => 'C', 'Џ' => 'Dz', 'Đ' => 'Dj' - ), - 'az' => array(/* Azerbaijani */ - 'ç' => 'c', 'ə' => 'e', 'ğ' => 'g', 'ı' => 'i', 'ö' => 'o', 'ş' => 's', 'ü' => 'u', - 'Ç' => 'C', 'Ə' => 'E', 'Ğ' => 'G', 'İ' => 'I', 'Ö' => 'O', 'Ş' => 'S', 'Ü' => 'U' - ), - 'fi' => array(/* Finnish */ - 'ä' => 'a', - 'ö' => 'o' - ), - ); - - /** - * The character map for the 
designated language - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $map = array(); - - /** - * The character list as a string. - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $chars = ''; - - /** - * The character list as a regular expression. - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $regex = ''; - - /** - * The current language - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $language = ''; - - /** - * Initializes the character map. - * - * Part of the URLify.php Project - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static function initLanguageMap($language = '') - { - if (count(self::$map) > 0 && (($language == '') || ($language == self::$language))) { - return; - } - - // Is a specific map associated with $language? - if (isset(self::$maps[$language]) && is_array(self::$maps[$language])) { - // Move this map to end. This means it will have priority over others - $m = self::$maps[$language]; - unset(self::$maps[$language]); - self::$maps[$language] = $m; - } - - // Reset static vars - self::$language = $language; - self::$map = array(); - self::$chars = ''; - - foreach (self::$maps as $map) { - foreach ($map as $orig => $conv) { - self::$map[$orig] = $conv; - self::$chars .= $orig; - } - } - - self::$regex = '/[' . self::$chars . ']/u'; - } - - /** - * Remove the duplicates from an array. - * - * This is faster version than the builtin array_unique(). - * - * Notes on time requirements: - * array_unique -> O(n log n) - * array_flip -> O(n) - * - * http://stackoverflow.com/questions/8321620/array-unique-vs-array-flip - * http://php.net/manual/en/function.array-unique.php - * - * @param $array - * @return $array - */ - public static function fast_array_unique($array) - { - $array = array_keys(array_flip($array)); - - return $array; - } - - /** - * Access an array index, retrieving the value stored there if it - * exists or a default if it does not. This function allows you to - * concisely access an index which may or may not exist without - * raising a warning. 
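fast_array_unique() above leans on the array_flip() trick its docblock cites: flipping twice drops duplicate values in roughly linear time, at the price of only working for values that are valid array keys (integers or strings). A minimal standalone sketch of that technique, with made-up data:

    <?php
    // Duplicate values collapse when they become keys; array_keys() turns the
    // surviving keys back into values, preserving first-occurrence order.
    $tags   = ['php', 'util', 'php', 'array', 'util'];
    $unique = array_keys(array_flip($tags));
    print_r($unique); // ['php', 'util', 'array']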
- * - * @param array $var Array value to access - * @param mixed $default Default value to return if the key is not - * present in the array - * @return mixed - */ - public static function array_get(&$var, $default = null) - { - if (isset($var)) { - return $var; - } - - return $default; - } - - /** - * Display a variable's contents using nice HTML formatting and will - * properly display the value of booleans as true or false - * - * @see recursiveVarDumpHelper() - * - * @param mixed $var The variable to dump - * @return string - */ - public static function var_dump($var, $return = false, $expandLevel = 1) - { - self::$hasArray = false; - $toggScript = 'var colToggle = function(toggID) {var img = document.getElementById(toggID);if (document.getElementById(toggID + "-collapsable").style.display == "none") {document.getElementById(toggID + "-collapsable").style.display = "inline";setImg(toggID, 0);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling;while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}} else {document.getElementById(toggID + "-collapsable").style.display = "none";setImg(toggID, 1);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling; while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}}};'; - $imgScript = 'var setImg = function(objID,imgID,addStyle) {var imgStore = ["data:image/png;base64,' . self::$icon_collapse . '", "data:image/png;base64,' . self::$icon_expand . '"];if (objID) {document.getElementById(objID).setAttribute("src", imgStore[imgID]);if (addStyle){document.getElementById(objID).setAttribute("style", "position:relative;left:-5px;top:-1px;cursor:pointer;");}}};'; - $jsCode = preg_replace('/ +/', ' ', ''); - $html = '
';
-        $done  = array();
-        $html .= self::recursiveVarDumpHelper($var, intval($expandLevel), 0, $done);
-        $html .= '
'; - - if (self::$hasArray) { - $html = $jsCode . $html; - } - - if (!$return) { - echo $html; - } - - return $html; - } - - /** - * Display a variable's contents using nice HTML formatting (Without - * the
 <pre> tag) and will properly display the values of variables
-     * like booleans and resources. Supports collapsible arrays and objects
-     * as well.
-     *
-     * @param  mixed $var The variable to dump
-     * @return string
-     */
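A hedged usage sketch for the public util::var_dump() wrapper shown earlier in this hunk, assuming the utilphp\util class from the removed file is still autoloadable; the output path is invented:

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    $payload = ['id' => 7, 'tags' => ['a', 'b'], 'active' => true];

    // 2nd argument: return the HTML instead of echoing it.
    // 3rd argument: how many nesting levels start expanded.
    $html = util::var_dump($payload, true, 2);
    file_put_contents('/tmp/dump.html', $html); // hypothetical output path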
-    protected static function recursiveVarDumpHelper($var, $expLevel, $depth = 0, $done = array())
-    {
-        $html = '';
-
-        if ($expLevel > 0) {
-            $expLevel--;
-            $setImg = 0;
-            $setStyle = 'display:inline;';
-        } elseif ($expLevel == 0) {
-            $setImg = 1;
-            $setStyle = 'display:none;';
-        } elseif ($expLevel < 0) {
-            $setImg = 0;
-            $setStyle = 'display:inline;';
-        }
-
-        if (is_bool($var)) {
-            $html .= 'bool(' . (($var) ? 'true' : 'false') . ')';
-        } elseif (is_int($var)) {
-            $html .= 'int(' . $var . ')';
-        } elseif (is_float($var)) {
-            $html .= 'float(' . $var . ')';
-        } elseif (is_string($var)) {
-            $html .= 'string(' . strlen($var) . ') "' . self::htmlentities($var) . '"';
-        } elseif (is_null($var)) {
-            $html .= 'NULL';
-        } elseif (is_resource($var)) {
-            $html .= 'resource("' . get_resource_type($var) . '") "' . $var . '"';
-        } elseif (is_array($var)) {
-            // Check for recursion
-            if ($depth > 0) {
-                foreach ($done as $prev) {
-                    if ($prev === $var) {
-                        $html .= 'array(' . count($var) . ') *RECURSION DETECTED*';
-                        return $html;
-                    }
-                }
-
-                // Keep track of variables we have already processed to detect recursion
-                $done[] = &$var;
-            }
-
-            self::$hasArray = true;
-            $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000);
-
-            $html .= (!empty($var) ? ' ' : '') . 'array(' . count($var) . ')';
-            if (!empty($var)) {
-                $html .= ' 
[
'; - - $indent = 4; - $longest_key = 0; - - foreach ($var as $key => $value) { - if (is_string($key)) { - $longest_key = max($longest_key, strlen($key) + 2); - } else { - $longest_key = max($longest_key, strlen($key)); - } - } - - foreach ($var as $key => $value) { - if (is_numeric($key)) { - $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); - } else { - $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); - } - - $html .= ' => '; - - $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); - - foreach ($value as $line => $val) { - if ($line != 0) { - $value[$line] = str_repeat(' ', $indent * 2) . $val; - } - } - - $html .= implode('
', $value) . '
'; - } - - $html .= ']
'; - } - } elseif (is_object($var)) { - // Check for recursion - foreach ($done as $prev) { - if ($prev === $var) { - $html .= 'object(' . get_class($var) . ') *RECURSION DETECTED*'; - return $html; - } - } - - // Keep track of variables we have already processed to detect recursion - $done[] = &$var; - - self::$hasArray=true; - $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000); - - $html .= ' object(' . get_class($var) . ')
[
'; - - $varArray = (array) $var; - - $indent = 4; - $longest_key = 0; - - foreach ($varArray as $key => $value) { - if (substr($key, 0, 2) == "\0*") { - unset($varArray[$key]); - $key = 'protected:' . substr($key, 3); - $varArray[$key] = $value; - } elseif (substr($key, 0, 1) == "\0") { - unset($varArray[$key]); - $key = 'private:' . substr($key, 1, strpos(substr($key, 1), "\0")) . ':' . substr($key, strpos(substr($key, 1), "\0") + 2); - $varArray[$key] = $value; - } - - if (is_string($key)) { - $longest_key = max($longest_key, strlen($key) + 2); - } else { - $longest_key = max($longest_key, strlen($key)); - } - } - - foreach ($varArray as $key => $value) { - if (is_numeric($key)) { - $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); - } else { - $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); - } - - $html .= ' => '; - - $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); - - foreach ($value as $line => $val) { - if ($line != 0) { - $value[$line] = str_repeat(' ', $indent * 2) . $val; - } - } - - $html .= implode('
', $value) . '
'; - } - - $html .= ']
'; - } - - return $html; - } - - /** - * Converts any accent characters to their equivalent normal characters - * and converts any other non-alphanumeric characters to dashes, then - * converts any sequence of two or more dashes to a single dash. This - * function generates slugs safe for use as URLs, and if you pass true - * as the second parameter, it will create strings safe for use as CSS - * classes or IDs. - * - * @param string $string A string to convert to a slug - * @param string $separator The string to separate words with - * @param boolean $css_mode Whether or not to generate strings safe for - * CSS classes/IDs (Default to false) - * @return string - */ - public static function slugify($string, $separator = '-', $css_mode = false) - { - // Compatibility with 1.0.* parameter ordering for semver - if ($separator === true || $separator === false) { - $css_mode = $separator; - $separator = '-'; - - // Raise deprecation error - trigger_error( - 'util::slugify() now takes $css_mode as the third parameter, please update your code', - E_USER_DEPRECATED - ); - } - - $slug = preg_replace('/([^a-z0-9]+)/', $separator, strtolower(self::remove_accents($string))); - - if ($css_mode) { - $digits = array('zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine'); - - if (is_numeric(substr($slug, 0, 1))) { - $slug = $digits[substr($slug, 0, 1)] . substr($slug, 1); - } - } - - return $slug; - } - - /** - * Checks to see if a string is utf8 encoded. - * - * NOTE: This function checks for 5-Byte sequences, UTF8 - * has Bytes Sequences with a maximum length of 4. - * - * Written by Tony Ferrara - * - * @param string $string The string to be checked - * @return boolean - */ - public static function seems_utf8($string) - { - if (function_exists('mb_check_encoding')) { - // If mbstring is available, this is significantly faster than - // using PHP regexps. - return mb_check_encoding($string, 'UTF-8'); - } - - // @codeCoverageIgnoreStart - return self::seemsUtf8Regex($string); - // @codeCoverageIgnoreEnd - } - - /** - * A non-Mbstring UTF-8 checker. - * - * @param $string - * @return bool - */ - protected static function seemsUtf8Regex($string) - { - // Obtained from http://stackoverflow.com/a/11709412/430062 with permission. - $regex = '/( - [\xC0-\xC1] # Invalid UTF-8 Bytes - | [\xF5-\xFF] # Invalid UTF-8 Bytes - | \xE0[\x80-\x9F] # Overlong encoding of prior code point - | \xF0[\x80-\x8F] # Overlong encoding of prior code point - | [\xC2-\xDF](?![\x80-\xBF]) # Invalid UTF-8 Sequence Start - | [\xE0-\xEF](?![\x80-\xBF]{2}) # Invalid UTF-8 Sequence Start - | [\xF0-\xF4](?![\x80-\xBF]{3}) # Invalid UTF-8 Sequence Start - | (?<=[\x0-\x7F\xF5-\xFF])[\x80-\xBF] # Invalid UTF-8 Sequence Middle - | (? - * - * @param string $brokenSerializedData - * @return string - */ - public static function fix_broken_serialization($brokenSerializedData) - { - $fixdSerializedData = preg_replace_callback('!s:(\d+):"(.*?)";!', function($matches) { - $snip = $matches[2]; - return 's:' . strlen($snip) . ':"' . $snip . '";'; - }, $brokenSerializedData); - - return $fixdSerializedData; - } - - /** - * Checks to see if the page is being server over SSL or not - * - * @return boolean - */ - public static function is_https() - { - return isset($_SERVER['HTTPS']) && !empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] != 'off'; - } - - /** - * Add or remove query arguments to the URL. 
- * - * @param mixed $newKey Either newkey or an associative array - * @param mixed $newValue Either newvalue or oldquery or uri - * @param mixed $uri URI or URL to append the queru/queries to. - * @return string - */ - public static function add_query_arg($newKey, $newValue = null, $uri = null) - { - // Was an associative array of key => value pairs passed? - if (is_array($newKey)) { - $newParams = $newKey; - - // Was the URL passed as an argument? - if (!is_null($newValue)) { - $uri = $newValue; - } elseif (!is_null($uri)) { - $uri = $uri; - } else { - $uri = self::array_get($_SERVER['REQUEST_URI'], ''); - } - } else { - $newParams = array($newKey => $newValue); - - // Was the URL passed as an argument? - $uri = is_null($uri) ? self::array_get($_SERVER['REQUEST_URI'], '') : $uri; - } - - // Parse the URI into it's components - $puri = parse_url($uri); - - if (isset($puri['query'])) { - parse_str($puri['query'], $queryParams); - $queryParams = array_merge($queryParams, $newParams); - } elseif (isset($puri['path']) && strstr($puri['path'], '=') !== false) { - $puri['query'] = $puri['path']; - unset($puri['path']); - parse_str($puri['query'], $queryParams); - $queryParams = array_merge($queryParams, $newParams); - } else { - $queryParams = $newParams; - } - - // Strip out any query params that are set to false. - // Properly handle valueless parameters. - foreach ($queryParams as $param => $value) { - if ($value === false) { - unset($queryParams[$param]); - } elseif ($value === null) { - $queryParams[$param] = ''; - } - } - - // Re-construct the query string - $puri['query'] = http_build_query($queryParams); - - // Strip = from valueless parameters. - $puri['query'] = preg_replace('/=(?=&|$)/', '', $puri['query']); - - - // Re-construct the entire URL - $nuri = self::http_build_url($puri); - - // Make the URI consistent with our input - if ($nuri[0] === '/' && strstr($uri, '/') === false) { - $nuri = substr($nuri, 1); - } - - if ($nuri[0] === '?' && strstr($uri, '?') === false) { - $nuri = substr($nuri, 1); - } - - return rtrim($nuri, '?'); - } - - /** - * Removes an item or list from the query string. - * - * @param string|array $keys Query key or keys to remove. - * @param bool $uri When false uses the $_SERVER value - * @return string - */ - public static function remove_query_arg($keys, $uri = null) - { - if (is_array($keys)) { - return self::add_query_arg(array_combine($keys, array_fill(0, count($keys), false)), $uri); - } - - return self::add_query_arg(array($keys => false), $uri); - } - - /** - * Build a URL. - * - * The parts of the second URL will be merged into the first according to - * the flags argument. 
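A quick sketch of add_query_arg()/remove_query_arg() as defined above (class assumed autoloadable, URIs invented for illustration):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    // Add or replace a parameter on an explicit URI rather than the
    // default $_SERVER['REQUEST_URI'].
    echo util::add_query_arg('page', 2, '/articles?sort=date');
    // e.g. /articles?sort=date&page=2

    // An associative array sets several parameters at once; the URI then
    // moves to the second argument.
    echo util::add_query_arg(['page' => 2, 'lang' => 'en'], '/articles');

    // remove_query_arg() is the inverse: internally it maps the keys to false.
    echo util::remove_query_arg('sort', '/articles?sort=date&page=2');
    // e.g. /articles?page=2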
- * - * @author Jake Smith - * @see https://github.com/jakeasmith/http_build_url/ - * - * @param mixed $url (part(s) of) an URL in form of a string or - * associative array like parse_url() returns - * @param mixed $parts same as the first argument - * @param int $flags a bitmask of binary or'ed HTTP_URL constants; - * HTTP_URL_REPLACE is the default - * @param array $new_url if set, it will be filled with the parts of the - * composed url like parse_url() would return - * @return string - */ - public static function http_build_url($url, $parts = array(), $flags = self::HTTP_URL_REPLACE, &$new_url = array()) - { - is_array($url) || $url = parse_url($url); - is_array($parts) || $parts = parse_url($parts); - - isset($url['query']) && is_string($url['query']) || $url['query'] = null; - isset($parts['query']) && is_string($parts['query']) || $parts['query'] = null; - - $keys = array('user', 'pass', 'port', 'path', 'query', 'fragment'); - - // HTTP_URL_STRIP_ALL and HTTP_URL_STRIP_AUTH cover several other flags. - if ($flags & self::HTTP_URL_STRIP_ALL) { - $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS - | self::HTTP_URL_STRIP_PORT | self::HTTP_URL_STRIP_PATH - | self::HTTP_URL_STRIP_QUERY | self::HTTP_URL_STRIP_FRAGMENT; - } elseif ($flags & self::HTTP_URL_STRIP_AUTH) { - $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS; - } - - // Schema and host are alwasy replaced - foreach (array('scheme', 'host') as $part) { - if (isset($parts[$part])) { - $url[$part] = $parts[$part]; - } - } - - if ($flags & self::HTTP_URL_REPLACE) { - foreach ($keys as $key) { - if (isset($parts[$key])) { - $url[$key] = $parts[$key]; - } - } - } else { - if (isset($parts['path']) && ($flags & self::HTTP_URL_JOIN_PATH)) { - if (isset($url['path']) && substr($parts['path'], 0, 1) !== '/') { - $url['path'] = rtrim( - str_replace(basename($url['path']), '', $url['path']), - '/' - ) . '/' . ltrim($parts['path'], '/'); - } else { - $url['path'] = $parts['path']; - } - } - - if (isset($parts['query']) && ($flags & self::HTTP_URL_JOIN_QUERY)) { - if (isset($url['query'])) { - parse_str($url['query'], $url_query); - parse_str($parts['query'], $parts_query); - - $url['query'] = http_build_query( - array_replace_recursive( - $url_query, - $parts_query - ) - ); - } else { - $url['query'] = $parts['query']; - } - } - } - - if (isset($url['path']) && substr($url['path'], 0, 1) !== '/') { - $url['path'] = '/' . $url['path']; - } - - foreach ($keys as $key) { - $strip = 'HTTP_URL_STRIP_' . strtoupper($key); - if ($flags & constant('utilphp\\util::' . $strip)) { - unset($url[$key]); - } - } - - $parsed_string = ''; - - if (isset($url['scheme'])) { - $parsed_string .= $url['scheme'] . '://'; - } - - if (isset($url['user'])) { - $parsed_string .= $url['user']; - - if (isset($url['pass'])) { - $parsed_string .= ':' . $url['pass']; - } - - $parsed_string .= '@'; - } - - if (isset($url['host'])) { - $parsed_string .= $url['host']; - } - - if (isset($url['port'])) { - $parsed_string .= ':' . $url['port']; - } - - if (!empty($url['path'])) { - $parsed_string .= $url['path']; - } else { - $parsed_string .= '/'; - } - - if (isset($url['query'])) { - $parsed_string .= '?' . $url['query']; - } - - if (isset($url['fragment'])) { - $parsed_string .= '#' . $url['fragment']; - } - - $new_url = $url; - - return $parsed_string; - } - - /** - * Converts many english words that equate to true or false to boolean. - * - * Supports 'y', 'n', 'yes', 'no' and a few other variations. 
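A sketch of http_build_url() with the JOIN flags, assuming the HTTP_URL_* class constants (referenced here but defined earlier in the file) keep their usual pecl_http-compatible bit values; the URL is made up:

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    // Merge a relative path and extra query parameters into a base URL.
    echo util::http_build_url(
        'http://example.com/app/index.php?debug=1',
        ['path' => 'api/v1', 'query' => 'page=2'],
        util::HTTP_URL_JOIN_PATH | util::HTTP_URL_JOIN_QUERY
    );
    // e.g. http://example.com/app/api/v1?debug=1&page=2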
- * - * @param string $string The string to convert to boolean - * @param bool $default The value to return if we can't match any - * yes/no words - * @return boolean - */ - public static function str_to_bool($string, $default = false) - { - $yes_words = 'affirmative|all right|aye|indubitably|most assuredly|ok|of course|okay|sure thing|y|yes+|yea|yep|sure|yeah|true|t|on|1|oui|vrai'; - $no_words = 'no*|no way|nope|nah|na|never|absolutely not|by no means|negative|never ever|false|f|off|0|non|faux'; - - if (preg_match('/^(' . $yes_words . ')$/i', $string)) { - return true; - } elseif (preg_match('/^(' . $no_words . ')$/i', $string)) { - return false; - } - - return $default; - } - - /** - * Check if a string starts with the given string. - * - * @param string $string - * @param string $starts_with - * @return boolean - */ - public static function starts_with($string, $starts_with) - { - return strpos($string, $starts_with) === 0; - } - - /** - * Check if a string ends with the given string. - * - * @param string $string - * @param string $starts_with - * @return boolean - */ - public static function ends_with($string, $ends_with) - { - return substr($string, -strlen($ends_with)) === $ends_with; - } - - /** - * Check if a string contains another string. - * - * @param string $haystack - * @param string $needle - * @return boolean - */ - public static function str_contains($haystack, $needle) - { - return strpos($haystack, $needle) !== false; - } - - /** - * Check if a string contains another string. This version is case - * insensitive. - * - * @param string $haystack - * @param string $needle - * @return boolean - */ - public static function str_icontains($haystack, $needle) - { - return stripos($haystack, $needle) !== false; - } - - /** - * Return the file extension of the given filename. - * - * @param string $filename - * @return string - */ - public static function get_file_ext($filename) - { - return pathinfo($filename, PATHINFO_EXTENSION); - } - - /** - * Removes a directory (and its contents) recursively. - * - * Contributed by Askar (ARACOOL) - * - * @param string $dir The directory to be deleted recursively - * @param bool $traverseSymlinks Delete contents of symlinks recursively - * @return bool - * @throws \RuntimeException - */ - public static function rmdir($dir, $traverseSymlinks = false) - { - if (!file_exists($dir)) { - return true; - } elseif (!is_dir($dir)) { - throw new \RuntimeException('Given path is not a directory'); - } - - if (!is_link($dir) || $traverseSymlinks) { - foreach (scandir($dir) as $file) { - if ($file === '.' || $file === '..') { - continue; - } - - $currentPath = $dir . '/' . $file; - - if (is_dir($currentPath)) { - self::rmdir($currentPath, $traverseSymlinks); - } elseif (!unlink($currentPath)) { - // @codeCoverageIgnoreStart - throw new \RuntimeException('Unable to delete ' . $currentPath); - // @codeCoverageIgnoreEnd - } - } - } - - // Windows treats removing directory symlinks identically to removing directories. - if (is_link($dir) && !defined('PHP_WINDOWS_VERSION_MAJOR')) { - if (!unlink($dir)) { - // @codeCoverageIgnoreStart - throw new \RuntimeException('Unable to delete ' . $dir); - // @codeCoverageIgnoreEnd - } - } else { - if (!rmdir($dir)) { - // @codeCoverageIgnoreStart - throw new \RuntimeException('Unable to delete ' . $dir); - // @codeCoverageIgnoreEnd - } - } - - return true; - } - - /** - * Convert entities, while preserving already-encoded entities. 
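str_to_bool() and the small string helpers above are easiest to see with a few calls (class assumed autoloadable):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    var_dump(util::str_to_bool('yes'));           // true
    var_dump(util::str_to_bool('oui'));           // true  (French forms are listed)
    var_dump(util::str_to_bool('nope'));          // false
    var_dump(util::str_to_bool('maybe'));         // false (falls through to $default)
    var_dump(util::str_to_bool('maybe', true));   // true  (explicit default)

    var_dump(util::starts_with('util.php', 'util'));       // true
    var_dump(util::ends_with('util.php', '.php'));          // true
    var_dump(util::str_icontains('README.md', 'readme'));   // true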
- * - * @param string $string The text to be converted - * @return string - */ - public static function htmlentities($string, $preserve_encoded_entities = false) - { - if ($preserve_encoded_entities) { - // @codeCoverageIgnoreStart - if (defined('HHVM_VERSION')) { - $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES); - } else { - $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES, self::mbInternalEncoding()); - } - // @codeCoverageIgnoreEnd - - $translation_table[chr(38)] = '&'; - return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); - } - - return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); - } - - /** - * Convert >, <, ', " and & to html entities, but preserves entities that - * are already encoded. - * - * @param string $string The text to be converted - * @return string - */ - public static function htmlspecialchars($string, $preserve_encoded_entities = false) - { - if ($preserve_encoded_entities) { - // @codeCoverageIgnoreStart - if (defined('HHVM_VERSION')) { - $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES); - } else { - $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES, self::mbInternalEncoding()); - } - // @codeCoverageIgnoreEnd - - $translation_table[chr(38)] = '&'; - - return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); - } - - return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); - } - - /** - * Transliterates characters to their ASCII equivalents. - * - * Part of the URLify.php Project - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - * - * @param string $text Text that might have not-ASCII characters - * @param string $language Specifies a priority for a specific language. - * @return string Filtered string with replaced "nice" characters - */ - public static function downcode($text, $language = '') - { - self::initLanguageMap($language); - - if (self::seems_utf8($text)) { - if (preg_match_all(self::$regex, $text, $matches)) { - for ($i = 0; $i < count($matches[0]); $i++) { - $char = $matches[0][$i]; - if (isset(self::$map[$char])) { - $text = str_replace($char, self::$map[$char], $text); - } - } - } - } else { - // Not a UTF-8 string so we assume its ISO-8859-1 - $search = "\x80\x83\x8a\x8e\x9a\x9e\x9f\xa2\xa5\xb5\xc0\xc1\xc2\xc3\xc4\xc5\xc7\xc8\xc9\xca\xcb\xcc\xcd"; - $search .= "\xce\xcf\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xe0\xe1\xe2\xe3\xe4\xe5\xe7\xe8\xe9"; - $search .= "\xea\xeb\xec\xed\xee\xef\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xff"; - $text = strtr($text, $search, 'EfSZszYcYuAAAAAACEEEEIIIINOOOOOOUUUUYaaaaaaceeeeiiiinoooooouuuuyy'); - - // These latin characters should be represented by two characters so - // we can't use strtr - $complexSearch = array("\x8c", "\x9c", "\xc6", "\xd0", "\xde", "\xdf", "\xe6", "\xf0", "\xfe"); - $complexReplace = array('OE', 'oe', 'AE', 'DH', 'TH', 'ss', 'ae', 'dh', 'th'); - $text = str_replace($complexSearch, $complexReplace, $text); - } - - return $text; - } - - /** - * Converts all accent characters to ASCII characters. - * - * If there are no accent characters, then the string given is just - * returned. - * - * @param string $string Text that might have accent characters - * @param string $language Specifies a priority for a specific language. 
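downcode()/remove_accents() give the map named by $language priority over the generic latin map (initLanguageMap() moves it to the end of the list), which matters for German umlauts. A sketch with made-up input, assuming the class is autoloadable:

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    // Generic latin map applies: 'Ü' => 'U', 'Ä' => 'A'.
    echo util::remove_accents('Über Äpfel');        // e.g. "Uber Apfel"

    // With 'de' the German map wins: 'Ü' => 'Ue', 'Ä' => 'Ae'.
    echo util::remove_accents('Über Äpfel', 'de');  // e.g. "Ueber Aepfel"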
- * @return string Filtered string with replaced "nice" characters - */ - public static function remove_accents($string, $language = '') - { - if (!preg_match('/[\x80-\xff]/', $string)) { - return $string; - } - - return self::downcode($string, $language); - } - - /** - * Strip all witespaces from the given string. - * - * @param string $string The string to strip - * @return string - */ - public static function strip_space($string) - { - return preg_replace('/\s+/', '', $string); - } - - /** - * Sanitize a string by performing the following operation : - * - Remove accents - * - Lower the string - * - Remove punctuation characters - * - Strip whitespaces - * - * @param string $string the string to sanitize - * @return string - */ - public static function sanitize_string($string) - { - $string = self::remove_accents($string); - $string = strtolower($string); - $string = preg_replace('/[^a-zA-Z 0-9]+/', '', $string); - $string = self::strip_space($string); - - return $string; - } - - /** - * Pads a given string with zeroes on the left. - * - * @param int $number The number to pad - * @param int $length The total length of the desired string - * @return string - */ - public static function zero_pad($number, $length) - { - return str_pad($number, $length, '0', STR_PAD_LEFT); - } - - /** - * Converts a unix timestamp to a relative time string, such as "3 days ago" - * or "2 weeks ago". - * - * @param int $from The date to use as a starting point - * @param int $to The date to compare to, defaults to now - * @param string $suffix The string to add to the end, defaults to " ago" - * @return string - */ - public static function human_time_diff($from, $to = '', $as_text = false, $suffix = ' ago') - { - if ($to == '') { - $to = time(); - } - - $from = new \DateTime(date('Y-m-d H:i:s', $from)); - $to = new \DateTime(date('Y-m-d H:i:s', $to)); - $diff = $from->diff($to); - - if ($diff->y > 1) { - $text = $diff->y . ' years'; - } elseif ($diff->y == 1) { - $text = '1 year'; - } elseif ($diff->m > 1) { - $text = $diff->m . ' months'; - } elseif ($diff->m == 1) { - $text = '1 month'; - } elseif ($diff->d > 7) { - $text = ceil($diff->d / 7) . ' weeks'; - } elseif ($diff->d == 7) { - $text = '1 week'; - } elseif ($diff->d > 1) { - $text = $diff->d . ' days'; - } elseif ($diff->d == 1) { - $text = '1 day'; - } elseif ($diff->h > 1) { - $text = $diff->h . ' hours'; - } elseif ($diff->h == 1) { - $text = ' 1 hour'; - } elseif ($diff->i > 1) { - $text = $diff->i . ' minutes'; - } elseif ($diff->i == 1) { - $text = '1 minute'; - } elseif ($diff->s > 1) { - $text = $diff->s . ' seconds'; - } else { - $text = '1 second'; - } - - if ($as_text) { - $text = explode(' ', $text, 2); - $text = self::number_to_word($text[0]) . ' ' . $text[1]; - } - - return trim($text) . $suffix; - } - - /** - * Converts a number into the text equivalent. For example, 456 becomes four - * hundred and fifty-six. - * - * Part of the IntToWords Project. 
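human_time_diff() and sanitize_string() from this hunk, sketched with invented values (class assumed autoloadable):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    echo util::human_time_diff(time() - 3 * 86400);      // e.g. "3 days ago"
    echo util::human_time_diff(time() - 90, '', true);   // e.g. "one minute ago"

    // Accents removed, lowercased, punctuation and whitespace stripped.
    echo util::sanitize_string('Référence #42 / Überprüfung!');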
- * - * @param int|float $number The number to convert into text - * @return string - */ - public static function number_to_word($number) - { - $number = (string) $number; - - if (strpos($number, '.') !== false) { - list($number, $decimal) = explode('.', $number); - } else { - $decimal = false; - } - - $output = ''; - - if ($number[0] == '-') { - $output = 'negative '; - $number = ltrim($number, '-'); - } elseif ($number[0] == '+') { - $output = 'positive '; - $number = ltrim($number, '+'); - } - - if ($number[0] == '0') { - $output .= 'zero'; - } else { - $length = 19; - $number = str_pad($number, 60, '0', STR_PAD_LEFT); - $group = rtrim(chunk_split($number, 3, ' '), ' '); - $groups = explode(' ', $group); - $groups2 = array(); - - foreach ($groups as $group) { - $group[1] = isset($group[1]) ? $group[1] : null; - $group[2] = isset($group[2]) ? $group[2] : null; - $groups2[] = self::numberToWordThreeDigits($group[0], $group[1], $group[2]); - } - - for ($z = 0; $z < count($groups2); $z++) { - if ($groups2[$z] != '') { - $output .= $groups2[$z] . self::numberToWordConvertGroup($length - $z); - $output .= ($z < $length && ! array_search('', array_slice($groups2, $z + 1, -1)) && $groups2[$length] != '' && $groups[$length][0] == '0' ? ' and ' : ', '); - } - } - - $output = rtrim($output, ', '); - } - - if ($decimal > 0) { - $output .= ' point'; - - for ($i = 0; $i < strlen($decimal); $i++) { - $output .= ' ' . self::numberToWordConvertDigit($decimal[$i]); - } - } - - return $output; - } - - protected static function numberToWordConvertGroup($index) - { - switch($index) { - case 11: - return ' decillion'; - case 10: - return ' nonillion'; - case 9: - return ' octillion'; - case 8: - return ' septillion'; - case 7: - return ' sextillion'; - case 6: - return ' quintrillion'; - case 5: - return ' quadrillion'; - case 4: - return ' trillion'; - case 3: - return ' billion'; - case 2: - return ' million'; - case 1: - return ' thousand'; - case 0: - return ''; - } - - return ''; - } - - protected static function numberToWordThreeDigits($digit1, $digit2, $digit3) - { - $output = ''; - - if ($digit1 == '0' && $digit2 == '0' && $digit3 == '0') { - return ''; - } - - if ($digit1 != '0') { - $output .= self::numberToWordConvertDigit($digit1) . 
' hundred'; - - if ($digit2 != '0' || $digit3 != '0') { - $output .= ' and '; - } - } - if ($digit2 != '0') { - $output .= self::numberToWordTwoDigits($digit2, $digit3); - } elseif ($digit3 != '0') { - $output .= self::numberToWordConvertDigit($digit3); - } - - return $output; - } - - protected static function numberToWordTwoDigits($digit1, $digit2) - { - if ($digit2 == '0') { - switch ($digit1) { - case '1': - return 'ten'; - case '2': - return 'twenty'; - case '3': - return 'thirty'; - case '4': - return 'forty'; - case '5': - return 'fifty'; - case '6': - return 'sixty'; - case '7': - return 'seventy'; - case '8': - return 'eighty'; - case '9': - return 'ninety'; - } - } elseif ($digit1 == '1') { - switch ($digit2) { - case '1': - return 'eleven'; - case '2': - return 'twelve'; - case '3': - return 'thirteen'; - case '4': - return 'fourteen'; - case '5': - return 'fifteen'; - case '6': - return 'sixteen'; - case '7': - return 'seventeen'; - case '8': - return 'eighteen'; - case '9': - return 'nineteen'; - } - } else { - $second_digit = self::numberToWordConvertDigit($digit2); - - switch ($digit1) { - case '2': - return "twenty-{$second_digit}"; - case '3': - return "thirty-{$second_digit}"; - case '4': - return "forty-{$second_digit}"; - case '5': - return "fifty-{$second_digit}"; - case '6': - return "sixty-{$second_digit}"; - case '7': - return "seventy-{$second_digit}"; - case '8': - return "eighty-{$second_digit}"; - case '9': - return "ninety-{$second_digit}"; - } - } - } - - /** - * @param $digit - * @return string - * @throws \LogicException - */ - protected static function numberToWordConvertDigit($digit) - { - switch ($digit) { - case '0': - return 'zero'; - case '1': - return 'one'; - case '2': - return 'two'; - case '3': - return 'three'; - case '4': - return 'four'; - case '5': - return 'five'; - case '6': - return 'six'; - case '7': - return 'seven'; - case '8': - return 'eight'; - case '9': - return 'nine'; - default: - throw new \LogicException('Not a number'); - } - } - - /** - * Calculates percentage of numerator and denominator. - * - * @param int|float $numerator - * @param int|float $denominator - * @param int $decimals - * @param string $dec_point - * @param string $thousands_sep - * @return int|float - */ - public static function calculate_percentage($numerator, $denominator, $decimals = 2, $dec_point = '.', $thousands_sep = ',') - { - return number_format(($numerator / $denominator) * 100, $decimals, $dec_point, $thousands_sep); - } - - /** - * Transmit UTF-8 content headers if the headers haven't already been sent. - * - * @param string $content_type The content type to send out - * @return boolean - */ - public static function utf8_headers($content_type = 'text/html') - { - // @codeCoverageIgnoreStart - if (!headers_sent()) { - header('Content-type: ' . $content_type . '; charset=utf-8'); - - return true; - } - - return false; - // @codeCoverageIgnoreEnd - } - - /** - * Transmit headers that force a browser to display the download file - * dialog. Cross browser compatible. Only fires if headers have not - * already been sent. - * - * @param string $filename The name of the filename to display to - * browsers - * @param string $content The content to output for the download. 
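number_to_word() and calculate_percentage() in use; the first expected output is the one quoted in the docblock above, the rest are hedged (class assumed autoloadable):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    echo util::number_to_word(456);    // "four hundred and fifty-six"
    echo util::number_to_word(-3.5);   // e.g. "negative three point five"

    echo util::calculate_percentage(17, 40);   // e.g. "42.50"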
- * If you don't specify this, just the - * headers will be sent - * @return boolean - */ - public static function force_download($filename, $content = false) - { - // @codeCoverageIgnoreStart - if (!headers_sent()) { - // Required for some browsers - if (ini_get('zlib.output_compression')) { - @ini_set('zlib.output_compression', 'Off'); - } - - header('Pragma: public'); - header('Expires: 0'); - header('Cache-Control: must-revalidate, post-check=0, pre-check=0'); - - // Required for certain browsers - header('Cache-Control: private', false); - - header('Content-Disposition: attachment; filename="' . basename(str_replace('"', '', $filename)) . '";'); - header('Content-Type: application/force-download'); - header('Content-Transfer-Encoding: binary'); - - if ($content) { - header('Content-Length: ' . strlen($content)); - } - - ob_clean(); - flush(); - - if ($content) { - echo $content; - } - - return true; - } - - return false; - // @codeCoverageIgnoreEnd - } - - /** - * Sets the headers to prevent caching for the different browsers. - * - * Different browsers support different nocache headers, so several - * headers must be sent so that all of them get the point that no - * caching should occur - * - * @return boolean - */ - public static function nocache_headers() - { - // @codeCoverageIgnoreStart - if (!headers_sent()) { - header('Expires: Wed, 11 Jan 1984 05:00:00 GMT'); - header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); - header('Cache-Control: no-cache, must-revalidate, max-age=0'); - header('Pragma: no-cache'); - - return true; - } - - return false; - // @codeCoverageIgnoreEnd - } - - /** - * Generates a string of random characters. - * - * @throws LengthException If $length is bigger than the available - * character pool and $no_duplicate_chars is - * enabled - * - * @param integer $length The length of the string to - * generate - * @param boolean $human_friendly Whether or not to make the - * string human friendly by - * removing characters that can be - * confused with other characters ( - * O and 0, l and 1, etc) - * @param boolean $include_symbols Whether or not to include - * symbols in the string. Can not - * be enabled if $human_friendly is - * true - * @param boolean $no_duplicate_chars Whether or not to only use - * characters once in the string. 
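A hedged sketch of force_download(); the file name and CSV body are invented:

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    // Stream an in-memory CSV as a download; force_download() sends the
    // headers itself and echoes $content, so call it before any other output.
    util::force_download('report.csv', "id,name\n1,Alice\n");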
- * @return string - */ - public static function random_string($length = 16, $human_friendly = true, $include_symbols = false, $no_duplicate_chars = false) - { - $nice_chars = 'ABCDEFGHJKLMNPQRSTUVWXYZabcdefhjkmnprstuvwxyz23456789'; - $all_an = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890'; - $symbols = '!@#$%^&*()~_-=+{}[]|:;<>,.?/"\'\\`'; - $string = ''; - - // Determine the pool of available characters based on the given parameters - if ($human_friendly) { - $pool = $nice_chars; - } else { - $pool = $all_an; - - if ($include_symbols) { - $pool .= $symbols; - } - } - - if (!$no_duplicate_chars) { - return substr(str_shuffle(str_repeat($pool, $length)), 0, $length); - } - - // Don't allow duplicate letters to be disabled if the length is - // longer than the available characters - if ($no_duplicate_chars && strlen($pool) < $length) { - throw new \LengthException('$length exceeds the size of the pool and $no_duplicate_chars is enabled'); - } - - // Convert the pool of characters into an array of characters and - // shuffle the array - $pool = str_split($pool); - $poolLength = count($pool); - $rand = mt_rand(0, $poolLength - 1); - - // Generate our string - for ($i = 0; $i < $length; $i++) { - $string .= $pool[$rand]; - - // Remove the character from the array to avoid duplicates - array_splice($pool, $rand, 1); - - // Generate a new number - if (($poolLength - 2 - $i) > 0) { - $rand = mt_rand(0, $poolLength - 2 - $i); - } else { - $rand = 0; - } - } - - return $string; - } - - /** - * Generate secure random string of given length - * If 'openssl_random_pseudo_bytes' is not available - * then generate random string using default function - * - * Part of the Laravel Project - * - * @param int $length length of string - * @return bool - */ - public static function secure_random_string($length = 16) - { - if (function_exists('openssl_random_pseudo_bytes')) { - $bytes = openssl_random_pseudo_bytes($length * 2); - - if ($bytes === false) { - throw new \LengthException('$length is not accurate, unable to generate random string'); - } - - return substr(str_replace(array('/', '+', '='), '', base64_encode($bytes)), 0, $length); - } - - // @codeCoverageIgnoreStart - return static::random_string($length); - // @codeCoverageIgnoreEnd - } - - /** - * Check if a given string matches a given pattern. - * - * Contributed by Abhimanyu Sharma - * - * @param string $pattern Parttern of string exptected - * @param string $string String that need to be matched - * @return bool - */ - public static function match_string($pattern, $string, $caseSensitive = true) - { - if ($pattern == $string) { - return true; - } - - // Preg flags - $flags = $caseSensitive ? '' : 'i'; - - // Escape any regex special characters - $pattern = preg_quote($pattern, '#'); - - // Unescape * which is our wildcard character and change it to .* - $pattern = str_replace('\*', '.*', $pattern); - - return (bool) preg_match('#^' . $pattern . '$#' . $flags, $string); - } - - /** - * Validate an email address. - * - * @param string $possible_email An email address to validate - * @return bool - */ - public static function validate_email($possible_email) - { - return (bool) filter_var($possible_email, FILTER_VALIDATE_EMAIL); - } - - /** - * Return the URL to a user's gravatar. 
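random_string() versus secure_random_string(), as implemented above; a sketch assuming the class is autoloadable:

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    $code  = util::random_string(8);                      // human-friendly pool (no 0/O, 1/l, ...)
    $token = util::random_string(20, false, true);        // full alphanumeric pool plus symbols
    $pin   = util::random_string(10, false, false, true); // no repeated characters

    // With $no_duplicate_chars, $length must not exceed the pool size or a
    // \LengthException is thrown (see the guard above).

    // Prefer this one for anything security-sensitive; it uses
    // openssl_random_pseudo_bytes() when available.
    $secret = util::secure_random_string(32);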
- * - * @param string $email The email of the user - * @param integer $size The size of the gravatar - * @return string - */ - public static function get_gravatar($email, $size = 32) - { - if (self::is_https()) { - $url = 'https://secure.gravatar.com/'; - } else { - $url = 'http://www.gravatar.com/'; - } - - $url .= 'avatar/' . md5($email) . '?s=' . (int) abs($size); - - return $url; - } - - /** - * Turns all of the links in a string into HTML links. - * - * Part of the LinkifyURL Project - * - * @param string $text The string to parse - * @return string - */ - public static function linkify($text) - { - $text = preg_replace('/'/', ''', $text); // IE does not handle ' entity! - $section_html_pattern = '%# Rev:20100913_0900 github.com/jmrware/LinkifyURL - # Section text into HTML tags and everything else. - ( # $1: Everything not HTML tag. - [^<]+(?:(?!... tag. - ]*> # opening tag. - [^<]*(?:(?! # closing tag. - ) # End $2: - %ix'; - - return preg_replace_callback($section_html_pattern, array(__CLASS__, 'linkifyCallback'), $text); - } - - /** - * Callback for the preg_replace in the linkify() method. - * - * Part of the LinkifyURL Project - * - * @param array $matches Matches from the preg_ function - * @return string - */ - protected static function linkifyRegex($text) - { - $url_pattern = '/# Rev:20100913_0900 github.com\/jmrware\/LinkifyURL - # Match http & ftp URL that is not already linkified. - # Alternative 1: URL delimited by (parentheses). - (\() # $1 "(" start delimiter. - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $2: URL. - (\)) # $3: ")" end delimiter. - | # Alternative 2: URL delimited by [square brackets]. - (\[) # $4: "[" start delimiter. - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $5: URL. - (\]) # $6: "]" end delimiter. - | # Alternative 3: URL delimited by {curly braces}. - (\{) # $7: "{" start delimiter. - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $8: URL. - (\}) # $9: "}" end delimiter. - | # Alternative 4: URL delimited by . - (<|&(?:lt|\#60|\#x3c);) # $10: "<" start delimiter (or HTML entity). - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $11: URL. - (>|&(?:gt|\#62|\#x3e);) # $12: ">" end delimiter (or HTML entity). - | # Alternative 5: URL not delimited by (), [], {} or <>. - (# $13: Prefix proving URL not already linked. - (?: ^ # Can be a beginning of line or string, or - | [^=\s\'"\]] # a non-"=", non-quote, non-"]", followed by - ) \s*[\'"]? # optional whitespace and optional quote; - | [^=\s]\s+ # or... a non-equals sign followed by whitespace. - ) # End $13. Non-prelinkified-proof prefix. - (\b # $14: Other non-delimited URL. - (?:ht|f)tps?:\/\/ # Required literal http, https, ftp or ftps prefix. - [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]+ # All URI chars except "&" (normal*). - (?: # Either on a "&" or at the end of URI. - (?! # Allow a "&" char only if not start of an... - &(?:gt|\#0*62|\#x0*3e); # HTML ">" entity, or - | &(?:amp|apos|quot|\#0*3[49]|\#x0*2[27]); # a [&\'"] entity if - [.!&\',:?;]? # followed by optional punctuation then - (?:[^a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]|$) # a non-URI char or EOS. - ) & # If neg-assertion true, match "&" (special). - [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]* # More non-& URI chars (normal*). - )* # Unroll-the-loop (special normal*)*. - [a-z0-9\-_~$()*+=\/#[\]@%] # Last char can\'t be [.!&\',;:?] - ) # End $14. Other non-delimited URL. 
- /imx'; - - $url_replace = '$1$4$7$10$13$2$5$8$11$14$3$6$9$12'; - - return preg_replace($url_pattern, $url_replace, $text); - } - - /** - * Callback for the preg_replace in the linkify() method. - * - * Part of the LinkifyURL Project - * - * @param array $matches Matches from the preg_ function - * @return string - */ - protected static function linkifyCallback($matches) - { - if (isset($matches[2])) { - return $matches[2]; - } - - return self::linkifyRegex($matches[1]); - } - - /** - * Return the current URL. - * - * @return string - */ - public static function get_current_url() - { - $url = ''; - - // Check to see if it's over https - $is_https = self::is_https(); - if ($is_https) { - $url .= 'https://'; - } else { - $url .= 'http://'; - } - - // Was a username or password passed? - if (isset($_SERVER['PHP_AUTH_USER'])) { - $url .= $_SERVER['PHP_AUTH_USER']; - - if (isset($_SERVER['PHP_AUTH_PW'])) { - $url .= ':' . $_SERVER['PHP_AUTH_PW']; - } - - $url .= '@'; - } - - - // We want the user to stay on the same host they are currently on, - // but beware of security issues - // see http://shiflett.org/blog/2006/mar/server-name-versus-http-host - $url .= $_SERVER['HTTP_HOST']; - - $port = $_SERVER['SERVER_PORT']; - - // Is it on a non standard port? - if ($is_https && ($port != 443)) { - $url .= ':' . $_SERVER['SERVER_PORT']; - } elseif (!$is_https && ($port != 80)) { - $url .= ':' . $_SERVER['SERVER_PORT']; - } - - // Get the rest of the URL - if (!isset($_SERVER['REQUEST_URI'])) { - // Microsoft IIS doesn't set REQUEST_URI by default - $url .= $_SERVER['PHP_SELF']; - - if (isset($_SERVER['QUERY_STRING'])) { - $url .= '?' . $_SERVER['QUERY_STRING']; - } - } else { - $url .= $_SERVER['REQUEST_URI']; - } - - return $url; - } - - /** - * Returns the IP address of the client. - * - * @param boolean $trust_proxy_headers Whether or not to trust the - * proxy headers HTTP_CLIENT_IP - * and HTTP_X_FORWARDED_FOR. ONLY - * use if your server is behind a - * proxy that sets these values - * @return string - */ - public static function get_client_ip($trust_proxy_headers = false) - { - if (!$trust_proxy_headers) { - return $_SERVER['REMOTE_ADDR']; - } - - if (!empty($_SERVER['HTTP_CLIENT_IP'])) { - $ip = $_SERVER['HTTP_CLIENT_IP']; - } elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) { - $ip = $_SERVER['HTTP_X_FORWARDED_FOR']; - } else { - $ip = $_SERVER['REMOTE_ADDR']; - } - - return $ip; - } - - /** - * Truncate a string to a specified length without cutting a word off. - * - * @param string $string The string to truncate - * @param integer $length The length to truncate the string to - * @param string $append Text to append to the string IF it gets - * truncated, defaults to '...' - * @return string - */ - public static function safe_truncate($string, $length, $append = '...') - { - $ret = substr($string, 0, $length); - $last_space = strrpos($ret, ' '); - - if ($last_space !== false && $string != $ret) { - $ret = substr($ret, 0, $last_space); - } - - if ($ret != $string) { - $ret .= $append; - } - - return $ret; - } - - - /** - * Truncate the string to given length of characters. - * - * @param string $string The variable to truncate - * @param integer $limit The length to truncate the string to - * @param string $append Text to append to the string IF it gets - * truncated, defaults to '...' 
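The three truncation helpers differ in where they cut; a sketch with a made-up sentence (class assumed autoloadable):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    $text = 'The quick brown fox jumps over the lazy dog';

    echo util::safe_truncate($text, 12);      // "The quick..." (last space before the limit)
    echo util::limit_characters($text, 12);   // "The quick br..." (hard cut, multibyte-safe)
    echo util::limit_words($text, 4);         // "The quick brown fox..." (whole words)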
- * @return string - */ - public static function limit_characters($string, $limit = 100, $append = '...') - { - if (mb_strlen($string) <= $limit) { - return $string; - } - - return rtrim(mb_substr($string, 0, $limit, 'UTF-8')) . $append; - } - - /** - * Truncate the string to given length of words. - * - * @param $string - * @param $limit - * @param string $append - * @return string - */ - public static function limit_words($string, $limit = 100, $append = '...') - { - preg_match('/^\s*+(?:\S++\s*+){1,' . $limit . '}/u', $string, $matches); - - if (!isset($matches[0]) || strlen($string) === strlen($matches[0])) { - return $string; - } - - return rtrim($matches[0]).$append; - } - - /** - * Returns the ordinal version of a number (appends th, st, nd, rd). - * - * @param string $number The number to append an ordinal suffix to - * @return string - */ - public static function ordinal($number) - { - $test_c = abs($number) % 10; - $ext = ((abs($number) % 100 < 21 && abs($number) % 100 > 4) ? 'th' : (($test_c < 4) ? ($test_c < 3) ? ($test_c < 2) ? ($test_c < 1) ? 'th' : 'st' : 'nd' : 'rd' : 'th')); - - return $number . $ext; - } - - /** - * Returns the file permissions as a nice string, like -rw-r--r-- or false - * if the file is not found. - * - * @param string $file The name of the file to get permissions form - * @param int $perms Numerical value of permissions to display as text. - * @return string - */ - public static function full_permissions($file, $perms = null) - { - if (is_null($perms)) { - if (!file_exists($file)) { - return false; - } - $perms = fileperms($file); - } - - if (($perms & 0xC000) == 0xC000) { - // Socket - $info = 's'; - } elseif (($perms & 0xA000) == 0xA000) { - // Symbolic Link - $info = 'l'; - } elseif (($perms & 0x8000) == 0x8000) { - // Regular - $info = '-'; - } elseif (($perms & 0x6000) == 0x6000) { - // Block special - $info = 'b'; - } elseif (($perms & 0x4000) == 0x4000) { - // Directory - $info = 'd'; - } elseif (($perms & 0x2000) == 0x2000) { - // Character special - $info = 'c'; - } elseif (($perms & 0x1000) == 0x1000) { - // FIFO pipe - $info = 'p'; - } else { - // Unknown - $info = 'u'; - } - - // Owner - $info .= (($perms & 0x0100) ? 'r' : '-'); - $info .= (($perms & 0x0080) ? 'w' : '-'); - $info .= (($perms & 0x0040) ? - (($perms & 0x0800) ? 's' : 'x') : - (($perms & 0x0800) ? 'S' : '-')); - - // Group - $info .= (($perms & 0x0020) ? 'r' : '-'); - $info .= (($perms & 0x0010) ? 'w' : '-'); - $info .= (($perms & 0x0008) ? - (($perms & 0x0400) ? 's' : 'x') : - (($perms & 0x0400) ? 'S' : '-')); - - // World - $info .= (($perms & 0x0004) ? 'r' : '-'); - $info .= (($perms & 0x0002) ? 'w' : '-'); - $info .= (($perms & 0x0001) ? - (($perms & 0x0200) ? 't' : 'x') : - (($perms & 0x0200) ? 'T' : '-')); - - return $info; - } - - /** - * Returns the first element in an array. - * - * @param array $array - * @return mixed - */ - public static function array_first(array $array) - { - return reset($array); - } - - /** - * Returns the last element in an array. - * - * @param array $array - * @return mixed - */ - public static function array_last(array $array) - { - return end($array); - } - - /** - * Returns the first key in an array. - * - * @param array $array - * @return int|string - */ - public static function array_first_key(array $array) - { - reset($array); - - return key($array); - } - - /** - * Returns the last key in an array. 
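The array peek helpers above at a glance (class assumed autoloadable, data invented):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    $scores = ['alice' => 10, 'bob' => 7, 'carol' => 12];

    echo util::array_first($scores);       // 10
    echo util::array_last($scores);        // 12
    echo util::array_first_key($scores);   // "alice"
    echo util::array_last_key($scores);    // "carol"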
- * - * @param array $array - * @return int|string - */ - public static function array_last_key(array $array) - { - end($array); - - return key($array); - } - - /** - * Flatten a multi-dimensional array into a one dimensional array. - * - * Contributed by Theodore R. Smith of PHP Experts, Inc. - * - * @param array $array The array to flatten - * @param boolean $preserve_keys Whether or not to preserve array keys. - * Keys from deeply nested arrays will - * overwrite keys from shallowy nested arrays - * @return array - */ - public static function array_flatten(array $array, $preserve_keys = true) - { - $flattened = array(); - - array_walk_recursive($array, function($value, $key) use (&$flattened, $preserve_keys) { - if ($preserve_keys && !is_int($key)) { - $flattened[$key] = $value; - } else { - $flattened[] = $value; - } - }); - - return $flattened; - } - - /** - * Accepts an array, and returns an array of values from that array as - * specified by $field. For example, if the array is full of objects - * and you call util::array_pluck($array, 'name'), the function will - * return an array of values from $array[]->name. - * - * @param array $array An array - * @param string $field The field to get values from - * @param boolean $preserve_keys Whether or not to preserve the - * array keys - * @param boolean $remove_nomatches If the field doesn't appear to be set, - * remove it from the array - * @return array - */ - public static function array_pluck(array $array, $field, $preserve_keys = true, $remove_nomatches = true) - { - $new_list = array(); - - foreach ($array as $key => $value) { - if (is_object($value)) { - if (isset($value->{$field})) { - if ($preserve_keys) { - $new_list[$key] = $value->{$field}; - } else { - $new_list[] = $value->{$field}; - } - } elseif (!$remove_nomatches) { - $new_list[$key] = $value; - } - } else { - if (isset($value[$field])) { - if ($preserve_keys) { - $new_list[$key] = $value[$field]; - } else { - $new_list[] = $value[$field]; - } - } elseif (!$remove_nomatches) { - $new_list[$key] = $value; - } - } - } - - return $new_list; - } - - /** - * Searches for a given value in an array of arrays, objects and scalar - * values. You can optionally specify a field of the nested arrays and - * objects to search in. - * - * @param array $array The array to search - * @param scalar $search The value to search for - * @param string $field The field to search in, if not specified - * all fields will be searched - * @return boolean|scalar False on failure or the array key on success - */ - public static function array_search_deep(array $array, $search, $field = false) - { - // *grumbles* stupid PHP type system - $search = (string) $search; - - foreach ($array as $key => $elem) { - // *grumbles* stupid PHP type system - $key = (string) $key; - - if ($field) { - if (is_object($elem) && $elem->{$field} === $search) { - return $key; - } elseif (is_array($elem) && $elem[$field] === $search) { - return $key; - } elseif (is_scalar($elem) && $elem === $search) { - return $key; - } - } else { - if (is_object($elem)) { - $elem = (array) $elem; - - if (in_array($search, $elem)) { - return $key; - } - } elseif (is_array($elem) && in_array($search, $elem)) { - return $key; - } elseif (is_scalar($elem) && $elem === $search) { - return $key; - } - } - } - - return false; - } - - /** - * Returns an array containing all the elements of arr1 after applying - * the callback function to each one. 
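array_pluck() and array_search_deep() as defined above, sketched on an invented record set (class assumed autoloadable):

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    $users = [
        ['name' => 'Alice', 'age' => 30],
        ['name' => 'Bob',   'age' => 25],
    ];

    // Collect one field from every row (works for arrays of arrays or of objects).
    print_r(util::array_pluck($users, 'name'));               // [0 => 'Alice', 1 => 'Bob']

    // Return the key of the first row whose 'name' field matches, else false.
    var_dump(util::array_search_deep($users, 'Bob', 'name')); // string(1) "1"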
- * - * @param string $callback Callback function to run for each - * element in each array - * @param array $array An array to run through the callback - * function - * @param boolean $on_nonscalar Whether or not to call the callback - * function on nonscalar values - * (Objects, resources, etc) - * @return array - */ - public static function array_map_deep(array $array, $callback, $on_nonscalar = false) - { - foreach ($array as $key => $value) { - if (is_array($value)) { - $args = array($value, $callback, $on_nonscalar); - $array[$key] = call_user_func_array(array(__CLASS__, __FUNCTION__), $args); - } elseif (is_scalar($value) || $on_nonscalar) { - $array[$key] = call_user_func($callback, $value); - } - } - - return $array; - } - - public static function array_clean(array $array) - { - return array_filter($array); - } - - /** - * Wrapper to prevent errors if the user doesn't have the mbstring - * extension installed. - * - * @param string $encoding - * @return string - */ - protected static function mbInternalEncoding($encoding = null) - { - if (function_exists('mb_internal_encoding')) { - return $encoding ? mb_internal_encoding($encoding) : mb_internal_encoding(); - } - - // @codeCoverageIgnoreStart - return 'UTF-8'; - // @codeCoverageIgnoreEnd - } - - /** - * Set the writable bit on a file to the minimum value that allows the user - * running PHP to write to it. - * - * @param string $filename The filename to set the writable bit on - * @param boolean $writable Whether to make the file writable or not - * @return boolean - */ - public static function set_writable($filename, $writable = true) - { - $stat = @stat($filename); - - if ($stat === false) { - return false; - } - - // We're on Windows - if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { - return true; - } - - list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); - - if ($writable) { - // Set only the user writable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, fileperms($filename) | 0200); - } - - // Set only the group writable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, fileperms($filename) | 0220); - } - - // Set the world writable bit (file isn't owned or grouped by us) - return chmod($filename, fileperms($filename) | 0222); - } else { - // Set only the user writable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, (fileperms($filename) | 0222) ^ 0222); - } - - // Set only the group writable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, (fileperms($filename) | 0222) ^ 0022); - } - - // Set the world writable bit (file isn't owned or grouped by us) - return chmod($filename, (fileperms($filename) | 0222) ^ 0002); - } - } - - /** - * Set the readable bit on a file to the minimum value that allows the user - * running PHP to read to it. 
- * - * @param string $filename The filename to set the readable bit on - * @param boolean $readable Whether to make the file readable or not - * @return boolean - */ - public static function set_readable($filename, $readable = true) - { - $stat = @stat($filename); - - if ($stat === false) { - return false; - } - - // We're on Windows - if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { - return true; - } - - list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); - - if ($readable) { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, fileperms($filename) | 0400); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, fileperms($filename) | 0440); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, fileperms($filename) | 0444); - } else { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, (fileperms($filename) | 0444) ^ 0444); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, (fileperms($filename) | 0444) ^ 0044); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, (fileperms($filename) | 0444) ^ 0004); - } - } - - /** - * Set the executable bit on a file to the minimum value that allows the - * user running PHP to read to it. - * - * @param string $filename The filename to set the executable bit on - * @param boolean $executable Whether to make the file executable or not - * @return boolean - */ - public static function set_executable($filename, $executable = true) - { - $stat = @stat($filename); - - if ($stat === false) { - return false; - } - - // We're on Windows - if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { - return true; - } - - list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); - - if ($executable) { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, fileperms($filename) | 0100); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, fileperms($filename) | 0110); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, fileperms($filename) | 0111); - } else { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, (fileperms($filename) | 0111) ^ 0111); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, (fileperms($filename) | 0111) ^ 0011); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, (fileperms($filename) | 0111) ^ 0001); - } - } - - /** - * Returns size of a given directory in bytes. - * - * @param string $dir - * @return integer - */ - public static function directory_size($dir) - { - $size = 0; - foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $file => $key) { - if ($key->isFile()) { - $size += $key->getSize(); - } - } - return $size; - } - - /** - * Returns a home directory of current user. 
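The filesystem helpers from the end of the class, sketched under the assumption that the class is autoloadable; the results naturally depend on the machine:

    <?php
    use utilphp\util; // assumption: class available via an autoloader

    printf("%.1f MiB\n", util::directory_size(__DIR__) / 1048576);

    // Recursive, naturally sorted list of every path below a directory.
    foreach (util::directory_contents(util::get_user_directory()) as $path) {
        echo $path, "\n";
    }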
- * - * @return string - */ - public static function get_user_directory() - { - if (isset($_SERVER['HOMEDRIVE'])) return $_SERVER['HOMEDRIVE'] . $_SERVER['HOMEPATH']; - else return $_SERVER['HOME']; - } - - /** - * Returns all paths inside a directory. - * - * @param string $dir - * @return array - */ - public static function directory_contents($dir) - { - $contents = array(); - foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::KEY_AS_PATHNAME | \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $pathname => $fi) { - $contents[] = $pathname; - } - natsort($contents); - return $contents; - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts deleted file mode 100644 index af10d41..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.d.ts +++ /dev/null @@ -1,89 +0,0 @@ -declare namespace npmRunPath { - interface RunPathOptions { - /** - Working directory. - - @default process.cwd() - */ - readonly cwd?: string; - - /** - PATH to be appended. Default: [`PATH`](https://github.com/sindresorhus/path-key). - - Set it to an empty string to exclude the default PATH. - */ - readonly path?: string; - - /** - Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. - - This can be either an absolute path or a path relative to the `cwd` option. - - @default process.execPath - */ - readonly execPath?: string; - } - - interface ProcessEnv { - [key: string]: string | undefined; - } - - interface EnvOptions { - /** - Working directory. - - @default process.cwd() - */ - readonly cwd?: string; - - /** - Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. - */ - readonly env?: ProcessEnv; - - /** - Path to the current Node.js executable. Its directory is pushed to the front of PATH. - - This can be either an absolute path or a path relative to the `cwd` option. - - @default process.execPath - */ - readonly execPath?: string; - } -} - -declare const npmRunPath: { - /** - Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries. - - @returns The augmented path string. - - @example - ``` - import * as childProcess from 'child_process'; - import npmRunPath = require('npm-run-path'); - - console.log(process.env.PATH); - //=> '/usr/local/bin' - - console.log(npmRunPath()); - //=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' - - // `foo` is a locally installed binary - childProcess.execFileSync('foo', { - env: npmRunPath.env() - }); - ``` - */ - (options?: npmRunPath.RunPathOptions): string; - - /** - @returns The augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. 
- */ - env(options?: npmRunPath.EnvOptions): npmRunPath.ProcessEnv; - - // TODO: Remove this for the next major release - default: typeof npmRunPath; -}; - -export = npmRunPath; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js deleted file mode 100644 index 8c94abc..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/index.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; -const path = require('path'); -const pathKey = require('path-key'); - -const npmRunPath = options => { - options = { - cwd: process.cwd(), - path: process.env[pathKey()], - execPath: process.execPath, - ...options - }; - - let previous; - let cwdPath = path.resolve(options.cwd); - const result = []; - - while (previous !== cwdPath) { - result.push(path.join(cwdPath, 'node_modules/.bin')); - previous = cwdPath; - cwdPath = path.resolve(cwdPath, '..'); - } - - // Ensure the running `node` binary is used - const execPathDir = path.resolve(options.cwd, options.execPath, '..'); - result.push(execPathDir); - - return result.concat(options.path).join(path.delimiter); -}; - -module.exports = npmRunPath; -// TODO: Remove this for the next major release -module.exports.default = npmRunPath; - -module.exports.env = options => { - options = { - env: process.env, - ...options - }; - - const env = {...options.env}; - const path = pathKey({env}); - - options.path = env[path]; - env[path] = module.exports(options); - - return env; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
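For reference, the core of the `index.js` removed above is a simple directory walk: starting from `cwd`, it collects a `node_modules/.bin` entry for every ancestor directory and joins them onto the existing `PATH`. A minimal standalone sketch of that walk (illustration only; `localBinDirs` is not part of the package):

```js
const path = require('path');

// Collect <dir>/node_modules/.bin for cwd and every ancestor directory,
// mirroring the while-loop in npm-run-path's index.js.
function localBinDirs(cwd = process.cwd()) {
  const result = [];
  let current = path.resolve(cwd);
  let previous;

  while (previous !== current) {
    result.push(path.join(current, 'node_modules/.bin'));
    previous = current;
    current = path.resolve(current, '..'); // the filesystem root resolves to itself, ending the loop
  }

  return result;
}

console.log(localBinDirs('/home/user/dev/foo'));
//=> ['/home/user/dev/foo/node_modules/.bin', '/home/user/dev/node_modules/.bin', ...]
```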
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json deleted file mode 100644 index feb8c00..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "npm-run-path", - "version": "4.0.1", - "description": "Get your PATH prepended with locally installed binaries", - "license": "MIT", - "repository": "sindresorhus/npm-run-path", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "npm", - "run", - "path", - "package", - "bin", - "binary", - "binaries", - "script", - "cli", - "command-line", - "execute", - "executable" - ], - "dependencies": { - "path-key": "^3.0.0" - }, - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md deleted file mode 100644 index 557fbeb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/npm-run-path/readme.md +++ /dev/null @@ -1,115 +0,0 @@ -# npm-run-path [![Build Status](https://travis-ci.org/sindresorhus/npm-run-path.svg?branch=master)](https://travis-ci.org/sindresorhus/npm-run-path) - -> Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries - -In [npm run scripts](https://docs.npmjs.com/cli/run-script) you can execute locally installed binaries by name. This module enables the same behavior outside npm. - - -## Install - -``` -$ npm install npm-run-path -``` - - -## Usage - -```js -const childProcess = require('child_process'); -const npmRunPath = require('npm-run-path'); - -console.log(process.env.PATH); -//=> '/usr/local/bin' - -console.log(npmRunPath()); -//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' - -// `foo` is a locally installed binary -childProcess.execFileSync('foo', { - env: npmRunPath.env() -}); -``` - - -## API - -### npmRunPath(options?) - -Returns the augmented path string. - -#### options - -Type: `object` - -##### cwd - -Type: `string`
-Default: `process.cwd()` - -Working directory. - -##### path - -Type: `string`
-Default: [`PATH`](https://github.com/sindresorhus/path-key) - -PATH to be appended.
-Set it to an empty string to exclude the default PATH. - -##### execPath - -Type: `string`
-Default: `process.execPath` - -Path to the current Node.js executable. Its directory is pushed to the front of PATH. - -This can be either an absolute path or a path relative to the [`cwd` option](#cwd). - -### npmRunPath.env(options?) - -Returns the augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. - -#### options - -Type: `object` - -##### cwd - -Type: `string`
-Default: `process.cwd()` - -Working directory. - -##### env - -Type: `Object` - -Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. - -##### execPath - -Type: `string`
-Default: `process.execPath` - -Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. - -This can be either an absolute path or a path relative to the [`cwd` option](#cwd). - - -## Related - -- [npm-run-path-cli](https://github.com/sindresorhus/npm-run-path-cli) - CLI for this module -- [execa](https://github.com/sindresorhus/execa) - Execute a locally installed binary - - ---- - -
-*Get professional support for this package with a Tidelift subscription.*
-
-*Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.*
-
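To make the documented options concrete, here is a small hedged usage sketch; the binary name `mocha` is only an assumption for illustration, any locally installed binary works:

```js
const childProcess = require('child_process');
const npmRunPath = require('npm-run-path');

// Spawn a locally installed binary without referencing node_modules/.bin
// directly; npmRunPath.env() returns a copy of process.env with the PATH
// entry augmented for the given working directory.
childProcess.execFileSync('mocha', ['--version'], {
  env: npmRunPath.env({ cwd: process.cwd() }),
  stdio: 'inherit'
});
```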
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts deleted file mode 100644 index ea84cab..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.d.ts +++ /dev/null @@ -1,64 +0,0 @@ -declare namespace onetime { - interface Options { - /** - Throw an error when called more than once. - - @default false - */ - throw?: boolean; - } -} - -declare const onetime: { - /** - Ensure a function is only called once. When called multiple times it will return the return value from the first call. - - @param fn - Function that should only be called once. - @returns A function that only calls `fn` once. - - @example - ``` - import onetime = require('onetime'); - - let i = 0; - - const foo = onetime(() => ++i); - - foo(); //=> 1 - foo(); //=> 1 - foo(); //=> 1 - - onetime.callCount(foo); //=> 3 - ``` - */ - ( - fn: (...arguments: ArgumentsType) => ReturnType, - options?: onetime.Options - ): (...arguments: ArgumentsType) => ReturnType; - - /** - Get the number of times `fn` has been called. - - @param fn - Function to get call count from. - @returns A number representing how many times `fn` has been called. - - @example - ``` - import onetime = require('onetime'); - - const foo = onetime(() => {}); - foo(); - foo(); - foo(); - - console.log(onetime.callCount(foo)); - //=> 3 - ``` - */ - callCount(fn: (...arguments: any[]) => unknown): number; - - // TODO: Remove this for the next major release - default: typeof onetime; -}; - -export = onetime; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js deleted file mode 100644 index 99c5fc1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/index.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict'; -const mimicFn = require('mimic-fn'); - -const calledFunctions = new WeakMap(); - -const onetime = (function_, options = {}) => { - if (typeof function_ !== 'function') { - throw new TypeError('Expected a function'); - } - - let returnValue; - let callCount = 0; - const functionName = function_.displayName || function_.name || ''; - - const onetime = function (...arguments_) { - calledFunctions.set(onetime, ++callCount); - - if (callCount === 1) { - returnValue = function_.apply(this, arguments_); - function_ = null; - } else if (options.throw === true) { - throw new Error(`Function \`${functionName}\` can only be called once`); - } - - return returnValue; - }; - - mimicFn(onetime, function_); - calledFunctions.set(onetime, callCount); - - return onetime; -}; - -module.exports = onetime; -// TODO: Remove this for the next major release -module.exports.default = onetime; - -module.exports.callCount = function_ => { - if (!calledFunctions.has(function_)) { - throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`); - } - - return calledFunctions.get(function_); -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json deleted file mode 100644 index 54caea5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "onetime", - "version": "5.1.2", - "description": "Ensure a function is only called once", - "license": "MIT", - "repository": "sindresorhus/onetime", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "once", - "function", - "one", - "onetime", - "func", - "fn", - "single", - "call", - "called", - "prevent" - ], - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md deleted file mode 100644 index 2d133d3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/onetime/readme.md +++ /dev/null @@ -1,94 +0,0 @@ -# onetime [![Build Status](https://travis-ci.com/sindresorhus/onetime.svg?branch=master)](https://travis-ci.com/github/sindresorhus/onetime) - -> Ensure a function is only called once - -When called multiple times it will return the return value from the first call. - -*Unlike the module [once](https://github.com/isaacs/once), this one isn't naughty and extending `Function.prototype`.* - -## Install - -``` -$ npm install onetime -``` - -## Usage - -```js -const onetime = require('onetime'); - -let i = 0; - -const foo = onetime(() => ++i); - -foo(); //=> 1 -foo(); //=> 1 -foo(); //=> 1 - -onetime.callCount(foo); //=> 3 -``` - -```js -const onetime = require('onetime'); - -const foo = onetime(() => {}, {throw: true}); - -foo(); - -foo(); -//=> Error: Function `foo` can only be called once -``` - -## API - -### onetime(fn, options?) - -Returns a function that only calls `fn` once. - -#### fn - -Type: `Function` - -Function that should only be called once. - -#### options - -Type: `object` - -##### throw - -Type: `boolean`\ -Default: `false` - -Throw an error when called more than once. - -### onetime.callCount(fn) - -Returns a number representing how many times `fn` has been called. 
- -Note: It throws an error if you pass in a function that is not wrapped by `onetime`. - -```js -const onetime = require('onetime'); - -const foo = onetime(() => {}); - -foo(); -foo(); -foo(); - -console.log(onetime.callCount(foo)); -//=> 3 -``` - -#### fn - -Type: `Function` - -Function to get call count from. - -## onetime for enterprise - -Available as part of the Tidelift Subscription. - -The maintainers of onetime and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-onetime?utm_source=npm-onetime&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts deleted file mode 100644 index 7c575d1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/// - -declare namespace pathKey { - interface Options { - /** - Use a custom environment variables object. Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env). - */ - readonly env?: {[key: string]: string | undefined}; - - /** - Get the PATH key for a specific platform. Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform). - */ - readonly platform?: NodeJS.Platform; - } -} - -declare const pathKey: { - /** - Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform. - - @example - ``` - import pathKey = require('path-key'); - - const key = pathKey(); - //=> 'PATH' - - const PATH = process.env[key]; - //=> '/usr/local/bin:/usr/bin:/bin' - ``` - */ - (options?: pathKey.Options): string; - - // TODO: Remove this for the next major release, refactor the whole definition to: - // declare function pathKey(options?: pathKey.Options): string; - // export = pathKey; - default: typeof pathKey; -}; - -export = pathKey; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js deleted file mode 100644 index 0cf6415..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/index.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -const pathKey = (options = {}) => { - const environment = options.env || process.env; - const platform = options.platform || process.platform; - - if (platform !== 'win32') { - return 'PATH'; - } - - return Object.keys(environment).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path'; -}; - -module.exports = pathKey; -// TODO: Remove this for the next major release -module.exports.default = pathKey; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including 
without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json deleted file mode 100644 index c8cbd38..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "path-key", - "version": "3.1.1", - "description": "Get the PATH environment variable key cross-platform", - "license": "MIT", - "repository": "sindresorhus/path-key", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "path", - "key", - "environment", - "env", - "variable", - "var", - "get", - "cross-platform", - "windows" - ], - "devDependencies": { - "@types/node": "^11.13.0", - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md deleted file mode 100644 index a9052d7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/path-key/readme.md +++ /dev/null @@ -1,61 +0,0 @@ -# path-key [![Build Status](https://travis-ci.org/sindresorhus/path-key.svg?branch=master)](https://travis-ci.org/sindresorhus/path-key) - -> Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform - -It's usually `PATH`, but on Windows it can be any casing like `Path`... - - -## Install - -``` -$ npm install path-key -``` - - -## Usage - -```js -const pathKey = require('path-key'); - -const key = pathKey(); -//=> 'PATH' - -const PATH = process.env[key]; -//=> '/usr/local/bin:/usr/bin:/bin' -``` - - -## API - -### pathKey(options?) - -#### options - -Type: `object` - -##### env - -Type: `object`
-Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env)
-
-Use a custom environment variables object.
-
-##### platform
-
-Type: `string`
-Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform) - -Get the PATH key for a specific platform. - - ---- - -
-*Get professional support for this package with a Tidelift subscription.*
-
-*Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.*
-
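As a quick illustration of why the removed `path-key` helper exists (the Windows environment object below is a made-up example):

```js
const pathKey = require('path-key');

// On Windows the PATH variable may be spelled `Path`, `PATH`, etc.
// path-key returns whichever key the supplied environment actually uses.
const key = pathKey({
  env: { Path: 'C:\\Windows\\system32' },
  platform: 'win32'
});
console.log(key); //=> 'Path'

// With no options it falls back to process.env and process.platform.
console.log(process.env[pathKey()]);
```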
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md deleted file mode 100644 index 8ccc6c1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/CHANGELOG.md +++ /dev/null @@ -1,136 +0,0 @@ -# Release history - -**All notable changes to this project will be documented in this file.** - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -
-**Guiding Principles**
-
-- Changelogs are for humans, not machines.
-- There should be an entry for every single version.
-- The same types of changes should be grouped.
-- Versions and sections should be linkable.
-- The latest version comes first.
-- The release date of each version is displayed.
-- Mention whether you follow Semantic Versioning.
-
- -
-**Types of changes**
-
-Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/))_:
-
-- `Added` for new features.
-- `Changed` for changes in existing functionality.
-- `Deprecated` for soon-to-be removed features.
-- `Removed` for now removed features.
-- `Fixed` for any bug fixes.
-- `Security` in case of vulnerabilities.
-
- -## 2.3.1 (2022-01-02) - -### Fixed - -* Fixes bug when a pattern containing an expression after the closing parenthesis (`/!(*.d).{ts,tsx}`) was incorrectly converted to regexp ([9f241ef](https://github.com/micromatch/picomatch/commit/9f241ef)). - -### Changed - -* Some documentation improvements ([f81d236](https://github.com/micromatch/picomatch/commit/f81d236), [421e0e7](https://github.com/micromatch/picomatch/commit/421e0e7)). - -## 2.3.0 (2021-05-21) - -### Fixed - -* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) - -## 2.2.3 (2021-04-10) - -### Fixed - -* Do not skip pattern seperator for square brackets ([fb08a30](https://github.com/micromatch/picomatch/commit/fb08a30)). -* Set negatedExtGlob also if it does not span the whole pattern ([032e3f5](https://github.com/micromatch/picomatch/commit/032e3f5)). - -## 2.2.2 (2020-03-21) - -### Fixed - -* Correctly handle parts of the pattern after parentheses in the `scan` method ([e15b920](https://github.com/micromatch/picomatch/commit/e15b920)). - -## 2.2.1 (2020-01-04) - -* Fixes [#49](https://github.com/micromatch/picomatch/issues/49), so that braces with no sets or ranges are now propertly treated as literals. - -## 2.2.0 (2020-01-04) - -* Disable fastpaths mode for the parse method ([5b8d33f](https://github.com/micromatch/picomatch/commit/5b8d33f)) -* Add `tokens`, `slashes`, and `parts` to the object returned by `picomatch.scan()`. - -## 2.1.0 (2019-10-31) - -* add benchmarks for scan ([4793b92](https://github.com/micromatch/picomatch/commit/4793b92)) -* Add eslint object-curly-spacing rule ([707c650](https://github.com/micromatch/picomatch/commit/707c650)) -* Add prefer-const eslint rule ([5c7501c](https://github.com/micromatch/picomatch/commit/5c7501c)) -* Add support for nonegate in scan API ([275c9b9](https://github.com/micromatch/picomatch/commit/275c9b9)) -* Change lets to consts. Move root import up. ([4840625](https://github.com/micromatch/picomatch/commit/4840625)) -* closes https://github.com/micromatch/picomatch/issues/21 ([766bcb0](https://github.com/micromatch/picomatch/commit/766bcb0)) -* Fix "Extglobs" table in readme ([eb19da8](https://github.com/micromatch/picomatch/commit/eb19da8)) -* fixes https://github.com/micromatch/picomatch/issues/20 ([9caca07](https://github.com/micromatch/picomatch/commit/9caca07)) -* fixes https://github.com/micromatch/picomatch/issues/26 ([fa58f45](https://github.com/micromatch/picomatch/commit/fa58f45)) -* Lint test ([d433a34](https://github.com/micromatch/picomatch/commit/d433a34)) -* lint unit tests ([0159b55](https://github.com/micromatch/picomatch/commit/0159b55)) -* Make scan work with noext ([6c02e03](https://github.com/micromatch/picomatch/commit/6c02e03)) -* minor linting ([c2a2b87](https://github.com/micromatch/picomatch/commit/c2a2b87)) -* minor parser improvements ([197671d](https://github.com/micromatch/picomatch/commit/197671d)) -* remove eslint since it... 
([07876fa](https://github.com/micromatch/picomatch/commit/07876fa)) -* remove funding file ([8ebe96d](https://github.com/micromatch/picomatch/commit/8ebe96d)) -* Remove unused funks ([cbc6d54](https://github.com/micromatch/picomatch/commit/cbc6d54)) -* Run eslint during pretest, fix existing eslint findings ([0682367](https://github.com/micromatch/picomatch/commit/0682367)) -* support `noparen` in scan ([3d37569](https://github.com/micromatch/picomatch/commit/3d37569)) -* update changelog ([7b34e77](https://github.com/micromatch/picomatch/commit/7b34e77)) -* update travis ([777f038](https://github.com/micromatch/picomatch/commit/777f038)) -* Use eslint-disable-next-line instead of eslint-disable ([4e7c1fd](https://github.com/micromatch/picomatch/commit/4e7c1fd)) - -## 2.0.7 (2019-05-14) - -* 2.0.7 ([9eb9a71](https://github.com/micromatch/picomatch/commit/9eb9a71)) -* supports lookbehinds ([1f63f7e](https://github.com/micromatch/picomatch/commit/1f63f7e)) -* update .verb.md file with typo change ([2741279](https://github.com/micromatch/picomatch/commit/2741279)) -* fix: typo in README ([0753e44](https://github.com/micromatch/picomatch/commit/0753e44)) - -## 2.0.4 (2019-04-10) - -### Fixed - -- Readme link [fixed](https://github.com/micromatch/picomatch/pull/13/commits/a96ab3aa2b11b6861c23289964613d85563b05df) by @danez. -- `options.capture` now works as expected when fastpaths are enabled. See https://github.com/micromatch/picomatch/pull/12/commits/26aefd71f1cfaf95c37f1c1fcab68a693b037304. Thanks to @DrPizza. - -## 2.0.0 (2019-04-10) - -### Added - -- Adds support for `options.onIgnore`. See the readme for details -- Adds support for `options.onResult`. See the readme for details - -### Breaking changes - -- The unixify option was renamed to `windows` -- caching and all related options and methods have been removed - -## 1.0.0 (2018-11-05) - -- adds `.onMatch` option -- improvements to `.scan` method -- numerous improvements and optimizations for matching and parsing -- better windows path handling - -## 0.1.0 - 2017-04-13 - -First release. - - -[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE deleted file mode 100644 index 3608dca..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2017-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md deleted file mode 100644 index b0526e2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/README.md +++ /dev/null @@ -1,708 +0,0 @@ -

-# Picomatch
-
-*(Badges: version, test status, coverage status, downloads)*
-
-> Blazing fast and accurate glob matcher written in JavaScript.
-> No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.
-
- -## Why picomatch? - -* **Lightweight** - No dependencies -* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. -* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) -* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) -* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. -* **Well tested** - Thousands of unit tests - -See the [library comparison](#library-comparisons) to other libraries. - -
-
- -## Table of Contents - -
Click to expand - -- [Install](#install) -- [Usage](#usage) -- [API](#api) - * [picomatch](#picomatch) - * [.test](#test) - * [.matchBase](#matchbase) - * [.isMatch](#ismatch) - * [.parse](#parse) - * [.scan](#scan) - * [.compileRe](#compilere) - * [.makeRe](#makere) - * [.toRegex](#toregex) -- [Options](#options) - * [Picomatch options](#picomatch-options) - * [Scan Options](#scan-options) - * [Options Examples](#options-examples) -- [Globbing features](#globbing-features) - * [Basic globbing](#basic-globbing) - * [Advanced globbing](#advanced-globbing) - * [Braces](#braces) - * [Matching special characters as literals](#matching-special-characters-as-literals) -- [Library Comparisons](#library-comparisons) -- [Benchmarks](#benchmarks) -- [Philosophies](#philosophies) -- [About](#about) - * [Author](#author) - * [License](#license) - -_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ - -
- -
-
- -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -npm install --save picomatch -``` - -
- -## Usage - -The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. - -```js -const pm = require('picomatch'); -const isMatch = pm('*.js'); - -console.log(isMatch('abcd')); //=> false -console.log(isMatch('a.js')); //=> true -console.log(isMatch('a.md')); //=> false -console.log(isMatch('a/b.js')); //=> false -``` - -
- -## API - -### [picomatch](lib/picomatch.js#L32) - -Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. - -**Params** - -* `globs` **{String|Array}**: One or more glob patterns. -* `options` **{Object=}** -* `returns` **{Function=}**: Returns a matcher function. - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch(glob[, options]); - -const isMatch = picomatch('*.!(*a)'); -console.log(isMatch('a.a')); //=> false -console.log(isMatch('a.b')); //=> true -``` - -### [.test](lib/picomatch.js#L117) - -Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. - -**Params** - -* `input` **{String}**: String to test. -* `regex` **{RegExp}** -* `returns` **{Object}**: Returns an object with matching info. - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.test(input, regex[, options]); - -console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); -// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } -``` - -### [.matchBase](lib/picomatch.js#L161) - -Match the basename of a filepath. - -**Params** - -* `input` **{String}**: String to test. -* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). -* `returns` **{Boolean}** - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.matchBase(input, glob[, options]); -console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true -``` - -### [.isMatch](lib/picomatch.js#L183) - -Returns true if **any** of the given glob `patterns` match the specified `string`. - -**Params** - -* **{String|Array}**: str The string to test. -* **{String|Array}**: patterns One or more glob patterns to use for matching. -* **{Object}**: See available [options](#options). -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.isMatch(string, patterns[, options]); - -console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true -console.log(picomatch.isMatch('a.a', 'b.*')); //=> false -``` - -### [.parse](lib/picomatch.js#L199) - -Parse a glob pattern to create the source string for a regular expression. - -**Params** - -* `pattern` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. - -**Example** - -```js -const picomatch = require('picomatch'); -const result = picomatch.parse(pattern[, options]); -``` - -### [.scan](lib/picomatch.js#L231) - -Scan a glob pattern to separate the pattern into segments. - -**Params** - -* `input` **{String}**: Glob pattern to scan. -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.scan(input[, options]); - -const result = picomatch.scan('!./foo/*.js'); -console.log(result); -{ prefix: '!./', - input: '!./foo/*.js', - start: 3, - base: 'foo', - glob: '*.js', - isBrace: false, - isBracket: false, - isGlob: true, - isExtglob: false, - isGlobstar: false, - negated: true } -``` - -### [.compileRe](lib/picomatch.js#L245) - -Compile a regular expression from the `state` object returned by the -[parse()](#parse) method. 
- -**Params** - -* `state` **{Object}** -* `options` **{Object}** -* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. -* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. -* `returns` **{RegExp}** - -### [.makeRe](lib/picomatch.js#L286) - -Create a regular expression from a parsed glob pattern. - -**Params** - -* `state` **{String}**: The object returned from the `.parse` method. -* `options` **{Object}** -* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. -* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. -* `returns` **{RegExp}**: Returns a regex created from the given pattern. - -**Example** - -```js -const picomatch = require('picomatch'); -const state = picomatch.parse('*.js'); -// picomatch.compileRe(state[, options]); - -console.log(picomatch.compileRe(state)); -//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ -``` - -### [.toRegex](lib/picomatch.js#L321) - -Create a regular expression from the given regex source string. - -**Params** - -* `source` **{String}**: Regular expression source string. -* `options` **{Object}** -* `returns` **{RegExp}** - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.toRegex(source[, options]); - -const { output } = picomatch.parse('*.js'); -console.log(picomatch.toRegex(output)); -//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ -``` - -
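The lower-level methods documented above compose: per the `.makeRe` example, `.parse` followed by `.compileRe` produces the same regex that `.makeRe` returns for a single pattern. A short sketch tying them together (patterns and paths are illustrative only):

```js
const picomatch = require('picomatch');

// parse -> compileRe is roughly what makeRe does for one pattern.
const state = picomatch.parse('src/**/*.ts');
const re = picomatch.compileRe(state);

console.log(re.test('src/lib/index.ts'));                       //=> true
console.log(picomatch.makeRe('src/**/*.ts').test('README.md')); //=> false
```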
- -## Options - -### Picomatch options - -The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | -| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | -| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | -| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | -| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | -| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | -| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | -| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | -| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | -| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | -| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | -| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | -| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | -| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | -| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | -| `matchBase` | `boolean` | `false` | Alias for `basename` | -| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | -| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | -| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | -| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | -| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| -| `noext` | `boolean` | `false` | Alias for `noextglob` | -| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | -| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | -| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | -| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | -| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | -| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | -| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | -| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | -| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | -| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | -| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | -| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | -| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | -| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | -| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | - -picomatch has automatic detection for regex positive and negative lookbehinds. If the pattern contains a negative lookbehind, you must be using Node.js >= 8.10 or else picomatch will throw an error. - -### Scan Options - -In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | -| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | - -**Example** - -```js -const picomatch = require('picomatch'); -const result = picomatch.scan('!./foo/*.js', { tokens: true }); -console.log(result); -// { -// prefix: '!./', -// input: '!./foo/*.js', -// start: 3, -// base: 'foo', -// glob: '*.js', -// isBrace: false, -// isBracket: false, -// isGlob: true, -// isExtglob: false, -// isGlobstar: false, -// negated: true, -// maxDepth: 2, -// tokens: [ -// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, -// { value: 'foo', depth: 1, isGlob: false }, -// { value: '*.js', depth: 1, isGlob: true } -// ], -// slashes: [ 2, 6 ], -// parts: [ 'foo', '*.js' ] -// } -``` - -
- -### Options Examples - -#### options.expandRange - -**Type**: `function` - -**Default**: `undefined` - -Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. - -**Example** - -The following example shows how to create a glob that matches a folder - -```js -const fill = require('fill-range'); -const regex = pm.makeRe('foo/{01..25}/bar', { - expandRange(a, b) { - return `(${fill(a, b, { toRegex: true })})`; - } -}); - -console.log(regex); -//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ - -console.log(regex.test('foo/00/bar')) // false -console.log(regex.test('foo/01/bar')) // true -console.log(regex.test('foo/10/bar')) // true -console.log(regex.test('foo/22/bar')) // true -console.log(regex.test('foo/25/bar')) // true -console.log(regex.test('foo/26/bar')) // false -``` - -#### options.format - -**Type**: `function` - -**Default**: `undefined` - -Custom function for formatting strings before they're matched. - -**Example** - -```js -// strip leading './' from strings -const format = str => str.replace(/^\.\//, ''); -const isMatch = picomatch('foo/*.js', { format }); -console.log(isMatch('./foo/bar.js')); //=> true -``` - -#### options.onMatch - -```js -const onMatch = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onMatch }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -#### options.onIgnore - -```js -const onIgnore = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -#### options.onResult - -```js -const onResult = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onResult, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -
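A few of the plainer options from the table above, which the readme's own examples do not cover, in one hedged sketch:

```js
const picomatch = require('picomatch');

// dot: also match dotfiles; nocase: case-insensitive; ignore: exclude matches.
const isMatch = picomatch('**/*.js', {
  dot: true,
  nocase: true,
  ignore: ['**/*.min.js']
});

console.log(isMatch('.config/setup.js')); //=> true  (dot)
console.log(isMatch('SRC/APP.JS'));       //=> true  (nocase)
console.log(isMatch('dist/app.min.js'));  //=> false (ignored)
```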
-
-
-## Globbing features
-
-* [Basic globbing](#basic-globbing) (Wildcard matching)
-* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching)
-
-### Basic globbing
-
-| **Character** | **Description** |
-| --- | --- |
-| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. |
-| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` on Windows) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. |
-| `?` | Matches any character excluding path separators one time. Does _not match_ path separators or leading dots. |
-| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. |
-
-#### Matching behavior vs. Bash
-
-Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions:
-
-* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`.
-* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` backtracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`.
-
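A few concrete checks for the table above (file names are arbitrary):

```js
const pm = require('picomatch');

console.log(pm.isMatch('foo/bar/baz.js', 'foo/*'));  //=> false (`*` stops at `/`)
console.log(pm.isMatch('foo/bar/baz.js', 'foo/**')); //=> true  (`**` crosses `/`)
console.log(pm.isMatch('a.js', '?.js'));             //=> true
console.log(pm.isMatch('ab.js', '?.js'));            //=> false (`?` matches exactly one char)
console.log(pm.isMatch('b.js', '[abc].js'));         //=> true
console.log(pm.isMatch('d.js', '[abc].js'));         //=> false
```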
-
-### Advanced globbing
-
-* [extglobs](#extglobs)
-* [POSIX brackets](#posix-brackets)
-* [Braces](#brace-expansion)
-
-#### Extglobs
-
-| **Pattern** | **Description** |
-| --- | --- |
-| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` |
-| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` |
-| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` |
-| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` |
-| `!(pattern)` | Match _anything but_ `pattern` |
-
-**Examples**
-
-```js
-const pm = require('picomatch');
-
-// *(pattern) matches ZERO or more of "pattern"
-console.log(pm.isMatch('a', 'a*(z)')); // true
-console.log(pm.isMatch('az', 'a*(z)')); // true
-console.log(pm.isMatch('azzz', 'a*(z)')); // true
-
-// +(pattern) matches ONE or more of "pattern"
-console.log(pm.isMatch('a', 'a+(z)')); // false
-console.log(pm.isMatch('az', 'a+(z)')); // true
-console.log(pm.isMatch('azzz', 'a+(z)')); // true
-
-// supports multiple extglobs
-console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false
-
-// supports nested extglobs
-console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true
-```
-
-#### POSIX brackets
-
-POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true.
-
-**Enable POSIX bracket support**
-
-```js
-console.log(pm.makeRe('[[:word:]]+', { posix: true }));
-//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/
-```
-
-**Supported POSIX classes**
-
-The following named POSIX bracket expressions are supported:
-
-* `[:alnum:]` - Alphanumeric characters, equivalent to `[a-zA-Z0-9]`.
-* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`.
-* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`.
-* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`.
-* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`.
-* `[:digit:]` - Numerical digits, equivalent to `[0-9]`.
-* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`.
-* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`.
-* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`.
-* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`.
-* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`.
-* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`.
-* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`.
-* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`.
-
-See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information.
-
-### Braces
-
-Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces.
-
-### Matching special characters as literals
-
-If you wish to match the following special characters in a filepath, and you want to use these characters in your glob pattern, they must be escaped with backslashes or quotes:
-
-**Special Characters**
-
-Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms.
-
-To match any of the following characters as literals: `$^*+?()[]`
-
-Examples:
-
-```js
-console.log(pm.makeRe('foo/bar \\(1\\)'));
-console.log(pm.makeRe('foo/bar \\(1\\)'));
-```
-
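Since the escaping examples above only print the generated regexes, here is a hedged end-to-end check of matching a literal parenthesised name:

```js
const pm = require('picomatch');

// Backslash-escape the parentheses so they are treated as literals.
const isMatch = pm('foo/bar \\(1\\)');

console.log(isMatch('foo/bar (1)')); //=> true
console.log(isMatch('foo/bar 1'));   //=> false
```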
-
-
-## Library Comparisons
-
-The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets).
-
-| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` |
-| --- | --- | --- | --- | --- | --- | --- | --- |
-| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - |
-| Advanced globbing | ✔ | ✔ | ✔ | - | - | - | - |
-| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - |
-| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - |
-| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - |
-| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ |
-| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ |
-| File system operations | - | - | - | - | - | - | - |
-
-
-
-## Benchmarks
-
-Performance comparison of picomatch and minimatch.
-
-```
-# .makeRe star
-  picomatch x 1,993,050 ops/sec ±0.51% (91 runs sampled)
-  minimatch x 627,206 ops/sec ±1.96% (87 runs sampled)
-
-# .makeRe star; dot=true
-  picomatch x 1,436,640 ops/sec ±0.62% (91 runs sampled)
-  minimatch x 525,876 ops/sec ±0.60% (88 runs sampled)
-
-# .makeRe globstar
-  picomatch x 1,592,742 ops/sec ±0.42% (90 runs sampled)
-  minimatch x 962,043 ops/sec ±1.76% (91 runs sampled)
-
-# .makeRe globstars
-  picomatch x 1,615,199 ops/sec ±0.35% (94 runs sampled)
-  minimatch x 477,179 ops/sec ±1.33% (91 runs sampled)
-
-# .makeRe with leading star
-  picomatch x 1,220,856 ops/sec ±0.40% (92 runs sampled)
-  minimatch x 453,564 ops/sec ±1.43% (94 runs sampled)
-
-# .makeRe - basic braces
-  picomatch x 392,067 ops/sec ±0.70% (90 runs sampled)
-  minimatch x 99,532 ops/sec ±2.03% (87 runs sampled)
-```
-
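A rough sketch of how a similar picomatch-vs-minimatch comparison could be reproduced, assuming the `benchmark` and `minimatch` packages are installed alongside picomatch; this is not the harness that produced the numbers above:

```js
const Benchmark = require('benchmark');
const picomatch = require('picomatch');
const minimatch = require('minimatch');

// Compare regex compilation for a single-star pattern.
new Benchmark.Suite()
  .add('picomatch.makeRe("*")', () => picomatch.makeRe('*'))
  .add('minimatch.makeRe("*")', () => minimatch.makeRe('*'))
  .on('cycle', event => console.log(String(event.target)))
  .run();
```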
-
-
-## Philosophies
-
-The goal of this library is to be blazing fast, without compromising on accuracy.
-
-**Accuracy**
-
-The number one goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly complicated when different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`.
-
-Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to implement our best judgement and rely on feedback from users to make improvements.
-
-**Performance**
-
-Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer.
-
-
- -## About - -
-### Contributing
-
-Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new).
-
-Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards.
-
- -
-### Running Tests
-
-Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command:
-
-```sh
-npm install && npm test
-```
-
- -
-### Building docs
-
-_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_
-
-To generate the readme, run the following command:
-
-```sh
-npm install -g verbose/verb#dev verb-generate-readme && verb
-```
-
- -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js deleted file mode 100644 index d2f2bc5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/index.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict'; - -module.exports = require('./lib/picomatch'); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js deleted file mode 100644 index a62ef38..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/constants.js +++ /dev/null @@ -1,179 +0,0 @@ -'use strict'; - -const path = require('path'); -const WIN_SLASH = '\\\\/'; -const WIN_NO_SLASH = `[^${WIN_SLASH}]`; - -/** - * Posix glob regex - */ - -const DOT_LITERAL = '\\.'; -const PLUS_LITERAL = '\\+'; -const QMARK_LITERAL = '\\?'; -const SLASH_LITERAL = '\\/'; -const ONE_CHAR = '(?=.)'; -const QMARK = '[^/]'; -const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; -const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; -const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; -const NO_DOT = `(?!${DOT_LITERAL})`; -const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; -const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; -const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; -const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; -const STAR = `${QMARK}*?`; - -const POSIX_CHARS = { - DOT_LITERAL, - PLUS_LITERAL, - QMARK_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - QMARK, - END_ANCHOR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK_NO_DOT, - STAR, - START_ANCHOR -}; - -/** - * Windows glob regex - */ - -const WINDOWS_CHARS = { - ...POSIX_CHARS, - - SLASH_LITERAL: `[${WIN_SLASH}]`, - QMARK: WIN_NO_SLASH, - STAR: `${WIN_NO_SLASH}*?`, - DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, - NO_DOT: `(?!${DOT_LITERAL})`, - NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, - NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - QMARK_NO_DOT: `[^.${WIN_SLASH}]`, - START_ANCHOR: `(?:^|[${WIN_SLASH}])`, - END_ANCHOR: `(?:[${WIN_SLASH}]|$)` -}; - -/** - * POSIX Bracket Regex - */ - -const POSIX_REGEX_SOURCE = { - alnum: 'a-zA-Z0-9', - alpha: 'a-zA-Z', - ascii: '\\x00-\\x7F', - blank: ' \\t', - cntrl: '\\x00-\\x1F\\x7F', - digit: '0-9', - graph: '\\x21-\\x7E', - lower: 'a-z', - print: '\\x20-\\x7E ', - punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', - space: ' \\t\\r\\n\\v\\f', - upper: 'A-Z', - word: 'A-Za-z0-9_', - xdigit: 'A-Fa-f0-9' -}; - -module.exports = { - MAX_LENGTH: 1024 * 64, - POSIX_REGEX_SOURCE, - - // regular expressions - REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, - REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, - REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, - REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, - REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, - REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, - - // Replace globs with equivalent patterns to reduce parsing time. 
- REPLACEMENTS: { - '***': '*', - '**/**': '**', - '**/**/**': '**' - }, - - // Digits - CHAR_0: 48, /* 0 */ - CHAR_9: 57, /* 9 */ - - // Alphabet chars. - CHAR_UPPERCASE_A: 65, /* A */ - CHAR_LOWERCASE_A: 97, /* a */ - CHAR_UPPERCASE_Z: 90, /* Z */ - CHAR_LOWERCASE_Z: 122, /* z */ - - CHAR_LEFT_PARENTHESES: 40, /* ( */ - CHAR_RIGHT_PARENTHESES: 41, /* ) */ - - CHAR_ASTERISK: 42, /* * */ - - // Non-alphabetic chars. - CHAR_AMPERSAND: 38, /* & */ - CHAR_AT: 64, /* @ */ - CHAR_BACKWARD_SLASH: 92, /* \ */ - CHAR_CARRIAGE_RETURN: 13, /* \r */ - CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ - CHAR_COLON: 58, /* : */ - CHAR_COMMA: 44, /* , */ - CHAR_DOT: 46, /* . */ - CHAR_DOUBLE_QUOTE: 34, /* " */ - CHAR_EQUAL: 61, /* = */ - CHAR_EXCLAMATION_MARK: 33, /* ! */ - CHAR_FORM_FEED: 12, /* \f */ - CHAR_FORWARD_SLASH: 47, /* / */ - CHAR_GRAVE_ACCENT: 96, /* ` */ - CHAR_HASH: 35, /* # */ - CHAR_HYPHEN_MINUS: 45, /* - */ - CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ - CHAR_LEFT_CURLY_BRACE: 123, /* { */ - CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ - CHAR_LINE_FEED: 10, /* \n */ - CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ - CHAR_PERCENT: 37, /* % */ - CHAR_PLUS: 43, /* + */ - CHAR_QUESTION_MARK: 63, /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ - CHAR_RIGHT_CURLY_BRACE: 125, /* } */ - CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ - CHAR_SEMICOLON: 59, /* ; */ - CHAR_SINGLE_QUOTE: 39, /* ' */ - CHAR_SPACE: 32, /* */ - CHAR_TAB: 9, /* \t */ - CHAR_UNDERSCORE: 95, /* _ */ - CHAR_VERTICAL_LINE: 124, /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ - - SEP: path.sep, - - /** - * Create EXTGLOB_CHARS - */ - - extglobChars(chars) { - return { - '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, - '?': { type: 'qmark', open: '(?:', close: ')?' }, - '+': { type: 'plus', open: '(?:', close: ')+' }, - '*': { type: 'star', open: '(?:', close: ')*' }, - '@': { type: 'at', open: '(?:', close: ')' } - }; - }, - - /** - * Create GLOB_CHARS - */ - - globChars(win32) { - return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; - } -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js deleted file mode 100644 index 58269d0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/parse.js +++ /dev/null @@ -1,1091 +0,0 @@ -'use strict'; - -const constants = require('./constants'); -const utils = require('./utils'); - -/** - * Constants - */ - -const { - MAX_LENGTH, - POSIX_REGEX_SOURCE, - REGEX_NON_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_BACKREF, - REPLACEMENTS -} = constants; - -/** - * Helpers - */ - -const expandRange = (args, options) => { - if (typeof options.expandRange === 'function') { - return options.expandRange(...args, options); - } - - args.sort(); - const value = `[${args.join('-')}]`; - - try { - /* eslint-disable-next-line no-new */ - new RegExp(value); - } catch (ex) { - return args.map(v => utils.escapeRegex(v)).join('..'); - } - - return value; -}; - -/** - * Create the message for a syntax error - */ - -const syntaxError = (type, char) => { - return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; -}; - -/** - * Parse the given input string. 
- * @param {String} input - * @param {Object} options - * @return {Object} - */ - -const parse = (input, options) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } - - input = REPLACEMENTS[input] || input; - - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - - let len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); - } - - const bos = { type: 'bos', value: '', output: opts.prepend || '' }; - const tokens = [bos]; - - const capture = opts.capture ? '' : '?:'; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const PLATFORM_CHARS = constants.globChars(win32); - const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); - - const { - DOT_LITERAL, - PLUS_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK, - QMARK_NO_DOT, - STAR, - START_ANCHOR - } = PLATFORM_CHARS; - - const globstar = opts => { - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; - - const nodot = opts.dot ? '' : NO_DOT; - const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; - let star = opts.bash === true ? globstar(opts) : STAR; - - if (opts.capture) { - star = `(${star})`; - } - - // minimatch options support - if (typeof opts.noext === 'boolean') { - opts.noextglob = opts.noext; - } - - const state = { - input, - index: -1, - start: 0, - dot: opts.dot === true, - consumed: '', - output: '', - prefix: '', - backtrack: false, - negated: false, - brackets: 0, - braces: 0, - parens: 0, - quotes: 0, - globstar: false, - tokens - }; - - input = utils.removePrefix(input, state); - len = input.length; - - const extglobs = []; - const braces = []; - const stack = []; - let prev = bos; - let value; - - /** - * Tokenizing helpers - */ - - const eos = () => state.index === len - 1; - const peek = state.peek = (n = 1) => input[state.index + n]; - const advance = state.advance = () => input[++state.index] || ''; - const remaining = () => input.slice(state.index + 1); - const consume = (value = '', num = 0) => { - state.consumed += value; - state.index += num; - }; - - const append = token => { - state.output += token.output != null ? token.output : token.value; - consume(token.value); - }; - - const negate = () => { - let count = 1; - - while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { - advance(); - state.start++; - count++; - } - - if (count % 2 === 0) { - return false; - } - - state.negated = true; - state.start++; - return true; - }; - - const increment = type => { - state[type]++; - stack.push(type); - }; - - const decrement = type => { - state[type]--; - stack.pop(); - }; - - /** - * Push tokens onto the tokens array. This helper speeds up - * tokenizing by 1) helping us avoid backtracking as much as possible, - * and 2) helping us avoid creating extra tokens when consecutive - * characters are plain text. This improves performance and simplifies - * lookbehinds. 
- */ - - const push = tok => { - if (prev.type === 'globstar') { - const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); - const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); - - if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { - state.output = state.output.slice(0, -prev.output.length); - prev.type = 'star'; - prev.value = '*'; - prev.output = star; - state.output += prev.output; - } - } - - if (extglobs.length && tok.type !== 'paren') { - extglobs[extglobs.length - 1].inner += tok.value; - } - - if (tok.value || tok.output) append(tok); - if (prev && prev.type === 'text' && tok.type === 'text') { - prev.value += tok.value; - prev.output = (prev.output || '') + tok.value; - return; - } - - tok.prev = prev; - tokens.push(tok); - prev = tok; - }; - - const extglobOpen = (type, value) => { - const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; - - token.prev = prev; - token.parens = state.parens; - token.output = state.output; - const output = (opts.capture ? '(' : '') + token.open; - - increment('parens'); - push({ type, value, output: state.output ? '' : ONE_CHAR }); - push({ type: 'paren', extglob: true, value: advance(), output }); - extglobs.push(token); - }; - - const extglobClose = token => { - let output = token.close + (opts.capture ? ')' : ''); - let rest; - - if (token.type === 'negate') { - let extglobStar = star; - - if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { - extglobStar = globstar(opts); - } - - if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { - output = token.close = `)$))${extglobStar}`; - } - - if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { - // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. - // In this case, we need to parse the string and use it in the output of the original pattern. - // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. - // - // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. - const expression = parse(rest, { ...options, fastpaths: false }).output; - - output = token.close = `)${expression})${extglobStar})`; - } - - if (token.prev.type === 'bos') { - state.negatedExtglob = true; - } - } - - push({ type: 'paren', extglob: true, value, output }); - decrement('parens'); - }; - - /** - * Fast paths - */ - - if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { - let backslashes = false; - - let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { - if (first === '\\') { - backslashes = true; - return m; - } - - if (first === '?') { - if (esc) { - return esc + first + (rest ? QMARK.repeat(rest.length) : ''); - } - if (index === 0) { - return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); - } - return QMARK.repeat(chars.length); - } - - if (first === '.') { - return DOT_LITERAL.repeat(chars.length); - } - - if (first === '*') { - if (esc) { - return esc + first + (rest ? star : ''); - } - return star; - } - return esc ? m : `\\${m}`; - }); - - if (backslashes === true) { - if (opts.unescape === true) { - output = output.replace(/\\/g, ''); - } else { - output = output.replace(/\\+/g, m => { - return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); - }); - } - } - - if (output === input && opts.contains === true) { - state.output = input; - return state; - } - - state.output = utils.wrapOutput(output, state, options); - return state; - } - - /** - * Tokenize input until we reach end-of-string - */ - - while (!eos()) { - value = advance(); - - if (value === '\u0000') { - continue; - } - - /** - * Escaped characters - */ - - if (value === '\\') { - const next = peek(); - - if (next === '/' && opts.bash !== true) { - continue; - } - - if (next === '.' || next === ';') { - continue; - } - - if (!next) { - value += '\\'; - push({ type: 'text', value }); - continue; - } - - // collapse slashes to reduce potential for exploits - const match = /^\\+/.exec(remaining()); - let slashes = 0; - - if (match && match[0].length > 2) { - slashes = match[0].length; - state.index += slashes; - if (slashes % 2 !== 0) { - value += '\\'; - } - } - - if (opts.unescape === true) { - value = advance(); - } else { - value += advance(); - } - - if (state.brackets === 0) { - push({ type: 'text', value }); - continue; - } - } - - /** - * If we're inside a regex character class, continue - * until we reach the closing bracket. - */ - - if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { - if (opts.posix !== false && value === ':') { - const inner = prev.value.slice(1); - if (inner.includes('[')) { - prev.posix = true; - - if (inner.includes(':')) { - const idx = prev.value.lastIndexOf('['); - const pre = prev.value.slice(0, idx); - const rest = prev.value.slice(idx + 2); - const posix = POSIX_REGEX_SOURCE[rest]; - if (posix) { - prev.value = pre + posix; - state.backtrack = true; - advance(); - - if (!bos.output && tokens.indexOf(prev) === 1) { - bos.output = ONE_CHAR; - } - continue; - } - } - } - } - - if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { - value = `\\${value}`; - } - - if (value === ']' && (prev.value === '[' || prev.value === '[^')) { - value = `\\${value}`; - } - - if (opts.posix === true && value === '!' && prev.value === '[') { - value = '^'; - } - - prev.value += value; - append({ value }); - continue; - } - - /** - * If we're inside a quoted string, continue - * until we reach the closing double quote. - */ - - if (state.quotes === 1 && value !== '"') { - value = utils.escapeRegex(value); - prev.value += value; - append({ value }); - continue; - } - - /** - * Double quotes - */ - - if (value === '"') { - state.quotes = state.quotes === 1 ? 0 : 1; - if (opts.keepQuotes === true) { - push({ type: 'text', value }); - } - continue; - } - - /** - * Parentheses - */ - - if (value === '(') { - increment('parens'); - push({ type: 'paren', value }); - continue; - } - - if (value === ')') { - if (state.parens === 0 && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '(')); - } - - const extglob = extglobs[extglobs.length - 1]; - if (extglob && state.parens === extglob.parens + 1) { - extglobClose(extglobs.pop()); - continue; - } - - push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); - decrement('parens'); - continue; - } - - /** - * Square brackets - */ - - if (value === '[') { - if (opts.nobracket === true || !remaining().includes(']')) { - if (opts.nobracket !== true && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('closing', ']')); - } - - value = `\\${value}`; - } else { - increment('brackets'); - } - - push({ type: 'bracket', value }); - continue; - } - - if (value === ']') { - if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { - push({ type: 'text', value, output: `\\${value}` }); - continue; - } - - if (state.brackets === 0) { - if (opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '[')); - } - - push({ type: 'text', value, output: `\\${value}` }); - continue; - } - - decrement('brackets'); - - const prevValue = prev.value.slice(1); - if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { - value = `/${value}`; - } - - prev.value += value; - append({ value }); - - // when literal brackets are explicitly disabled - // assume we should match with a regex character class - if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { - continue; - } - - const escaped = utils.escapeRegex(prev.value); - state.output = state.output.slice(0, -prev.value.length); - - // when literal brackets are explicitly enabled - // assume we should escape the brackets to match literal characters - if (opts.literalBrackets === true) { - state.output += escaped; - prev.value = escaped; - continue; - } - - // when the user specifies nothing, try to match both - prev.value = `(${capture}${escaped}|${prev.value})`; - state.output += prev.value; - continue; - } - - /** - * Braces - */ - - if (value === '{' && opts.nobrace !== true) { - increment('braces'); - - const open = { - type: 'brace', - value, - output: '(', - outputIndex: state.output.length, - tokensIndex: state.tokens.length - }; - - braces.push(open); - push(open); - continue; - } - - if (value === '}') { - const brace = braces[braces.length - 1]; - - if (opts.nobrace === true || !brace) { - push({ type: 'text', value, output: value }); - continue; - } - - let output = ')'; - - if (brace.dots === true) { - const arr = tokens.slice(); - const range = []; - - for (let i = arr.length - 1; i >= 0; i--) { - tokens.pop(); - if (arr[i].type === 'brace') { - break; - } - if (arr[i].type !== 'dots') { - range.unshift(arr[i].value); - } - } - - output = expandRange(range, opts); - state.backtrack = true; - } - - if (brace.comma !== true && brace.dots !== true) { - const out = state.output.slice(0, brace.outputIndex); - const toks = state.tokens.slice(brace.tokensIndex); - brace.value = brace.output = '\\{'; - value = output = '\\}'; - state.output = out; - for (const t of toks) { - state.output += (t.output || t.value); - } - } - - push({ type: 'brace', value, output }); - decrement('braces'); - braces.pop(); - continue; - } - - /** - * Pipes - */ - - if (value === '|') { - if (extglobs.length > 0) { - extglobs[extglobs.length - 1].conditions++; - } - push({ type: 'text', value }); - continue; - } - - /** - * Commas - */ - - if (value === ',') { - let output = value; - - const brace = braces[braces.length - 1]; - if (brace && stack[stack.length - 1] === 'braces') { - brace.comma = true; - output = '|'; - } - - push({ type: 'comma', value, output }); - continue; - } - - /** - * Slashes - */ - - if (value === '/') { - // if the beginning of the glob is "./", advance the start - // to the current 
index, and don't add the "./" characters - // to the state. This greatly simplifies lookbehinds when - // checking for BOS characters like "!" and "." (not "./") - if (prev.type === 'dot' && state.index === state.start + 1) { - state.start = state.index + 1; - state.consumed = ''; - state.output = ''; - tokens.pop(); - prev = bos; // reset "prev" to the first token - continue; - } - - push({ type: 'slash', value, output: SLASH_LITERAL }); - continue; - } - - /** - * Dots - */ - - if (value === '.') { - if (state.braces > 0 && prev.type === 'dot') { - if (prev.value === '.') prev.output = DOT_LITERAL; - const brace = braces[braces.length - 1]; - prev.type = 'dots'; - prev.output += value; - prev.value += value; - brace.dots = true; - continue; - } - - if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { - push({ type: 'text', value, output: DOT_LITERAL }); - continue; - } - - push({ type: 'dot', value, output: DOT_LITERAL }); - continue; - } - - /** - * Question marks - */ - - if (value === '?') { - const isGroup = prev && prev.value === '('; - if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('qmark', value); - continue; - } - - if (prev && prev.type === 'paren') { - const next = peek(); - let output = value; - - if (next === '<' && !utils.supportsLookbehinds()) { - throw new Error('Node.js v10 or higher is required for regex lookbehinds'); - } - - if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { - output = `\\${value}`; - } - - push({ type: 'text', value, output }); - continue; - } - - if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { - push({ type: 'qmark', value, output: QMARK_NO_DOT }); - continue; - } - - push({ type: 'qmark', value, output: QMARK }); - continue; - } - - /** - * Exclamation - */ - - if (value === '!') { - if (opts.noextglob !== true && peek() === '(') { - if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { - extglobOpen('negate', value); - continue; - } - } - - if (opts.nonegate !== true && state.index === 0) { - negate(); - continue; - } - } - - /** - * Plus - */ - - if (value === '+') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('plus', value); - continue; - } - - if ((prev && prev.value === '(') || opts.regex === false) { - push({ type: 'plus', value, output: PLUS_LITERAL }); - continue; - } - - if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { - push({ type: 'plus', value }); - continue; - } - - push({ type: 'plus', value: PLUS_LITERAL }); - continue; - } - - /** - * Plain text - */ - - if (value === '@') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - push({ type: 'at', extglob: true, value, output: '' }); - continue; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Plain text - */ - - if (value !== '*') { - if (value === '$' || value === '^') { - value = `\\${value}`; - } - - const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); - if (match) { - value += match[0]; - state.index += match[0].length; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Stars - */ - - if (prev && (prev.type === 'globstar' || prev.star === true)) { - prev.type = 'star'; - prev.star = true; - prev.value += value; - prev.output = star; - state.backtrack = true; - state.globstar = true; - consume(value); - continue; - } - - let rest = remaining(); - if (opts.noextglob !== true && /^\([^?]/.test(rest)) { - extglobOpen('star', value); - continue; - } - - if (prev.type === 'star') { - if (opts.noglobstar === true) { - consume(value); - continue; - } - - const prior = prev.prev; - const before = prior.prev; - const isStart = prior.type === 'slash' || prior.type === 'bos'; - const afterStar = before && (before.type === 'star' || before.type === 'globstar'); - - if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { - push({ type: 'star', value, output: '' }); - continue; - } - - const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); - const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); - if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { - push({ type: 'star', value, output: '' }); - continue; - } - - // strip consecutive `/**/` - while (rest.slice(0, 3) === '/**') { - const after = input[state.index + 4]; - if (after && after !== '/') { - break; - } - rest = rest.slice(3); - consume('/**', 3); - } - - if (prior.type === 'bos' && eos()) { - prev.type = 'globstar'; - prev.value += value; - prev.output = globstar(opts); - state.output = prev.output; - state.globstar = true; - consume(value); - continue; - } - - if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; - - prev.type = 'globstar'; - prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); - prev.value += value; - state.globstar = true; - state.output += prior.output + prev.output; - consume(value); - continue; - } - - if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { - const end = rest[1] !== void 0 ? 
'|$' : ''; - - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; - - prev.type = 'globstar'; - prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; - prev.value += value; - - state.output += prior.output + prev.output; - state.globstar = true; - - consume(value + advance()); - - push({ type: 'slash', value: '/', output: '' }); - continue; - } - - if (prior.type === 'bos' && rest[0] === '/') { - prev.type = 'globstar'; - prev.value += value; - prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; - state.output = prev.output; - state.globstar = true; - consume(value + advance()); - push({ type: 'slash', value: '/', output: '' }); - continue; - } - - // remove single star from output - state.output = state.output.slice(0, -prev.output.length); - - // reset previous token to globstar - prev.type = 'globstar'; - prev.output = globstar(opts); - prev.value += value; - - // reset output with globstar - state.output += prev.output; - state.globstar = true; - consume(value); - continue; - } - - const token = { type: 'star', value, output: star }; - - if (opts.bash === true) { - token.output = '.*?'; - if (prev.type === 'bos' || prev.type === 'slash') { - token.output = nodot + token.output; - } - push(token); - continue; - } - - if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { - token.output = value; - push(token); - continue; - } - - if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { - if (prev.type === 'dot') { - state.output += NO_DOT_SLASH; - prev.output += NO_DOT_SLASH; - - } else if (opts.dot === true) { - state.output += NO_DOTS_SLASH; - prev.output += NO_DOTS_SLASH; - - } else { - state.output += nodot; - prev.output += nodot; - } - - if (peek() !== '*') { - state.output += ONE_CHAR; - prev.output += ONE_CHAR; - } - } - - push(token); - } - - while (state.brackets > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); - state.output = utils.escapeLast(state.output, '['); - decrement('brackets'); - } - - while (state.parens > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); - state.output = utils.escapeLast(state.output, '('); - decrement('parens'); - } - - while (state.braces > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); - state.output = utils.escapeLast(state.output, '{'); - decrement('braces'); - } - - if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { - push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); - } - - // rebuild the output if we had to backtrack at any point - if (state.backtrack === true) { - state.output = ''; - - for (const token of state.tokens) { - state.output += token.output != null ? token.output : token.value; - - if (token.suffix) { - state.output += token.suffix; - } - } - } - - return state; -}; - -/** - * Fast paths for creating regular expressions for common glob patterns. - * This can significantly speed up processing and has very little downside - * impact when none of the fast paths match. - */ - -parse.fastpaths = (input, options) => { - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - const len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); - } - - input = REPLACEMENTS[input] || input; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const { - DOT_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOTS_SLASH, - STAR, - START_ANCHOR - } = constants.globChars(win32); - - const nodot = opts.dot ? NO_DOTS : NO_DOT; - const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; - const capture = opts.capture ? '' : '?:'; - const state = { negated: false, prefix: '' }; - let star = opts.bash === true ? '.*?' : STAR; - - if (opts.capture) { - star = `(${star})`; - } - - const globstar = opts => { - if (opts.noglobstar === true) return star; - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; - - const create = str => { - switch (str) { - case '*': - return `${nodot}${ONE_CHAR}${star}`; - - case '.*': - return `${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '*.*': - return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '*/*': - return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; - - case '**': - return nodot + globstar(opts); - - case '**/*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; - - case '**/*.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '**/.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; - - default: { - const match = /^(.*?)\.(\w+)$/.exec(str); - if (!match) return; - - const source = create(match[1]); - if (!source) return; - - return source + DOT_LITERAL + match[2]; - } - } - }; - - const output = utils.removePrefix(input, state); - let source = create(output); - - if (source && opts.strictSlashes !== true) { - source += `${SLASH_LITERAL}?`; - } - - return source; -}; - -module.exports = parse; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js deleted file mode 100644 index 782d809..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/picomatch.js +++ /dev/null @@ -1,342 +0,0 @@ -'use strict'; - -const path = require('path'); -const scan = require('./scan'); -const parse = require('./parse'); -const utils = require('./utils'); -const constants = require('./constants'); -const isObject = val => val && typeof val === 'object' && !Array.isArray(val); - -/** - * Creates a matcher function from one or more glob patterns. The - * returned function takes a string to match as its first argument, - * and returns true if the string is a match. The returned matcher - * function also takes a boolean as the second argument that, when true, - * returns an object with additional information. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch(glob[, options]); - * - * const isMatch = picomatch('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @name picomatch - * @param {String|Array} `globs` One or more glob patterns. - * @param {Object=} `options` - * @return {Function=} Returns a matcher function. 
- * @api public - */ - -const picomatch = (glob, options, returnState = false) => { - if (Array.isArray(glob)) { - const fns = glob.map(input => picomatch(input, options, returnState)); - const arrayMatcher = str => { - for (const isMatch of fns) { - const state = isMatch(str); - if (state) return state; - } - return false; - }; - return arrayMatcher; - } - - const isState = isObject(glob) && glob.tokens && glob.input; - - if (glob === '' || (typeof glob !== 'string' && !isState)) { - throw new TypeError('Expected pattern to be a non-empty string'); - } - - const opts = options || {}; - const posix = utils.isWindows(options); - const regex = isState - ? picomatch.compileRe(glob, options) - : picomatch.makeRe(glob, options, false, true); - - const state = regex.state; - delete regex.state; - - let isIgnored = () => false; - if (opts.ignore) { - const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; - isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); - } - - const matcher = (input, returnObject = false) => { - const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); - const result = { glob, state, regex, posix, input, output, match, isMatch }; - - if (typeof opts.onResult === 'function') { - opts.onResult(result); - } - - if (isMatch === false) { - result.isMatch = false; - return returnObject ? result : false; - } - - if (isIgnored(input)) { - if (typeof opts.onIgnore === 'function') { - opts.onIgnore(result); - } - result.isMatch = false; - return returnObject ? result : false; - } - - if (typeof opts.onMatch === 'function') { - opts.onMatch(result); - } - return returnObject ? result : true; - }; - - if (returnState) { - matcher.state = state; - } - - return matcher; -}; - -/** - * Test `input` with the given `regex`. This is used by the main - * `picomatch()` function to test the input string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.test(input, regex[, options]); - * - * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); - * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } - * ``` - * @param {String} `input` String to test. - * @param {RegExp} `regex` - * @return {Object} Returns an object with matching info. - * @api public - */ - -picomatch.test = (input, regex, options, { glob, posix } = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected input to be a string'); - } - - if (input === '') { - return { isMatch: false, output: '' }; - } - - const opts = options || {}; - const format = opts.format || (posix ? utils.toPosixSlashes : null); - let match = input === glob; - let output = (match && format) ? format(input) : input; - - if (match === false) { - output = format ? format(input) : input; - match = output === glob; - } - - if (match === false || opts.capture === true) { - if (opts.matchBase === true || opts.basename === true) { - match = picomatch.matchBase(input, regex, options, posix); - } else { - match = regex.exec(output); - } - } - - return { isMatch: Boolean(match), match, output }; -}; - -/** - * Match the basename of a filepath. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.matchBase(input, glob[, options]); - * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true - * ``` - * @param {String} `input` String to test. - * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
- * @return {Boolean} - * @api public - */ - -picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { - const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); - return regex.test(path.basename(input)); -}; - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.isMatch(string, patterns[, options]); - * - * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String|Array} str The string to test. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} [options] See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const picomatch = require('picomatch'); - * const result = picomatch.parse(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as a regex source string. - * @api public - */ - -picomatch.parse = (pattern, options) => { - if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); - return parse(pattern, { ...options, fastpaths: false }); -}; - -/** - * Scan a glob pattern to separate the pattern into segments. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.scan(input[, options]); - * - * const result = picomatch.scan('!./foo/*.js'); - * console.log(result); - * { prefix: '!./', - * input: '!./foo/*.js', - * start: 3, - * base: 'foo', - * glob: '*.js', - * isBrace: false, - * isBracket: false, - * isGlob: true, - * isExtglob: false, - * isGlobstar: false, - * negated: true } - * ``` - * @param {String} `input` Glob pattern to scan. - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -picomatch.scan = (input, options) => scan(input, options); - -/** - * Compile a regular expression from the `state` object returned by the - * [parse()](#parse) method. - * - * @param {Object} `state` - * @param {Object} `options` - * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. - * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. - * @return {RegExp} - * @api public - */ - -picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { - if (returnOutput === true) { - return state.output; - } - - const opts = options || {}; - const prepend = opts.contains ? '' : '^'; - const append = opts.contains ? '' : '$'; - - let source = `${prepend}(?:${state.output})${append}`; - if (state && state.negated === true) { - source = `^(?!${source}).*$`; - } - - const regex = picomatch.toRegex(source, options); - if (returnState === true) { - regex.state = state; - } - - return regex; -}; - -/** - * Create a regular expression from a parsed glob pattern. 
- * - * ```js - * const picomatch = require('picomatch'); - * const state = picomatch.parse('*.js'); - * // picomatch.compileRe(state[, options]); - * - * console.log(picomatch.compileRe(state)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `state` The object returned from the `.parse` method. - * @param {Object} `options` - * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. - * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { - if (!input || typeof input !== 'string') { - throw new TypeError('Expected a non-empty string'); - } - - let parsed = { negated: false, fastpaths: true }; - - if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { - parsed.output = parse.fastpaths(input, options); - } - - if (!parsed.output) { - parsed = parse(input, options); - } - - return picomatch.compileRe(parsed, options, returnOutput, returnState); -}; - -/** - * Create a regular expression from the given regex source string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.toRegex(source[, options]); - * - * const { output } = picomatch.parse('*.js'); - * console.log(picomatch.toRegex(output)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `source` Regular expression source string. - * @param {Object} `options` - * @return {RegExp} - * @api public - */ - -picomatch.toRegex = (source, options) => { - try { - const opts = options || {}; - return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); - } catch (err) { - if (options && options.debug === true) throw err; - return /$^/; - } -}; - -/** - * Picomatch constants. - * @return {Object} - */ - -picomatch.constants = constants; - -/** - * Expose "picomatch" - */ - -module.exports = picomatch; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js deleted file mode 100644 index e59cd7a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/scan.js +++ /dev/null @@ -1,391 +0,0 @@ -'use strict'; - -const utils = require('./utils'); -const { - CHAR_ASTERISK, /* * */ - CHAR_AT, /* @ */ - CHAR_BACKWARD_SLASH, /* \ */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_EXCLAMATION_MARK, /* ! */ - CHAR_FORWARD_SLASH, /* / */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_PLUS, /* + */ - CHAR_QUESTION_MARK, /* ? */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_RIGHT_SQUARE_BRACKET /* ] */ -} = require('./constants'); - -const isPathSeparator = code => { - return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; -}; - -const depth = token => { - if (token.isPrefix !== true) { - token.depth = token.isGlobstar ? 
Infinity : 1; - } -}; - -/** - * Quickly scans a glob pattern and returns an object with a handful of - * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), - * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not - * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). - * - * ```js - * const pm = require('picomatch'); - * console.log(pm.scan('foo/bar/*.js')); - * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {Object} Returns an object with tokens and regex source string. - * @api public - */ - -const scan = (input, options) => { - const opts = options || {}; - - const length = input.length - 1; - const scanToEnd = opts.parts === true || opts.scanToEnd === true; - const slashes = []; - const tokens = []; - const parts = []; - - let str = input; - let index = -1; - let start = 0; - let lastIndex = 0; - let isBrace = false; - let isBracket = false; - let isGlob = false; - let isExtglob = false; - let isGlobstar = false; - let braceEscaped = false; - let backslashes = false; - let negated = false; - let negatedExtglob = false; - let finished = false; - let braces = 0; - let prev; - let code; - let token = { value: '', depth: 0, isGlob: false }; - - const eos = () => index >= length; - const peek = () => str.charCodeAt(index + 1); - const advance = () => { - prev = code; - return str.charCodeAt(++index); - }; - - while (index < length) { - code = advance(); - let next; - - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - - if (code === CHAR_LEFT_CURLY_BRACE) { - braceEscaped = true; - } - continue; - } - - if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { - braces++; - - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } - - if (code === CHAR_LEFT_CURLY_BRACE) { - braces++; - continue; - } - - if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (braceEscaped !== true && code === CHAR_COMMA) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (code === CHAR_RIGHT_CURLY_BRACE) { - braces--; - - if (braces === 0) { - braceEscaped = false; - isBrace = token.isBrace = true; - finished = true; - break; - } - } - } - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (code === CHAR_FORWARD_SLASH) { - slashes.push(index); - tokens.push(token); - token = { value: '', depth: 0, isGlob: false }; - - if (finished === true) continue; - if (prev === CHAR_DOT && index === (start + 1)) { - start += 2; - continue; - } - - lastIndex = index + 1; - continue; - } - - if (opts.noext !== true) { - const isExtglobChar = code === CHAR_PLUS - || code === CHAR_AT - || code === CHAR_ASTERISK - || code === CHAR_QUESTION_MARK - || code === CHAR_EXCLAMATION_MARK; - - if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - isExtglob = token.isExtglob = true; - finished = true; - if (code === CHAR_EXCLAMATION_MARK && index === start) { - negatedExtglob = true; - } - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if 
(code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } - - if (code === CHAR_RIGHT_PARENTHESES) { - isGlob = token.isGlob = true; - finished = true; - break; - } - } - continue; - } - break; - } - } - - if (code === CHAR_ASTERISK) { - if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - break; - } - - if (code === CHAR_QUESTION_MARK) { - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - break; - } - - if (code === CHAR_LEFT_SQUARE_BRACKET) { - while (eos() !== true && (next = advance())) { - if (next === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } - - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - isBracket = token.isBracket = true; - isGlob = token.isGlob = true; - finished = true; - break; - } - } - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { - negated = token.negated = true; - start++; - continue; - } - - if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_LEFT_PARENTHESES) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } - - if (code === CHAR_RIGHT_PARENTHESES) { - finished = true; - break; - } - } - continue; - } - break; - } - - if (isGlob === true) { - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - } - - if (opts.noext === true) { - isExtglob = false; - isGlob = false; - } - - let base = str; - let prefix = ''; - let glob = ''; - - if (start > 0) { - prefix = str.slice(0, start); - str = str.slice(start); - lastIndex -= start; - } - - if (base && isGlob === true && lastIndex > 0) { - base = str.slice(0, lastIndex); - glob = str.slice(lastIndex); - } else if (isGlob === true) { - base = ''; - glob = str; - } else { - base = str; - } - - if (base && base !== '' && base !== '/' && base !== str) { - if (isPathSeparator(base.charCodeAt(base.length - 1))) { - base = base.slice(0, -1); - } - } - - if (opts.unescape === true) { - if (glob) glob = utils.removeBackslashes(glob); - - if (base && backslashes === true) { - base = utils.removeBackslashes(base); - } - } - - const state = { - prefix, - input, - start, - base, - glob, - isBrace, - isBracket, - isGlob, - isExtglob, - isGlobstar, - negated, - negatedExtglob - }; - - if (opts.tokens === true) { - state.maxDepth = 0; - if (!isPathSeparator(code)) { - tokens.push(token); - } - state.tokens = tokens; - } - - if (opts.parts === true || opts.tokens === true) { - let prevIndex; - - for (let idx = 0; idx < slashes.length; idx++) { - const n = prevIndex ? 
prevIndex + 1 : start; - const i = slashes[idx]; - const value = input.slice(n, i); - if (opts.tokens) { - if (idx === 0 && start !== 0) { - tokens[idx].isPrefix = true; - tokens[idx].value = prefix; - } else { - tokens[idx].value = value; - } - depth(tokens[idx]); - state.maxDepth += tokens[idx].depth; - } - if (idx !== 0 || value !== '') { - parts.push(value); - } - prevIndex = i; - } - - if (prevIndex && prevIndex + 1 < input.length) { - const value = input.slice(prevIndex + 1); - parts.push(value); - - if (opts.tokens) { - tokens[tokens.length - 1].value = value; - depth(tokens[tokens.length - 1]); - state.maxDepth += tokens[tokens.length - 1].depth; - } - } - - state.slashes = slashes; - state.parts = parts; - } - - return state; -}; - -module.exports = scan; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js deleted file mode 100644 index c3ca766..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/lib/utils.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -const path = require('path'); -const win32 = process.platform === 'win32'; -const { - REGEX_BACKSLASH, - REGEX_REMOVE_BACKSLASH, - REGEX_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_GLOBAL -} = require('./constants'); - -exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); -exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); -exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); -exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); -exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); - -exports.removeBackslashes = str => { - return str.replace(REGEX_REMOVE_BACKSLASH, match => { - return match === '\\' ? '' : match; - }); -}; - -exports.supportsLookbehinds = () => { - const segs = process.version.slice(1).split('.').map(Number); - if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { - return true; - } - return false; -}; - -exports.isWindows = options => { - if (options && typeof options.windows === 'boolean') { - return options.windows; - } - return win32 === true || path.sep === '\\'; -}; - -exports.escapeLast = (input, char, lastIdx) => { - const idx = input.lastIndexOf(char, lastIdx); - if (idx === -1) return input; - if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); - return `${input.slice(0, idx)}\\${input.slice(idx)}`; -}; - -exports.removePrefix = (input, state = {}) => { - let output = input; - if (output.startsWith('./')) { - output = output.slice(2); - state.prefix = './'; - } - return output; -}; - -exports.wrapOutput = (input, state = {}, options = {}) => { - const prepend = options.contains ? '' : '^'; - const append = options.contains ? 
'' : '$'; - - let output = `${prepend}(?:${input})${append}`; - if (state.negated === true) { - output = `(?:^(?!${output}).*$)`; - } - return output; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json deleted file mode 100644 index 3db22d4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/picomatch/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "picomatch", - "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", - "version": "2.3.1", - "homepage": "https://github.com/micromatch/picomatch", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "funding": "https://github.com/sponsors/jonschlinkert", - "repository": "micromatch/picomatch", - "bugs": { - "url": "https://github.com/micromatch/picomatch/issues" - }, - "license": "MIT", - "files": [ - "index.js", - "lib" - ], - "main": "index.js", - "engines": { - "node": ">=8.6" - }, - "scripts": { - "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", - "mocha": "mocha --reporter dot", - "test": "npm run lint && npm run mocha", - "test:ci": "npm run test:cover", - "test:cover": "nyc npm run mocha" - }, - "devDependencies": { - "eslint": "^6.8.0", - "fill-range": "^7.0.1", - "gulp-format-md": "^2.0.0", - "mocha": "^6.2.2", - "nyc": "^15.0.0", - "time-require": "github:jonschlinkert/time-require" - }, - "keywords": [ - "glob", - "match", - "picomatch" - ], - "nyc": { - "reporter": [ - "html", - "lcov", - "text-summary" - ] - }, - "verb": { - "toc": { - "render": true, - "method": "preWrite", - "maxdepth": 3 - }, - "layout": "empty", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "related": { - "list": [ - "braces", - "micromatch" - ] - }, - "reflinks": [ - "braces", - "expand-brackets", - "extglob", - "fill-range", - "micromatch", - "minimatch", - "nanomatch", - "picomatch" - ] - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE deleted file mode 100755 index c7e6852..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md deleted file mode 100644 index 0be05a6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# queue-microtask [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[ci-image]: https://img.shields.io/github/workflow/status/feross/queue-microtask/ci/master -[ci-url]: https://github.com/feross/queue-microtask/actions -[npm-image]: https://img.shields.io/npm/v/queue-microtask.svg -[npm-url]: https://npmjs.org/package/queue-microtask -[downloads-image]: https://img.shields.io/npm/dm/queue-microtask.svg -[downloads-url]: https://npmjs.org/package/queue-microtask -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -### fast, tiny [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) shim for modern engines - -- Use [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) in all modern JS engines. -- No dependencies. Less than 10 lines. No shims or complicated fallbacks. -- Optimal performance in all modern environments - - Uses `queueMicrotask` in modern environments - - Fallback to `Promise.resolve().then(fn)` in Node.js 10 and earlier, and old browsers (same performance as `queueMicrotask`) - -## install - -``` -npm install queue-microtask -``` - -## usage - -```js -const queueMicrotask = require('queue-microtask') - -queueMicrotask(() => { /* this will run soon */ }) -``` - -## What is `queueMicrotask` and why would one use it? - -The `queueMicrotask` function is a WHATWG standard. It queues a microtask to be executed prior to control returning to the event loop. - -A microtask is a short function which will run after the current task has completed its work and when there is no other code waiting to be run before control of the execution context is returned to the event loop. - -The code `queueMicrotask(fn)` is equivalent to the code `Promise.resolve().then(fn)`. It is also very similar to [`process.nextTick(fn)`](https://nodejs.org/api/process.html#process_process_nexttick_callback_args) in Node. - -Using microtasks lets code run without interfering with any other, potentially higher priority, code that is pending, but before the JS engine regains control over the execution context. - -See the [spec](https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#microtask-queuing) or [Node documentation](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback) for more information. - -## Who is this package for? - -This package allows you to use `queueMicrotask` safely in all modern JS engines. Use it if you prioritize small JS bundle size over support for old browsers. - -If you just need to support Node 12 and later, use `queueMicrotask` directly. If you need to support all versions of Node, use this package. - -## Why not use `process.nextTick`? 
- -In Node, `queueMicrotask` and `process.nextTick` are [essentially equivalent](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback), though there are [subtle differences](https://github.com/YuzuJS/setImmediate#macrotasks-and-microtasks) that don't matter in most situations. - -You can think of `queueMicrotask` as a standardized version of `process.nextTick` that works in the browser. No need to rely on your browser bundler to shim `process` for the browser environment. - -## Why not use `setTimeout(fn, 0)`? - -This approach is the most compatible, but it has problems. Modern browsers throttle timers severely, so `setTimeout(…, 0)` usually takes at least 4ms to run. Furthermore, the throttling gets even worse if the page is backgrounded. If you have many `setTimeout` calls, then this can severely limit the performance of your program. - -## Why not use a microtask library like [`immediate`](https://www.npmjs.com/package/immediate) or [`asap`](https://www.npmjs.com/package/asap)? - -These packages are great! However, if you prioritize small JS bundle size over optimal performance in old browsers then you may want to consider this package. - -This package (`queue-microtask`) is four times smaller than `immediate`, twice as small as `asap`, and twice as small as using `process.nextTick` and letting the browser bundler shim it automatically. - -Note: This package throws an exception in JS environments which lack `Promise` support -- which are usually very old browsers and Node.js versions. - -Since the `queueMicrotask` API is supported in Node.js, Chrome, Firefox, Safari, Opera, and Edge, **the vast majority of users will get optimal performance**. Any JS environment with `Promise`, which is almost all of them, also get optimal performance. If you need support for JS environments which lack `Promise` support, use one of the alternative packages. - -## What is a shim? - -> In computer programming, a shim is a library that transparently intercepts API calls and changes the arguments passed, handles the operation itself or redirects the operation elsewhere. – [Wikipedia](https://en.wikipedia.org/wiki/Shim_(computing)) - -This package could also be described as a "ponyfill". - -> A ponyfill is almost the same as a polyfill, but not quite. Instead of patching functionality for older browsers, a ponyfill provides that functionality as a standalone module you can use. – [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills) - -## API - -### `queueMicrotask(fn)` - -The `queueMicrotask()` method queues a microtask. - -The `fn` argument is a function to be executed after all pending tasks have completed but before yielding control to the browser's event loop. - -## license - -MIT. Copyright (c) [Feross Aboukhadijeh](https://feross.org). 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts deleted file mode 100644 index b6a8646..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -declare const queueMicrotask: (cb: () => void) => void -export = queueMicrotask diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js deleted file mode 100644 index 5560534..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/index.js +++ /dev/null @@ -1,9 +0,0 @@ -/*! queue-microtask. MIT License. Feross Aboukhadijeh */ -let promise - -module.exports = typeof queueMicrotask === 'function' - ? queueMicrotask.bind(typeof window !== 'undefined' ? window : global) - // reuse resolved promise, and allocate it lazily - : cb => (promise || (promise = Promise.resolve())) - .then(cb) - .catch(err => setTimeout(() => { throw err }, 0)) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json deleted file mode 100644 index d29a401..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/queue-microtask/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "queue-microtask", - "description": "fast, tiny `queueMicrotask` shim for modern engines", - "version": "1.2.3", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/queue-microtask/issues" - }, - "devDependencies": { - "standard": "*", - "tape": "^5.2.2" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "homepage": "https://github.com/feross/queue-microtask", - "keywords": [ - "asap", - "immediate", - "micro task", - "microtask", - "nextTick", - "process.nextTick", - "queue micro task", - "queue microtask", - "queue-microtask", - "queueMicrotask", - "setImmediate", - "task" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/feross/queue-microtask.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml deleted file mode 100644 index 4872c5a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/dependabot.yml +++ /dev/null @@ -1,7 +0,0 @@ -version: 2 -updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - open-pull-requests-limit: 10 diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml deleted file mode 100644 index 1e30ad8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/.github/workflows/ci.yml +++ /dev/null @@ -1,96 +0,0 @@ -name: ci - -on: [push, pull_request] - -jobs: - legacy: - runs-on: ubuntu-latest - - strategy: - matrix: - 
node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install --production && npm install tape - - - name: Run tests - run: | - npm run test - - test: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: [18.x, 20.x, 22.x] - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install - - - name: Run tests - run: | - npm run test:coverage - - types: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 22 - - - name: Install - run: | - npm install - - - name: Run types tests - run: | - npm run test:typescript - - lint: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 22 - - - name: Install - run: | - npm install - - - name: Lint - run: | - npm run lint diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE deleted file mode 100644 index 56d1590..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-2024 Matteo Collina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md deleted file mode 100644 index 1aaee5d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/README.md +++ /dev/null @@ -1,139 +0,0 @@ -# reusify - -[![npm version][npm-badge]][npm-url] - -Reuse your objects and functions for maximum speed. This technique will -make any function run ~10% faster. You call your functions a -lot, and it adds up quickly in hot code paths. 
- -``` -$ node benchmarks/createNoCodeFunction.js -Total time 53133 -Total iterations 100000000 -Iteration/s 1882069.5236482036 - -$ node benchmarks/reuseNoCodeFunction.js -Total time 50617 -Total iterations 100000000 -Iteration/s 1975620.838848608 -``` - -The above benchmark uses fibonacci to simulate a real high-cpu load. -The actual numbers might differ for your use case, but the difference -should not. - -The benchmark was taken using Node v6.10.0. - -This library was extracted from -[fastparallel](http://npm.im/fastparallel). - -## Example - -```js -var reusify = require('reusify') -var fib = require('reusify/benchmarks/fib') -var instance = reusify(MyObject) - -// get an object from the cache, -// or creates a new one when cache is empty -var obj = instance.get() - -// set the state -obj.num = 100 -obj.func() - -// reset the state. -// if the state contains any external object -// do not use delete operator (it is slow) -// prefer set them to null -obj.num = 0 - -// store an object in the cache -instance.release(obj) - -function MyObject () { - // you need to define this property - // so V8 can compile MyObject into an - // hidden class - this.next = null - this.num = 0 - - var that = this - - // this function is never reallocated, - // so it can be optimized by V8 - this.func = function () { - if (null) { - // do nothing - } else { - // calculates fibonacci - fib(that.num) - } - } -} -``` - -The above example was intended for synchronous code, let's see async: -```js -var reusify = require('reusify') -var instance = reusify(MyObject) - -for (var i = 0; i < 100; i++) { - getData(i, console.log) -} - -function getData (value, cb) { - var obj = instance.get() - - obj.value = value - obj.cb = cb - obj.run() -} - -function MyObject () { - this.next = null - this.value = null - - var that = this - - this.run = function () { - asyncOperation(that.value, that.handle) - } - - this.handle = function (err, result) { - that.cb(err, result) - that.value = null - that.cb = null - instance.release(that) - } -} -``` - -Also note how in the above examples, the code, that consumes an instance of `MyObject`, -reset the state to initial condition, just before storing it in the cache. -That's needed so that every subsequent request for an instance from the cache, -could get a clean instance. - -## Why - -It is faster because V8 doesn't have to collect all the functions you -create. On a short-lived benchmark, it is as fast as creating the -nested function, but on a longer time frame it creates less -pressure on the garbage collector. - -## Other examples -If you want to see some complex example, checkout [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed). - -## Acknowledgements - -Thanks to [Trevor Norris](https://github.com/trevnorris) for -getting me down the rabbit hole of performance, and thanks to [Mathias -Buss](http://github.com/mafintosh) for suggesting me to share this -trick. 
- -## License - -MIT - -[npm-badge]: https://badge.fury.io/js/reusify.svg -[npm-url]: https://badge.fury.io/js/reusify diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md deleted file mode 100644 index dd9f1d5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/SECURITY.md +++ /dev/null @@ -1,15 +0,0 @@ -# Security Policy - -## Supported Versions - -Use this section to tell people about which versions of your project are -currently being supported with security updates. - -| Version | Supported | -| ------- | ------------------ | -| 1.x | :white_check_mark: | -| < 1.0 | :x: | - -## Reporting a Vulnerability - -Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js deleted file mode 100644 index ce1aac7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/createNoCodeFunction.js +++ /dev/null @@ -1,30 +0,0 @@ -'use strict' - -var fib = require('./fib') -var max = 100000000 -var start = Date.now() - -// create a funcion with the typical error -// pattern, that delegates the heavy load -// to something else -function createNoCodeFunction () { - /* eslint no-constant-condition: "off" */ - var num = 100 - - ;(function () { - if (null) { - // do nothing - } else { - fib(num) - } - })() -} - -for (var i = 0; i < max; i++) { - createNoCodeFunction() -} - -var time = Date.now() - start -console.log('Total time', time) -console.log('Total iterations', max) -console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js deleted file mode 100644 index e22cc48..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/fib.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -function fib (num) { - var fib = [] - - fib[0] = 0 - fib[1] = 1 - for (var i = 2; i <= num; i++) { - fib[i] = fib[i - 2] + fib[i - 1] - } -} - -module.exports = fib diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js deleted file mode 100644 index 3358d6e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/benchmarks/reuseNoCodeFunction.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict' - -var reusify = require('../') -var fib = require('./fib') -var instance = reusify(MyObject) -var max = 100000000 -var start = Date.now() - -function reuseNoCodeFunction () { - var obj = instance.get() - obj.num = 100 - obj.func() - obj.num = 0 - instance.release(obj) -} - -function MyObject () { - this.next = null - var that = this - this.num = 0 - this.func = function () { - /* eslint no-constant-condition: "off" */ - if (null) { - // do nothing - } else { - fib(that.num) - } - } -} - -for (var i = 0; i < max; i++) { - reuseNoCodeFunction() -} - -var time = Date.now() - start -console.log('Total time', time) -console.log('Total iterations', max) -console.log('Iteration/s', max / time * 1000) diff --git 
a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js deleted file mode 100644 index d0a9af6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/eslint.config.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const base = require('neostandard')({}) - -module.exports = [ - ...base, - { - name: 'old-standard', - rules: { - 'no-var': 'off', - 'object-shorthand': 'off', - } - } -] diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json deleted file mode 100644 index e47ff11..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "reusify", - "version": "1.1.0", - "description": "Reuse objects and functions with style", - "main": "reusify.js", - "types": "reusify.d.ts", - "scripts": { - "lint": "eslint", - "test": "tape test.js", - "test:coverage": "c8 --100 tape test.js", - "test:typescript": "tsc" - }, - "pre-commit": [ - "lint", - "test", - "test:typescript" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/reusify.git" - }, - "keywords": [ - "reuse", - "object", - "performance", - "function", - "fast" - ], - "author": "Matteo Collina ", - "license": "MIT", - "bugs": { - "url": "https://github.com/mcollina/reusify/issues" - }, - "homepage": "https://github.com/mcollina/reusify#readme", - "engines": { - "node": ">=0.10.0", - "iojs": ">=1.0.0" - }, - "devDependencies": { - "@types/node": "^22.9.0", - "eslint": "^9.13.0", - "neostandard": "^0.12.0", - "pre-commit": "^1.2.2", - "tape": "^5.0.0", - "c8": "^10.1.2", - "typescript": "^5.2.2" - }, - "dependencies": { - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts deleted file mode 100644 index 9ba277d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -interface Node { - next: Node | null; -} - -interface Constructor { - new(): T; -} - -declare function reusify(constructor: Constructor): { - get(): T; - release(node: T): void; -}; - -export = reusify; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js deleted file mode 100644 index e6f36f3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/reusify.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -function reusify (Constructor) { - var head = new Constructor() - var tail = head - - function get () { - var current = head - - if (current.next) { - head = current.next - } else { - head = new Constructor() - tail = head - } - - current.next = null - - return current - } - - function release (obj) { - tail.next = obj - tail = obj - } - - return { - get: get, - release: release - } -} - -module.exports = reusify diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js deleted file mode 100644 index 929cfd7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/test.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -var test = require('tape') -var reusify = require('./') - -test('reuse objects', 
function (t) { - t.plan(6) - - function MyObject () { - t.pass('constructor called') - this.next = null - } - - var instance = reusify(MyObject) - var obj = instance.get() - - t.notEqual(obj, instance.get(), 'two instance created') - t.notOk(obj.next, 'next must be null') - - instance.release(obj) - - // the internals keeps a hot copy ready for reuse - // putting this one back in the queue - instance.release(instance.get()) - - // comparing the old one with the one we got - // never do this in real code, after release you - // should never reuse that instance - t.equal(obj, instance.get(), 'instance must be reused') -}) - -test('reuse more than 2 objects', function (t) { - function MyObject () { - t.pass('constructor called') - this.next = null - } - - var instance = reusify(MyObject) - var obj = instance.get() - var obj2 = instance.get() - var obj3 = instance.get() - - t.notOk(obj.next, 'next must be null') - t.notOk(obj2.next, 'next must be null') - t.notOk(obj3.next, 'next must be null') - - t.notEqual(obj, obj2) - t.notEqual(obj, obj3) - t.notEqual(obj3, obj2) - - instance.release(obj) - instance.release(obj2) - instance.release(obj3) - - // skip one - instance.get() - - var obj4 = instance.get() - var obj5 = instance.get() - var obj6 = instance.get() - - t.equal(obj4, obj) - t.equal(obj5, obj2) - t.equal(obj6, obj3) - t.end() -}) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json deleted file mode 100644 index dbe862b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/reusify/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "noEmit": true, - "strict": true - }, - "files": [ - "./reusify.d.ts" - ] -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE deleted file mode 100644 index c7e6852..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md deleted file mode 100644 index edc3da4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# run-parallel [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/run-parallel/master.svg -[travis-url]: https://travis-ci.org/feross/run-parallel -[npm-image]: https://img.shields.io/npm/v/run-parallel.svg -[npm-url]: https://npmjs.org/package/run-parallel -[downloads-image]: https://img.shields.io/npm/dm/run-parallel.svg -[downloads-url]: https://npmjs.org/package/run-parallel -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -### Run an array of functions in parallel - -![parallel](https://raw.githubusercontent.com/feross/run-parallel/master/img.png) [![Sauce Test Status](https://saucelabs.com/browser-matrix/run-parallel.svg)](https://saucelabs.com/u/run-parallel) - -### install - -``` -npm install run-parallel -``` - -### usage - -#### parallel(tasks, [callback]) - -Run the `tasks` array of functions in parallel, without waiting until the previous -function has completed. If any of the functions pass an error to its callback, the main -`callback` is immediately called with the value of the error. Once the `tasks` have -completed, the results are passed to the final `callback` as an array. - -It is also possible to use an object instead of an array. Each property will be run as a -function and the results will be passed to the final `callback` as an object instead of -an array. This can be a more readable way of handling the results. - -##### arguments - -- `tasks` - An array or object containing functions to run. Each function is passed a -`callback(err, result)` which it must call on completion with an error `err` (which can -be `null`) and an optional `result` value. -- `callback(err, results)` - An optional callback to run once all the functions have -completed. This function gets a results array (or object) containing all the result -arguments passed to the task callbacks. - -##### example - -```js -var parallel = require('run-parallel') - -parallel([ - function (callback) { - setTimeout(function () { - callback(null, 'one') - }, 200) - }, - function (callback) { - setTimeout(function () { - callback(null, 'two') - }, 100) - } -], -// optional callback -function (err, results) { - // the results array will equal ['one','two'] even though - // the second function had a shorter timeout. -}) -``` - -This module is basically equavalent to -[`async.parallel`](https://github.com/caolan/async#paralleltasks-callback), but it's -handy to just have the one function you need instead of the kitchen sink. Modularity! -Especially handy if you're serving to the browser and need to reduce your javascript -bundle size. - -Works great in the browser with [browserify](http://browserify.org/)! - -### see also - -- [run-auto](https://github.com/feross/run-auto) -- [run-parallel-limit](https://github.com/feross/run-parallel-limit) -- [run-series](https://github.com/feross/run-series) -- [run-waterfall](https://github.com/feross/run-waterfall) - -### license - -MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js deleted file mode 100644 index 6307141..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/index.js +++ /dev/null @@ -1,51 +0,0 @@ -/*! run-parallel. MIT License. Feross Aboukhadijeh */ -module.exports = runParallel - -const queueMicrotask = require('queue-microtask') - -function runParallel (tasks, cb) { - let results, pending, keys - let isSync = true - - if (Array.isArray(tasks)) { - results = [] - pending = tasks.length - } else { - keys = Object.keys(tasks) - results = {} - pending = keys.length - } - - function done (err) { - function end () { - if (cb) cb(err, results) - cb = null - } - if (isSync) queueMicrotask(end) - else end() - } - - function each (i, err, result) { - results[i] = result - if (--pending === 0 || err) { - done(err) - } - } - - if (!pending) { - // empty - done(null) - } else if (keys) { - // object - keys.forEach(function (key) { - tasks[key](function (err, result) { each(key, err, result) }) - }) - } else { - // array - tasks.forEach(function (task, i) { - task(function (err, result) { each(i, err, result) }) - }) - } - - isSync = false -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json deleted file mode 100644 index 1f14757..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/run-parallel/package.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "run-parallel", - "description": "Run an array of functions in parallel", - "version": "1.2.0", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/run-parallel/issues" - }, - "dependencies": { - "queue-microtask": "^1.2.2" - }, - "devDependencies": { - "airtap": "^3.0.0", - "standard": "*", - "tape": "^5.0.1" - }, - "homepage": "https://github.com/feross/run-parallel", - "keywords": [ - "parallel", - "async", - "function", - "callback", - "asynchronous", - "run", - "array", - "run parallel" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/feross/run-parallel.git" - }, - "scripts": { - "test": "standard && npm run test-node && npm run test-browser", - "test-browser": "airtap -- test/*.js", - "test-browser-local": "airtap --local -- test/*.js", - "test-node": "tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js deleted file mode 100644 index f35db30..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/index.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; -const shebangRegex = require('shebang-regex'); - -module.exports = (string = '') => { - const match = string.match(shebangRegex); - - if (!match) { - return null; - } - - const [path, argument] = match[0].replace(/#! ?/, '').split(' '); - const binary = path.split('/').pop(); - - if (binary === 'env') { - return argument; - } - - return argument ? 
`${binary} ${argument}` : binary; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license deleted file mode 100644 index db6bc32..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Kevin Mårtensson (github.com/kevva) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json deleted file mode 100644 index 18e3c04..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "shebang-command", - "version": "2.0.0", - "description": "Get the command from a shebang", - "license": "MIT", - "repository": "kevva/shebang-command", - "author": { - "name": "Kevin Mårtensson", - "email": "kevinmartensson@gmail.com", - "url": "github.com/kevva" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava" - }, - "files": [ - "index.js" - ], - "keywords": [ - "cmd", - "command", - "parse", - "shebang" - ], - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "devDependencies": { - "ava": "^2.3.0", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md deleted file mode 100644 index 84feb44..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-command/readme.md +++ /dev/null @@ -1,34 +0,0 @@ -# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command) - -> Get the command from a shebang - - -## Install - -``` -$ npm install shebang-command -``` - - -## Usage - -```js -const shebangCommand = require('shebang-command'); - -shebangCommand('#!/usr/bin/env node'); -//=> 'node' - -shebangCommand('#!/bin/bash'); -//=> 'bash' -``` - - -## API - -### shebangCommand(string) - -#### string - -Type: `string` - -String containing a shebang. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts deleted file mode 100644 index 61d034b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** -Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line. - -@example -``` -import shebangRegex = require('shebang-regex'); - -const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; - -shebangRegex.test(string); -//=> true - -shebangRegex.exec(string)[0]; -//=> '#!/usr/bin/env node' - -shebangRegex.exec(string)[1]; -//=> '/usr/bin/env node' -``` -*/ -declare const shebangRegex: RegExp; - -export = shebangRegex; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js deleted file mode 100644 index 63fc4a0..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/index.js +++ /dev/null @@ -1,2 +0,0 @@ -'use strict'; -module.exports = /^#!(.*)/; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json deleted file mode 100644 index 00ab30f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "shebang-regex", - "version": "3.0.0", - "description": "Regular expression for matching a shebang line", - "license": "MIT", - "repository": "sindresorhus/shebang-regex", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "regex", - "regexp", - "shebang", - "match", - "test", - "line" - ], - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md deleted file mode 100644 index 5ecf863..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shebang-regex/readme.md +++ /dev/null @@ -1,33 +0,0 @@ -# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex) - -> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line - - -## Install - -``` -$ npm install shebang-regex -``` - - -## Usage - -```js -const shebangRegex = require('shebang-regex'); - -const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; - -shebangRegex.test(string); -//=> true - -shebangRegex.exec(string)[0]; -//=> '#!/usr/bin/env node' - -shebangRegex.exec(string)[1]; -//=> '/usr/bin/env node' -``` - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE deleted file mode 100644 index 40a2bf6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2012, Artur Adib -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md deleted file mode 100644 index a0de676..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/README.md +++ /dev/null @@ -1,949 +0,0 @@ -# ShellJS - Unix shell commands for Node.js - -[![GitHub Actions](https://img.shields.io/github/actions/workflow/status/shelljs/shelljs/main.yml?style=flat-square&logo=github)](https://github.com/shelljs/shelljs/actions/workflows/main.yml) -[![Codecov](https://img.shields.io/codecov/c/github/shelljs/shelljs/main.svg?style=flat-square&label=coverage)](https://codecov.io/gh/shelljs/shelljs) -[![npm version](https://img.shields.io/npm/v/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) -[![npm downloads](https://img.shields.io/npm/dm/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) - -ShellJS is a portable **(Windows/Linux/macOS)** implementation of Unix shell -commands on top of the Node.js API. You can use it to eliminate your shell -script's dependency on Unix while still keeping its familiar and powerful -commands. You can also install it globally so you can run it from outside Node -projects - say goodbye to those gnarly Bash scripts! - -ShellJS is proudly tested on every LTS node release since `v18`! - -The project is unit-tested and battle-tested in projects like: - -+ [Firebug](http://getfirebug.com/) - Firefox's infamous debugger -+ [JSHint](http://jshint.com) & [ESLint](http://eslint.org/) - popular JavaScript linters -+ [Zepto](http://zeptojs.com) - jQuery-compatible JavaScript library for modern browsers -+ [Yeoman](http://yeoman.io/) - Web application stack and development tool -+ [Deployd.com](http://deployd.com) - Open source PaaS for quick API backend generation -+ And [many more](https://npmjs.org/browse/depended/shelljs). - -If you have feedback, suggestions, or need help, feel free to post in our [issue -tracker](https://github.com/shelljs/shelljs/issues). - -Think ShellJS is cool? Check out some related projects in our [Wiki -page](https://github.com/shelljs/shelljs/wiki)! - -Upgrading from an older version? Check out our [breaking -changes](https://github.com/shelljs/shelljs/wiki/Breaking-Changes) page to see -what changes to watch out for while upgrading. - -## Command line use - -If you just want cross platform UNIX commands, checkout our new project -[shelljs/shx](https://github.com/shelljs/shx), a utility to expose `shelljs` to -the command line. - -For example: - -``` -$ shx mkdir -p foo -$ shx touch foo/bar.txt -$ shx rm -rf foo -``` - -## Plugin API - -ShellJS now supports third-party plugins! You can learn more about using plugins -and writing your own ShellJS commands in [the -wiki](https://github.com/shelljs/shelljs/wiki/Using-ShellJS-Plugins). - -## A quick note about the docs - -For documentation on all the latest features, check out our -[README](https://github.com/shelljs/shelljs). 
To read docs that are consistent -with the latest release, check out [the npm -page](https://www.npmjs.com/package/shelljs). - -## Installing - -Via npm: - -```bash -$ npm install [-g] shelljs -``` - -## Examples - -```javascript -var shell = require('shelljs'); - -if (!shell.which('git')) { - shell.echo('Sorry, this script requires git'); - shell.exit(1); -} - -// Copy files to release dir -shell.rm('-rf', 'out/Release'); -shell.cp('-R', 'stuff/', 'out/Release'); - -// Replace macros in each .js file -shell.cd('lib'); -shell.ls('*.js').forEach(function (file) { - shell.sed('-i', 'BUILD_VERSION', 'v0.1.2', file); - shell.sed('-i', /^.*REMOVE_THIS_LINE.*$/, '', file); - shell.sed('-i', /.*REPLACE_LINE_WITH_MACRO.*\n/, shell.cat('macro.js'), file); -}); -shell.cd('..'); - -// Run external tool synchronously -if (shell.exec('git commit -am "Auto-commit"').code !== 0) { - shell.echo('Error: Git commit failed'); - shell.exit(1); -} -``` - -## Exclude options - -If you need to pass a parameter that looks like an option, you can do so like: - -```js -shell.grep('--', '-v', 'path/to/file'); // Search for "-v", no grep options - -shell.cp('-R', '-dir', 'outdir'); // If already using an option, you're done -``` - -## Global vs. Local - -We no longer recommend using a global-import for ShellJS (i.e. -`require('shelljs/global')`). While still supported for convenience, this -pollutes the global namespace, and should therefore only be used with caution. - -Instead, we recommend a local import (standard for npm packages): - -```javascript -var shell = require('shelljs'); -shell.echo('hello world'); -``` - -Alternatively, we also support importing as a module with: - -```javascript -import shell from 'shelljs'; -shell.echo('hello world'); -``` - - - - -## Command reference - - -All commands run synchronously, unless otherwise stated. -All commands accept standard bash globbing characters (`*`, `?`, etc.), -compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). - -For less-commonly used commands and features, please check out our [wiki -page](https://github.com/shelljs/shelljs/wiki). - - -### cat([options,] file [, file ...]) -### cat([options,] file_array) - -Available options: - -+ `-n`: number all output lines - -Examples: - -```javascript -var str = cat('file*.txt'); -var str = cat('file1', 'file2'); -var str = cat(['file1', 'file2']); // same as above -``` - -Returns a [ShellString](#shellstringstr) containing the given file, or a -concatenated string containing the files if more than one file is given (a -new line character is introduced between each file). - - -### cd([dir]) - -Changes to directory `dir` for the duration of the script. Changes to home -directory if no argument is supplied. Returns a -[ShellString](#shellstringstr) to indicate success or failure. - - -### chmod([options,] octal_mode || octal_string, file) -### chmod([options,] symbolic_mode, file) - -Available options: - -+ `-v`: output a diagnostic for every file processed -+ `-c`: like verbose, but report only when a change is made -+ `-R`: change files and directories recursively - -Examples: - -```javascript -chmod(755, '/Users/brandon'); -chmod('755', '/Users/brandon'); // same as above -chmod('u+x', '/Users/brandon'); -chmod('-R', 'a-w', '/Users/brandon'); -``` - -Alters the permissions of a file or directory by either specifying the -absolute permissions in octal form or expressing the changes in symbols. -This command tries to mimic the POSIX behavior as much as possible. 
-Notable exceptions: - -+ In symbolic modes, `a-r` and `-r` are identical. No consideration is - given to the `umask`. -+ There is no "quiet" option, since default behavior is to run silent. -+ Windows OS uses a very different permission model than POSIX. `chmod()` - does its best on Windows, but there are limits to how file permissions can - be set. Note that WSL (Windows subsystem for Linux) **does** follow POSIX, - so cross-platform compatibility should not be a concern there. - -Returns a [ShellString](#shellstringstr) indicating success or failure. - - -### cmd(arg1[, arg2, ...] [, options]) - -Available options: - -+ `cwd: directoryPath`: change the current working directory only for this - cmd() invocation. -+ `maxBuffer: num`: Raise or decrease the default buffer size for - stdout/stderr. -+ `timeout`: Change the default timeout. - -Examples: - -```javascript -var version = cmd('node', '--version').stdout; -cmd('git', 'commit', '-am', `Add suport for node ${version}`); -console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) -console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) -``` - -Executes the given command synchronously. This is intended as an easier -alternative for [exec()](#execcommand--options--callback), with better -security around globbing, comamnd injection, and variable expansion. This is -guaranteed to only run one external command, and won't give special -treatment for any shell characters (ex. this treats `|` as a literal -character, not as a shell pipeline). -This returns a [ShellString](#shellstringstr). - -By default, this performs globbing on all platforms, but you can disable -this with `set('-f')`. - -This **does not** support asynchronous mode. If you need asynchronous -command execution, check out [execa](https://www.npmjs.com/package/execa) or -the node builtin `child_process.execFile()` instead. - - -### cp([options,] source [, source ...], dest) -### cp([options,] source_array, dest) - -Available options: - -+ `-f`: force (default behavior) -+ `-n`: no-clobber -+ `-u`: only copy if `source` is newer than `dest` -+ `-r`, `-R`: recursive -+ `-L`: follow symlinks -+ `-P`: don't follow symlinks -+ `-p`: preserve file mode, ownership, and timestamps - -Examples: - -```javascript -cp('file1', 'dir1'); -cp('-R', 'path/to/dir/', '~/newCopy/'); -cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); -cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above -``` - -Copies files. Returns a [ShellString](#shellstringstr) indicating success -or failure. - - -### pushd([options,] [dir | '-N' | '+N']) - -Available options: - -+ `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. -+ `-q`: Suppresses output to the console. - -Arguments: - -+ `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. -+ `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. -+ `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. - -Examples: - -```javascript -// process.cwd() === '/usr' -pushd('/etc'); // Returns /etc /usr -pushd('+1'); // Returns /usr /etc -``` - -Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. 
Returns an array of paths in the stack. - - -### popd([options,] ['-N' | '+N']) - -Available options: - -+ `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. -+ `-q`: Suppresses output to the console. - -Arguments: - -+ `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. -+ `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. - -Examples: - -```javascript -echo(process.cwd()); // '/usr' -pushd('/etc'); // '/etc /usr' -echo(process.cwd()); // '/etc' -popd(); // '/usr' -echo(process.cwd()); // '/usr' -``` - -When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. - - -### dirs([options | '+N' | '-N']) - -Available options: - -+ `-c`: Clears the directory stack by deleting all of the elements. -+ `-q`: Suppresses output to the console. - -Arguments: - -+ `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. -+ `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. - -Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. - -See also: `pushd`, `popd` - - -### echo([options,] string [, string ...]) - -Available options: - -+ `-e`: interpret backslash escapes (default) -+ `-n`: remove trailing newline from output - -Examples: - -```javascript -echo('hello world'); -var str = echo('hello world'); -echo('-n', 'no newline at end'); -``` - -Prints `string` to stdout, and returns a [ShellString](#shellstringstr). - - -### exec(command [, options] [, callback]) - -Available options: - -+ `async`: Asynchronous execution. If a callback is provided, it will be set to - `true`, regardless of the passed value (default: `false`). -+ `fatal`: Exit upon error (default: `false`). -+ `silent`: Do not echo program output to console (default: `false`). -+ `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and - what is written to stdout and stderr when not in silent mode (default: `'utf8'`). -+ and any option available to Node.js's - [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) - -Examples: - -```javascript -var version = exec('node --version', {silent:true}).stdout; - -var child = exec('some_long_running_process', {async:true}); -child.stdout.on('data', function(data) { - /* ... do something with data ... */ -}); - -exec('some_long_running_process', function(code, stdout, stderr) { - console.log('Exit code:', code); - console.log('Program output:', stdout); - console.log('Program stderr:', stderr); -}); -``` - -Executes the given `command` _synchronously_, unless otherwise specified. -When in synchronous mode, this returns a [ShellString](#shellstringstr). -Otherwise, this returns the child process object, and the `callback` -receives the arguments `(code, stdout, stderr)`. - -Not seeing the behavior you want? `exec()` runs everything through `sh` -by default (or `cmd.exe` on Windows), which differs from `bash`. 
If you -need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. - -**Security note:** as `shell.exec()` executes an arbitrary string in the -system shell, it is **critical** to properly sanitize user input to avoid -**command injection**. For more context, consult the [Security -Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). - - -### find(path [, path ...]) -### find(path_array) - -Examples: - -```javascript -find('src', 'lib'); -find(['src', 'lib']); // same as above -find('.').filter(function(file) { return file.match(/\.js$/); }); -``` - -Returns a [ShellString](#shellstringstr) (with array-like properties) of all -files (however deep) in the given paths. - -The main difference from `ls('-R', path)` is that the resulting file names -include the base directories (e.g., `lib/resources/file1` instead of just `file1`). - - -### grep([options,] regex_filter, file [, file ...]) -### grep([options,] regex_filter, file_array) - -Available options: - -+ `-v`: Invert `regex_filter` (only print non-matching lines). -+ `-l`: Print only filenames of matching files. -+ `-i`: Ignore case. -+ `-n`: Print line numbers. -+ `-B `: Show `` lines before each result. -+ `-A `: Show `` lines after each result. -+ `-C `: Show `` lines before and after each result. -B and -A override this option. - -Examples: - -```javascript -grep('-v', 'GLOBAL_VARIABLE', '*.js'); -grep('GLOBAL_VARIABLE', '*.js'); -grep('-B', 3, 'GLOBAL_VARIABLE', '*.js'); -grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js'); -grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js'); -``` - -Reads input string from given files and returns a -[ShellString](#shellstringstr) containing all lines of the @ file that match -the given `regex_filter`. - - -### head([{'-n': \},] file [, file ...]) -### head([{'-n': \},] file_array) - -Available options: - -+ `-n `: Show the first `` lines of the files - -Examples: - -```javascript -var str = head({'-n': 1}, 'file*.txt'); -var str = head('file1', 'file2'); -var str = head(['file1', 'file2']); // same as above -``` - -Read the start of a `file`. Returns a [ShellString](#shellstringstr). - - -### ln([options,] source, dest) - -Available options: - -+ `-s`: symlink -+ `-f`: force - -Examples: - -```javascript -ln('file', 'newlink'); -ln('-sf', 'file', 'existing'); -``` - -Links `source` to `dest`. Use `-f` to force the link, should `dest` already -exist. Returns a [ShellString](#shellstringstr) indicating success or -failure. - - -### ls([options,] [path, ...]) -### ls([options,] path_array) - -Available options: - -+ `-R`: recursive -+ `-A`: all files (include files beginning with `.`, except for `.` and `..`) -+ `-L`: follow symlinks -+ `-d`: list directories themselves, not their contents -+ `-l`: provides more details for each file. Specifically, each file is - represented by a structured object with separate fields for file - metadata (see - [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The - return value also overrides `.toString()` to resemble `ls -l`'s - output format for human readability, but programmatic usage should - depend on the stable object format rather than the `.toString()` - representation. 
- -Examples: - -```javascript -ls('projs/*.js'); -ls('projs/**/*.js'); // Find all js files recursively in projs -ls('-R', '/users/me', '/tmp'); -ls('-R', ['/users/me', '/tmp']); // same as above -ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} -``` - -Returns a [ShellString](#shellstringstr) (with array-like properties) of all -the files in the given `path`, or files in the current directory if no -`path` is provided. - - -### mkdir([options,] dir [, dir ...]) -### mkdir([options,] dir_array) - -Available options: - -+ `-p`: full path (and create intermediate directories, if necessary) - -Examples: - -```javascript -mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); -mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above -``` - -Creates directories. Returns a [ShellString](#shellstringstr) indicating -success or failure. - - -### mv([options ,] source [, source ...], dest') -### mv([options ,] source_array, dest') - -Available options: - -+ `-f`: force (default behavior) -+ `-n`: no-clobber - -Examples: - -```javascript -mv('-n', 'file', 'dir/'); -mv('file1', 'file2', 'dir/'); -mv(['file1', 'file2'], 'dir/'); // same as above -``` - -Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) -indicating success or failure. - - -### pwd() - -Returns the current directory as a [ShellString](#shellstringstr). - - -### rm([options,] file [, file ...]) -### rm([options,] file_array) - -Available options: - -+ `-f`: force -+ `-r, -R`: recursive - -Examples: - -```javascript -rm('-rf', '/tmp/*'); -rm('some_file.txt', 'another_file.txt'); -rm(['some_file.txt', 'another_file.txt']); // same as above -``` - -Removes files. Returns a [ShellString](#shellstringstr) indicating success -or failure. - - -### sed([options,] search_regex, replacement, file [, file ...]) -### sed([options,] search_regex, replacement, file_array) - -Available options: - -+ `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ - -Examples: - -```javascript -sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); -``` - -Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on -each of the lines from the input string using the given `search_regex` and `replacement` string or -function. Returns the new [ShellString](#shellstringstr) after replacement. - -Note: - -Like unix `sed`, ShellJS `sed` supports capture groups. Capture groups are specified -using the `$n` syntax: - -```javascript -sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); -``` - -Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file -(split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') -will not match anything and nothing will be replaced. - - -### set(options) - -Available options: - -+ `+/-e`: exit upon error (`config.fatal`) -+ `+/-v`: verbose: show all commands (`config.verbose`) -+ `+/-f`: disable filename expansion (globbing) - -Examples: - -```javascript -set('-e'); // exit upon first error -set('+e'); // this undoes a "set('-e')" -``` - -Sets global configuration variables. - - -### sort([options,] file [, file ...]) -### sort([options,] file_array) - -Available options: - -+ `-r`: Reverse the results -+ `-n`: Compare according to numerical value - -Examples: - -```javascript -sort('foo.txt', 'bar.txt'); -sort('-r', 'foo.txt'); -``` - -Return the contents of the `file`s, sorted line-by-line as a -[ShellString](#shellstringstr). 
Sorting multiple files mixes their content -(just as unix `sort` does). - - -### tail([{'-n': \},] file [, file ...]) -### tail([{'-n': \},] file_array) - -Available options: - -+ `-n `: Show the last `` lines of `file`s - -Examples: - -```javascript -var str = tail({'-n': 1}, 'file*.txt'); -var str = tail('file1', 'file2'); -var str = tail(['file1', 'file2']); // same as above -``` - -Read the end of a `file`. Returns a [ShellString](#shellstringstr). - - -### tempdir() - -Examples: - -```javascript -var tmp = tempdir(); // "/tmp" for most *nix platforms -``` - -Searches and returns string containing a writeable, platform-dependent temporary directory. -Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). - - -### test(expression) - -Available expression primaries: - -+ `'-b', 'path'`: true if path is a block device -+ `'-c', 'path'`: true if path is a character device -+ `'-d', 'path'`: true if path is a directory -+ `'-e', 'path'`: true if path exists -+ `'-f', 'path'`: true if path is a regular file -+ `'-L', 'path'`: true if path is a symbolic link -+ `'-p', 'path'`: true if path is a pipe (FIFO) -+ `'-S', 'path'`: true if path is a socket - -Examples: - -```javascript -if (test('-d', path)) { /* do something with dir */ }; -if (!test('-f', path)) continue; // skip if it's not a regular file -``` - -Evaluates `expression` using the available primaries and returns -corresponding boolean value. - - -### ShellString.prototype.to(file) - -Examples: - -```javascript -cat('input.txt').to('output.txt'); -``` - -Analogous to the redirection operator `>` in Unix, but works with -`ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix -redirections, `to()` will overwrite any existing file!_ Returns the same -[ShellString](#shellstringstr) this operated on, to support chaining. - - -### ShellString.prototype.toEnd(file) - -Examples: - -```javascript -cat('input.txt').toEnd('output.txt'); -``` - -Analogous to the redirect-and-append operator `>>` in Unix, but works with -`ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the -same [ShellString](#shellstringstr) this operated on, to support chaining. - - -### touch([options,] file [, file ...]) -### touch([options,] file_array) - -Available options: - -+ `-a`: Change only the access time -+ `-c`: Do not create any files -+ `-m`: Change only the modification time -+ `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) - instead of current time -+ `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current - time - -Examples: - -```javascript -touch('source.js'); -touch('-c', 'path/to/file.js'); -touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); -touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); -touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); -``` - -Update the access and modification times of each file to the current time. -A file argument that does not exist is created empty, unless `-c` is supplied. -This is a partial implementation of -[`touch(1)`](http://linux.die.net/man/1/touch). Returns a -[ShellString](#shellstringstr) indicating success or failure. 
- - -### uniq([options,] [input, [output]]) - -Available options: - -+ `-i`: Ignore case while comparing -+ `-c`: Prefix lines by the number of occurrences -+ `-d`: Only print duplicate lines, one for each group of identical lines - -Examples: - -```javascript -uniq('foo.txt'); -uniq('-i', 'foo.txt'); -uniq('-cd', 'foo.txt', 'bar.txt'); -``` - -Filter adjacent matching lines from `input`. Returns a -[ShellString](#shellstringstr). - - -### which(command) - -Examples: - -```javascript -var nodeExec = which('node'); -``` - -Searches for `command` in the system's `PATH`. On Windows, this uses the -`PATHEXT` variable to append the extension if it's not already executable. -Returns a [ShellString](#shellstringstr) containing the absolute path to -`command`. - - -### exit(code) - -Exits the current process with the given exit `code`. - -### error() - -Tests if error occurred in the last command. Returns a truthy value if an -error returned, or a falsy value otherwise. - -**Note**: do not rely on the -return value to be an error message. If you need the last error message, use -the `.stderr` attribute from the last command's return value instead. - - -### errorCode() - -Returns the error code from the last command. - - -### ShellString(str) - -Examples: - -```javascript -var foo = new ShellString('hello world'); -``` - -This is a dedicated type returned by most ShellJS methods, which wraps a -string (or array) value. This has all the string (or array) methods, but -also exposes extra methods: [`.to()`](#shellstringprototypetofile), -[`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods -(ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string -by calling `.toString()`. - -This type also exposes the corresponding command's stdout, stderr, and -return status code via the `.stdout` (string), `.stderr` (string), and -`.code` (number) properties respectively. - - -### env['VAR_NAME'] - -Object containing environment variables (both getter and setter). Shortcut -to `process.env`. - -### Pipes - -Examples: - -```javascript -grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); -echo("files with o's in the name:\n" + ls().grep('o')); -cat('test.js').exec('node'); // pipe to exec() call -``` - -Commands can send their output to another command in a pipe-like fashion. -`sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand -side of a pipe. Pipes can be chained. - -## Configuration - - -### config.silent - -Example: - -```javascript -var sh = require('shelljs'); -var silentState = sh.config.silent; // save old silent state -sh.config.silent = true; -/* ... */ -sh.config.silent = silentState; // restore old silent state -``` - -Suppresses all command output if `true`, except for `echo()` calls. -Default is `false`. - -### config.fatal - -Example: - -```javascript -require('shelljs/global'); -config.fatal = true; // or set('-e'); -cp('this_file_does_not_exist', '/dev/null'); // throws Error here -/* more commands... */ -``` - -If `true`, the script will throw a Javascript error when any shell.js -command encounters an error. Default is `false`. This is analogous to -Bash's `set -e`. 
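-
-Because a fatal error is thrown as a normal JavaScript `Error`, a script can
-recover from it with `try`/`catch`. A minimal sketch, using only the documented
-API and the same placeholder paths as the example above (not an official
-recipe from the ShellJS docs):
-
-```javascript
-require('shelljs/global');
-
-config.fatal = true;
-try {
-  cp('this_file_does_not_exist', '/dev/null'); // throws here because of config.fatal
-} catch (e) {
-  console.error('copy failed: ' + e.message);
-}
-
-// With config.fatal left at its default (false), check error()/errorCode()
-// and the returned ShellString instead:
-config.fatal = false;
-var result = cp('this_file_does_not_exist', '/dev/null');
-if (error()) {
-  console.error('copy failed (code ' + errorCode() + '): ' + result.stderr);
-}
-```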
- -### config.verbose - -Example: - -```javascript -config.verbose = true; // or set('-v'); -cd('dir/'); -rm('-rf', 'foo.txt', 'bar.txt'); -exec('echo hello'); -``` - -Will print each command as follows: - -``` -cd dir/ -rm -rf foo.txt bar.txt -exec echo hello -``` - -### config.globOptions (deprecated) - -**Deprecated**: we recommend that you do not edit `config.globOptions`. -Support for this configuration option may be changed or removed in a future -ShellJS release. - -**Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS -v0.9.x, `config.globOptions` is compatible with `fast-glob`. - -Example: - -```javascript -config.globOptions = {nodir: true}; -``` - -`config.globOptions` changes how ShellJS expands glob (wildcard) -expressions. See -[fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) -for available options. Be aware that modifying `config.globOptions` **may -break ShellJS functionality.** - -### config.reset() - -Example: - -```javascript -var shell = require('shelljs'); -// Make changes to shell.config, and do stuff... -/* ... */ -shell.config.reset(); // reset to original state -// Do more stuff, but with original settings -/* ... */ -``` - -Reset `shell.config` to the defaults: - -```javascript -{ - fatal: false, - globOptions: {}, - maxdepth: 255, - noglob: false, - silent: false, - verbose: false, -} -``` - -## Team - -| [![Nate Fischer](https://avatars.githubusercontent.com/u/5801521?s=130)](https://github.com/nfischer) | -|:---:| -| [Nate Fischer](https://github.com/nfischer) | diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js deleted file mode 100644 index e061f5a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/global.js +++ /dev/null @@ -1,15 +0,0 @@ -/* eslint no-extend-native: 0 */ -var shell = require('./shell'); -var common = require('./src/common'); - -Object.keys(shell).forEach(function (cmd) { - global[cmd] = shell[cmd]; -}); - -var _to = require('./src/to'); - -String.prototype.to = common.wrap('to', _to); - -var _toEnd = require('./src/toEnd'); - -String.prototype.toEnd = common.wrap('toEnd', _toEnd); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js deleted file mode 100644 index a8438c8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/make.js +++ /dev/null @@ -1,57 +0,0 @@ -require('./global'); - -global.config.fatal = true; -global.target = {}; - -var args = process.argv.slice(2), - targetArgs, - dashesLoc = args.indexOf('--'); - -// split args, everything after -- if only for targets -if (dashesLoc > -1) { - targetArgs = args.slice(dashesLoc + 1, args.length); - args = args.slice(0, dashesLoc); -} - -// This ensures we only execute the script targets after the entire script has -// been evaluated -setTimeout(function() { - var t; - - if (args.length === 1 && args[0] === '--help') { - console.log('Available targets:'); - for (t in global.target) - console.log(' ' + t); - return; - } - - // Wrap targets to prevent duplicate execution - for (t in global.target) { - (function(t, oldTarget){ - - // Wrap it - global.target[t] = function() { - if (!oldTarget.done){ - oldTarget.done = true; - oldTarget.result = oldTarget.apply(oldTarget, arguments); - } - return oldTarget.result; - }; - - })(t, global.target[t]); - } - - // Execute desired targets 
- if (args.length > 0) { - args.forEach(function(arg) { - if (arg in global.target) - global.target[arg](targetArgs); - else { - console.log('no such target: ' + arg); - } - }); - } else if ('all' in global.target) { - global.target.all(targetArgs); - } - -}, 0); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json deleted file mode 100644 index a5c3299..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/package.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "name": "shelljs", - "version": "0.10.0", - "description": "Portable Unix shell commands for Node.js", - "keywords": [ - "shelljs", - "bash", - "unix", - "shell", - "makefile", - "make", - "jake", - "synchronous" - ], - "contributors": [ - "Nate Fischer (https://github.com/nfischer)", - "Brandon Freitag (https://github.com/freitagbr)" - ], - "repository": { - "type": "git", - "url": "git://github.com/shelljs/shelljs.git" - }, - "license": "BSD-3-Clause", - "homepage": "http://github.com/shelljs/shelljs", - "main": "./shell.js", - "exports": { - ".": "./shell.js", - "./global": "./global.js", - "./global.js": "./global.js", - "./make": "./make.js", - "./make.js": "./make.js", - "./package": "./package.json", - "./package.json": "./package.json", - "./plugin": "./plugin.js", - "./plugin.js": "./plugin.js" - }, - "files": [ - "global.js", - "make.js", - "plugin.js", - "shell.js", - "src" - ], - "scripts": { - "check-node-support": "node scripts/check-node-support", - "posttest": "npm run lint", - "test": "ava", - "test-with-coverage": "nyc --reporter=text --reporter=lcov ava", - "gendocs": "node scripts/generate-docs", - "lint": "eslint .", - "after-travis": "travis-check-changes", - "changelog": "shelljs-changelog", - "release:major": "shelljs-release major", - "release:minor": "shelljs-release minor", - "release:patch": "shelljs-release patch" - }, - "dependencies": { - "execa": "^5.1.1", - "fast-glob": "^3.3.2" - }, - "ava": { - "serial": true, - "workerThreads": false, - "powerAssert": false, - "files": [ - "test/*.js" - ], - "helpers": [ - "test/resources/**", - "test/utils/**" - ] - }, - "devDependencies": { - "ava": "^6.2.0", - "chalk": "^4.1.2", - "coffee-script": "^1.12.7", - "eslint": "^8.2.0", - "eslint-config-airbnb-base": "^15.0.0", - "eslint-plugin-import": "^2.31.0", - "js-yaml": "^4.1.0", - "nyc": "^17.1.0", - "shelljs-changelog": "^0.2.6", - "shelljs-release": "^0.5.3", - "shx": "^0.4.0", - "travis-check-changes": "^0.5.1" - }, - "engines": { - "node": ">=18" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js deleted file mode 100644 index 2e15850..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/plugin.js +++ /dev/null @@ -1,16 +0,0 @@ -// Various utilities exposed to plugins - -require('./shell'); // Create the ShellJS instance (mandatory) - -var common = require('./src/common'); - -var exportedAttributes = [ - 'error', // For signaling errors from within commands - 'parseOptions', // For custom option parsing - 'readFromPipe', // For commands with the .canReceivePipe attribute - 'register', // For registering plugins -]; - -exportedAttributes.forEach(function (attr) { - exports[attr] = common[attr]; -}); diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js 
b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js deleted file mode 100644 index 8a3a67d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/shell.js +++ /dev/null @@ -1,216 +0,0 @@ -// -// ShellJS -// Unix shell commands on top of Node's API -// -// Copyright (c) 2012 Artur Adib -// http://github.com/shelljs/shelljs -// - -var common = require('./src/common'); - -module.exports = common.shell; - -//@ -//@ All commands run synchronously, unless otherwise stated. -//@ All commands accept standard bash globbing characters (`*`, `?`, etc.), -//@ compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). -//@ -//@ For less-commonly used commands and features, please check out our [wiki -//@ page](https://github.com/shelljs/shelljs/wiki). -//@ - -// Include the docs for all the default commands -//@commands - -// Load all default commands. We import these for their side effect of loading -// using the plugin architecture via `common.register()`. -require('./src/cat'); -require('./src/cd'); -require('./src/chmod'); -require('./src/cmd'); -require('./src/cp'); -require('./src/dirs'); -require('./src/echo'); -require('./src/exec'); -require('./src/exec-child'); // A hint to the bundler to keep exec-child.js -require('./src/find'); -require('./src/grep'); -require('./src/head'); -require('./src/ln'); -require('./src/ls'); -require('./src/mkdir'); -require('./src/mv'); -require('./src/popd'); -require('./src/pushd'); -require('./src/pwd'); -require('./src/rm'); -require('./src/sed'); -require('./src/set'); -require('./src/sort'); -require('./src/tail'); -require('./src/tempdir'); -require('./src/test'); -require('./src/to'); -require('./src/toEnd'); -require('./src/touch'); -require('./src/uniq'); -require('./src/which'); - -//@ -//@ ### exit(code) -//@ -//@ Exits the current process with the given exit `code`. -module.exports.exit = function exit(code) { - common.state.error = null; - common.state.errorCode = 0; - if (code) { - common.error('exit', { - continue: true, - code, - prefix: '', - silent: true, - fatal: false, - }); - process.exit(code); - } else { - process.exit(); - } -}; - -//@include ./src/error.js -module.exports.error = require('./src/error'); - -//@include ./src/errorCode.js -module.exports.errorCode = require('./src/errorCode'); - -//@include ./src/common.js -module.exports.ShellString = common.ShellString; - -//@ -//@ ### env['VAR_NAME'] -//@ -//@ Object containing environment variables (both getter and setter). Shortcut -//@ to `process.env`. -module.exports.env = process.env; - -//@ -//@ ### Pipes -//@ -//@ Examples: -//@ -//@ ```javascript -//@ grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); -//@ echo("files with o's in the name:\n" + ls().grep('o')); -//@ cat('test.js').exec('node'); // pipe to exec() call -//@ ``` -//@ -//@ Commands can send their output to another command in a pipe-like fashion. -//@ `sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand -//@ side of a pipe. Pipes can be chained. - -//@ -//@ ## Configuration -//@ - -module.exports.config = common.config; - -//@ -//@ ### config.silent -//@ -//@ Example: -//@ -//@ ```javascript -//@ var sh = require('shelljs'); -//@ var silentState = sh.config.silent; // save old silent state -//@ sh.config.silent = true; -//@ /* ... */ -//@ sh.config.silent = silentState; // restore old silent state -//@ ``` -//@ -//@ Suppresses all command output if `true`, except for `echo()` calls. -//@ Default is `false`. 
- -//@ -//@ ### config.fatal -//@ -//@ Example: -//@ -//@ ```javascript -//@ require('shelljs/global'); -//@ config.fatal = true; // or set('-e'); -//@ cp('this_file_does_not_exist', '/dev/null'); // throws Error here -//@ /* more commands... */ -//@ ``` -//@ -//@ If `true`, the script will throw a Javascript error when any shell.js -//@ command encounters an error. Default is `false`. This is analogous to -//@ Bash's `set -e`. - -//@ -//@ ### config.verbose -//@ -//@ Example: -//@ -//@ ```javascript -//@ config.verbose = true; // or set('-v'); -//@ cd('dir/'); -//@ rm('-rf', 'foo.txt', 'bar.txt'); -//@ exec('echo hello'); -//@ ``` -//@ -//@ Will print each command as follows: -//@ -//@ ``` -//@ cd dir/ -//@ rm -rf foo.txt bar.txt -//@ exec echo hello -//@ ``` - -//@ -//@ ### config.globOptions (deprecated) -//@ -//@ **Deprecated**: we recommend that you do not edit `config.globOptions`. -//@ Support for this configuration option may be changed or removed in a future -//@ ShellJS release. -//@ -//@ **Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS -//@ v0.9.x, `config.globOptions` is compatible with `fast-glob`. -//@ -//@ Example: -//@ -//@ ```javascript -//@ config.globOptions = {nodir: true}; -//@ ``` -//@ -//@ `config.globOptions` changes how ShellJS expands glob (wildcard) -//@ expressions. See -//@ [fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) -//@ for available options. Be aware that modifying `config.globOptions` **may -//@ break ShellJS functionality.** - -//@ -//@ ### config.reset() -//@ -//@ Example: -//@ -//@ ```javascript -//@ var shell = require('shelljs'); -//@ // Make changes to shell.config, and do stuff... -//@ /* ... */ -//@ shell.config.reset(); // reset to original state -//@ // Do more stuff, but with original settings -//@ /* ... */ -//@ ``` -//@ -//@ Reset `shell.config` to the defaults: -//@ -//@ ```javascript -//@ { -//@ fatal: false, -//@ globOptions: {}, -//@ maxdepth: 255, -//@ noglob: false, -//@ silent: false, -//@ verbose: false, -//@ } -//@ ``` diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js deleted file mode 100644 index ca264a9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cat.js +++ /dev/null @@ -1,76 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('cat', _cat, { - canReceivePipe: true, - cmdOptions: { - 'n': 'number', - }, -}); - -//@ -//@ ### cat([options,] file [, file ...]) -//@ ### cat([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-n`: number all output lines -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var str = cat('file*.txt'); -//@ var str = cat('file1', 'file2'); -//@ var str = cat(['file1', 'file2']); // same as above -//@ ``` -//@ -//@ Returns a [ShellString](#shellstringstr) containing the given file, or a -//@ concatenated string containing the files if more than one file is given (a -//@ new line character is introduced between each file). 
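-// Implementation note (editorial): `cat` can also receive piped input (see
-// canReceivePipe above); any piped text is prepended before the concatenated
-// file contents. File arguments are re-read from `arguments`, so
-// cat('a', 'b') and cat(['a', 'b']) behave the same once wrap() has
-// flattened array arguments.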
-function _cat(options, files) { - var cat = common.readFromPipe(); - - if (!files && !cat) common.error('no paths given'); - - files = [].slice.call(arguments, 1); - - files.forEach(function (file) { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file); - } else if (common.statFollowLinks(file).isDirectory()) { - common.error(file + ': Is a directory'); - } - - cat += fs.readFileSync(file, 'utf8'); - }); - - if (options.number) { - cat = addNumbers(cat); - } - - return cat; -} -module.exports = _cat; - -function addNumbers(cat) { - var lines = cat.split('\n'); - var lastLine = lines.pop(); - - lines = lines.map(function (line, i) { - return numberedLine(i + 1, line); - }); - - if (lastLine.length) { - lastLine = numberedLine(lines.length + 1, lastLine); - } - lines.push(lastLine); - - return lines.join('\n'); -} - -function numberedLine(n, line) { - // GNU cat use six pad start number + tab. See http://lingrok.org/xref/coreutils/src/cat.c#57 - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/padStart - var number = (' ' + n).slice(-6) + '\t'; - return number + line; -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js deleted file mode 100644 index 1c6e73f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cd.js +++ /dev/null @@ -1,40 +0,0 @@ -var os = require('os'); -var common = require('./common'); - -common.register('cd', _cd, {}); - -//@ -//@ ### cd([dir]) -//@ -//@ Changes to directory `dir` for the duration of the script. Changes to home -//@ directory if no argument is supplied. Returns a -//@ [ShellString](#shellstringstr) to indicate success or failure. -function _cd(options, dir) { - if (!dir) dir = os.homedir(); - - if (dir === '-') { - if (!process.env.OLDPWD) { - common.error('could not find previous directory'); - } else { - dir = process.env.OLDPWD; - } - } - - try { - var curDir = process.cwd(); - process.chdir(dir); - process.env.OLDPWD = curDir; - } catch (e) { - // something went wrong, let's figure out the error - var err; - try { - common.statFollowLinks(dir); // if this succeeds, it must be some sort of file - err = 'not a directory: ' + dir; - } catch (e2) { - err = 'no such file or directory: ' + dir; - } - if (err) common.error(err); - } - return ''; -} -module.exports = _cd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js deleted file mode 100644 index b930cc7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/chmod.js +++ /dev/null @@ -1,222 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -var PERMS = (function (base) { - return { - OTHER_EXEC: base.EXEC, - OTHER_WRITE: base.WRITE, - OTHER_READ: base.READ, - - GROUP_EXEC: base.EXEC << 3, - GROUP_WRITE: base.WRITE << 3, - GROUP_READ: base.READ << 3, - - OWNER_EXEC: base.EXEC << 6, - OWNER_WRITE: base.WRITE << 6, - OWNER_READ: base.READ << 6, - - // Literal octal numbers are apparently not allowed in "strict" javascript. 
- STICKY: parseInt('01000', 8), - SETGID: parseInt('02000', 8), - SETUID: parseInt('04000', 8), - - TYPE_MASK: parseInt('0770000', 8), - }; -}({ - EXEC: 1, - WRITE: 2, - READ: 4, -})); - -common.register('chmod', _chmod, { -}); - -//@ -//@ ### chmod([options,] octal_mode || octal_string, file) -//@ ### chmod([options,] symbolic_mode, file) -//@ -//@ Available options: -//@ -//@ + `-v`: output a diagnostic for every file processed//@ -//@ + `-c`: like verbose, but report only when a change is made//@ -//@ + `-R`: change files and directories recursively//@ -//@ -//@ Examples: -//@ -//@ ```javascript -//@ chmod(755, '/Users/brandon'); -//@ chmod('755', '/Users/brandon'); // same as above -//@ chmod('u+x', '/Users/brandon'); -//@ chmod('-R', 'a-w', '/Users/brandon'); -//@ ``` -//@ -//@ Alters the permissions of a file or directory by either specifying the -//@ absolute permissions in octal form or expressing the changes in symbols. -//@ This command tries to mimic the POSIX behavior as much as possible. -//@ Notable exceptions: -//@ -//@ + In symbolic modes, `a-r` and `-r` are identical. No consideration is -//@ given to the `umask`. -//@ + There is no "quiet" option, since default behavior is to run silent. -//@ + Windows OS uses a very different permission model than POSIX. `chmod()` -//@ does its best on Windows, but there are limits to how file permissions can -//@ be set. Note that WSL (Windows subsystem for Linux) **does** follow POSIX, -//@ so cross-platform compatibility should not be a concern there. -//@ -//@ Returns a [ShellString](#shellstringstr) indicating success or failure. -function _chmod(options, mode, filePattern) { - if (!filePattern) { - if (options.length > 0 && options.charAt(0) === '-') { - // Special case where the specified file permissions started with - to subtract perms, which - // get picked up by the option parser as command flags. - // If we are down by one argument and options starts with -, shift everything over. - [].unshift.call(arguments, ''); - } else { - common.error('You must specify a file.'); - } - } - - options = common.parseOptions(options, { - 'R': 'recursive', - 'c': 'changes', - 'v': 'verbose', - }); - - filePattern = [].slice.call(arguments, 2); - - var files; - - // TODO: replace this with a call to common.expand() - if (options.recursive) { - files = []; - filePattern.forEach(function addFile(expandedFile) { - var stat = common.statNoFollowLinks(expandedFile); - - if (!stat.isSymbolicLink()) { - files.push(expandedFile); - - if (stat.isDirectory()) { // intentionally does not follow symlinks. - fs.readdirSync(expandedFile).forEach(function (child) { - addFile(expandedFile + '/' + child); - }); - } - } - }); - } else { - files = filePattern; - } - - files.forEach(function innerChmod(file) { - file = path.resolve(file); - if (!fs.existsSync(file)) { - common.error('File not found: ' + file); - } - - // When recursing, don't follow symlinks. 
- if (options.recursive && common.statNoFollowLinks(file).isSymbolicLink()) { - return; - } - - var stat = common.statFollowLinks(file); - var isDir = stat.isDirectory(); - var perms = stat.mode; - var type = perms & PERMS.TYPE_MASK; - - var newPerms = perms; - - if (Number.isNaN(parseInt(mode, 8))) { - // parse options - mode.split(',').forEach(function (symbolicMode) { - var pattern = /([ugoa]*)([=+-])([rwxXst]*)/i; - var matches = pattern.exec(symbolicMode); - - if (matches) { - var applyTo = matches[1]; - var operator = matches[2]; - var change = matches[3]; - - var changeOwner = applyTo.includes('u') || applyTo === 'a' || applyTo === ''; - var changeGroup = applyTo.includes('g') || applyTo === 'a' || applyTo === ''; - var changeOther = applyTo.includes('o') || applyTo === 'a' || applyTo === ''; - - var changeRead = change.includes('r'); - var changeWrite = change.includes('w'); - var changeExec = change.includes('x'); - var changeExecDir = change.includes('X'); - var changeSticky = change.includes('t'); - var changeSetuid = change.includes('s'); - - if (changeExecDir && isDir) { - changeExec = true; - } - - var mask = 0; - if (changeOwner) { - mask |= (changeRead ? PERMS.OWNER_READ : 0) + (changeWrite ? PERMS.OWNER_WRITE : 0) + (changeExec ? PERMS.OWNER_EXEC : 0) + (changeSetuid ? PERMS.SETUID : 0); - } - if (changeGroup) { - mask |= (changeRead ? PERMS.GROUP_READ : 0) + (changeWrite ? PERMS.GROUP_WRITE : 0) + (changeExec ? PERMS.GROUP_EXEC : 0) + (changeSetuid ? PERMS.SETGID : 0); - } - if (changeOther) { - mask |= (changeRead ? PERMS.OTHER_READ : 0) + (changeWrite ? PERMS.OTHER_WRITE : 0) + (changeExec ? PERMS.OTHER_EXEC : 0); - } - - // Sticky bit is special - it's not tied to user, group or other. - if (changeSticky) { - mask |= PERMS.STICKY; - } - - switch (operator) { - case '+': - newPerms |= mask; - break; - - case '-': - newPerms &= ~mask; - break; - - case '=': - newPerms = type + mask; - - // According to POSIX, when using = to explicitly set the - // permissions, setuid and setgid can never be cleared. - if (common.statFollowLinks(file).isDirectory()) { - newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; - } - break; - default: - common.error('Could not recognize operator: `' + operator + '`'); - } - - if (options.verbose) { - console.log(file + ' -> ' + newPerms.toString(8)); - } - - if (perms !== newPerms) { - if (!options.verbose && options.changes) { - console.log(file + ' -> ' + newPerms.toString(8)); - } - fs.chmodSync(file, newPerms); - perms = newPerms; // for the next round of changes! - } - } else { - common.error('Invalid symbolic mode change: ' + symbolicMode); - } - }); - } else { - // they gave us a full number - newPerms = type + parseInt(mode, 8); - - // POSIX rules are that setuid and setgid can only be added using numeric - // form, but not cleared. 
- if (common.statFollowLinks(file).isDirectory()) { - newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; - } - - fs.chmodSync(file, newPerms); - } - }); - return ''; -} -module.exports = _chmod; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js deleted file mode 100644 index a00d6c4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cmd.js +++ /dev/null @@ -1,138 +0,0 @@ -var execa = require('execa'); -var common = require('./common'); - -var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; -var COMMAND_NOT_FOUND_ERROR_CODE = 127; - -common.register('cmd', _cmd, { - cmdOptions: null, - globStart: 1, - canReceivePipe: true, - wrapOutput: true, -}); - -function isCommandNotFound(execaResult) { - if (process.platform === 'win32') { - var str = 'is not recognized as an internal or external command'; - return execaResult.exitCode && execaResult.stderr.includes(str); - } - return execaResult.failed && execaResult.code === 'ENOENT'; -} - -function isExecaInternalError(result) { - if (typeof result.stdout !== 'string') return true; - if (typeof result.stderr !== 'string') return true; - if (typeof result.exitCode !== 'number') return true; - if (result.exitCode === 0 && result.failed) return true; - // Otherwise assume this executed correctly. The command may still have exited - // with non-zero status, but that's not due to anything execa did. - return false; -} - -//@ -//@ ### cmd(arg1[, arg2, ...] [, options]) -//@ -//@ Available options: -//@ -//@ + `cwd: directoryPath`: change the current working directory only for this -//@ cmd() invocation. -//@ + `maxBuffer: num`: Raise or decrease the default buffer size for -//@ stdout/stderr. -//@ + `timeout`: Change the default timeout. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var version = cmd('node', '--version').stdout; -//@ cmd('git', 'commit', '-am', `Add suport for node ${version}`); -//@ console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) -//@ console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) -//@ ``` -//@ -//@ Executes the given command synchronously. This is intended as an easier -//@ alternative for [exec()](#execcommand--options--callback), with better -//@ security around globbing, comamnd injection, and variable expansion. This is -//@ guaranteed to only run one external command, and won't give special -//@ treatment for any shell characters (ex. this treats `|` as a literal -//@ character, not as a shell pipeline). -//@ This returns a [ShellString](#shellstringstr). -//@ -//@ By default, this performs globbing on all platforms, but you can disable -//@ this with `set('-f')`. -//@ -//@ This **does not** support asynchronous mode. If you need asynchronous -//@ command execution, check out [execa](https://www.npmjs.com/package/execa) or -//@ the node builtin `child_process.execFile()` instead. -function _cmd(options, command, commandArgs, userOptions) { - if (!command) { - common.error('Must specify a non-empty string as a command'); - } - - // `options` will usually not have a value: it's added by our commandline flag - // parsing engine. - commandArgs = [].slice.call(arguments, 2); - - // `userOptions` may or may not be provided. We need to check the last - // argument. If it's an object, assume it's meant to be passed as - // userOptions (since ShellStrings are already flattened to strings). 
- if (commandArgs.length === 0) { - userOptions = {}; - } else { - var lastArg = commandArgs.pop(); - if (common.isObject(lastArg)) { - userOptions = lastArg; - } else { - userOptions = {}; - commandArgs.push(lastArg); - } - } - - var pipe = common.readFromPipe(); - - // Some of our defaults differ from execa's defaults. These can be overridden - // by the user. - var defaultOptions = { - maxBuffer: DEFAULT_MAXBUFFER_SIZE, - stripFinalNewline: false, // Preserve trailing newlines for consistency with unix. - reject: false, // Use ShellJS's error handling system. - }; - - // For other options, we forbid the user from overriding them (either for - // correctness or security). - var requiredOptions = { - input: pipe, - shell: false, - }; - - var execaOptions = - Object.assign(defaultOptions, userOptions, requiredOptions); - - var result = execa.sync(command, commandArgs, execaOptions); - var stdout; - var stderr; - var code; - if (isCommandNotFound(result)) { - // This can happen if `command` is not an executable binary, or possibly - // under other conditions. - stdout = ''; - stderr = "'" + command + "': command not found"; - code = COMMAND_NOT_FOUND_ERROR_CODE; - } else if (isExecaInternalError(result)) { - // Catch-all: execa tried to run `command` but it encountered some error - // (ex. maxBuffer, timeout). - stdout = result.stdout || ''; - stderr = result.stderr || - `'${command}' encountered an error during execution`; - code = result.exitCode !== undefined && result.exitCode > 0 ? result.exitCode : 1; - } else { - // Normal exit: execa was able to execute `command` and get a return value. - stdout = result.stdout.toString(); - stderr = result.stderr.toString(); - code = result.exitCode; - } - - // Pass `continue: true` so we can specify a value for stdout. - if (code) common.error(stderr, code, { silent: true, continue: true }); - return new common.ShellString(stdout, stderr, code); -} -module.exports = _cmd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js deleted file mode 100644 index b9ffeda..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/common.js +++ /dev/null @@ -1,545 +0,0 @@ -// Ignore warning about 'new String()' and use of the Buffer constructor -/* eslint no-new-wrappers: "off", - no-buffer-constructor: "off" */ - -'use strict'; - -var os = require('os'); -var fs = require('fs'); -var glob = require('fast-glob'); - -var shell = {}; -exports.shell = shell; - -var shellMethods = Object.create(shell); - -exports.extend = Object.assign; - -// Check if we're running under electron -var isElectron = Boolean(process.versions.electron); - -// Module globals (assume no execPath by default) -var DEFAULT_CONFIG = { - fatal: false, - globOptions: {}, - maxdepth: 255, - noglob: false, - silent: false, - verbose: false, - execPath: null, - bufLength: 64 * 1024, // 64KB -}; - -var config = { - reset() { - Object.assign(this, DEFAULT_CONFIG); - if (!isElectron) { - this.execPath = process.execPath; - } - }, - resetForTesting() { - this.reset(); - this.silent = true; - }, -}; - -config.reset(); -exports.config = config; - -// Note: commands should generally consider these as read-only values. 
-var state = { - error: null, - errorCode: 0, - currentCmd: 'shell.js', -}; -exports.state = state; - -delete process.env.OLDPWD; // initially, there's no previous directory - -// Reliably test if something is any sort of javascript object -function isObject(a) { - return typeof a === 'object' && a !== null; -} -exports.isObject = isObject; - -function log() { - /* istanbul ignore next */ - if (!config.silent) { - console.error.apply(console, arguments); - } -} -exports.log = log; - -// Converts strings to be equivalent across all platforms. Primarily responsible -// for making sure we use '/' instead of '\' as path separators, but this may be -// expanded in the future if necessary -function convertErrorOutput(msg) { - if (typeof msg !== 'string') { - throw new TypeError('input must be a string'); - } - return msg.replace(/\\/g, '/'); -} -exports.convertErrorOutput = convertErrorOutput; - -// An exception class to help propagate command errors (e.g., non-zero exit -// status) up to the top-level. {@param value} should be a ShellString. -class CommandError extends Error { - constructor(value) { - super(value.toString()); - this.returnValue = value; - } -} -exports.CommandError = CommandError; // visible for testing - -// Shows error message. Throws if fatal is true (defaults to config.fatal, overridable with options.fatal) -function error(msg, _code, options) { - // Validate input - if (typeof msg !== 'string') throw new Error('msg must be a string'); - - var DEFAULT_OPTIONS = { - continue: false, - code: 1, - prefix: state.currentCmd + ': ', - silent: false, - fatal: config.fatal, - }; - - if (typeof _code === 'number' && isObject(options)) { - options.code = _code; - } else if (isObject(_code)) { // no 'code' - options = _code; - } else if (typeof _code === 'number') { // no 'options' - options = { code: _code }; - } else if (typeof _code !== 'number') { // only 'msg' - options = {}; - } - options = Object.assign({}, DEFAULT_OPTIONS, options); - - if (!state.errorCode) state.errorCode = options.code; - - var logEntry = convertErrorOutput(options.prefix + msg); - state.error = state.error ? state.error + '\n' : ''; - state.error += logEntry; - - // Throw an error, or log the entry - if (options.fatal) { - var err = new Error(logEntry); - err.code = options.code; - throw err; - } - if (msg.length > 0 && !options.silent) log(logEntry); - - if (!options.continue) { - throw new CommandError(new ShellString('', state.error, state.errorCode)); - } -} -exports.error = error; - -//@ -//@ ### ShellString(str) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var foo = new ShellString('hello world'); -//@ ``` -//@ -//@ This is a dedicated type returned by most ShellJS methods, which wraps a -//@ string (or array) value. This has all the string (or array) methods, but -//@ also exposes extra methods: [`.to()`](#shellstringprototypetofile), -//@ [`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods -//@ (ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string -//@ by calling `.toString()`. -//@ -//@ This type also exposes the corresponding command's stdout, stderr, and -//@ return status code via the `.stdout` (string), `.stderr` (string), and -//@ `.code` (number) properties respectively. 
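-// Implementation note (editorial): when `stdout` is an array (e.g. the result
-// of ls() or find()), the array itself is decorated with .stdout/.stderr/.code
-// and the pipe-able methods; when it is a string, a boxed String object is
-// used so the extra properties can be attached to it.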
-function ShellString(stdout, stderr, code) { - var that; - if (stdout instanceof Array) { - that = stdout; - that.stdout = stdout.join('\n'); - if (stdout.length > 0) that.stdout += '\n'; - } else { - that = new String(stdout); - that.stdout = stdout; - } - that.stderr = stderr; - that.code = code; - // A list of all commands that can appear on the right-hand side of a pipe - // (populated by calls to common.wrap()) - pipeMethods.forEach(function (cmd) { - that[cmd] = shellMethods[cmd].bind(that); - }); - return that; -} - -exports.ShellString = ShellString; - -// Returns {'alice': true, 'bob': false} when passed a string and dictionary as follows: -// parseOptions('-a', {'a':'alice', 'b':'bob'}); -// Returns {'reference': 'string-value', 'bob': false} when passed two dictionaries of the form: -// parseOptions({'-r': 'string-value'}, {'r':'reference', 'b':'bob'}); -// Throws an error when passed a string that does not start with '-': -// parseOptions('a', {'a':'alice'}); // throws -function parseOptions(opt, map, errorOptions) { - errorOptions = errorOptions || {}; - // Validate input - if (typeof opt !== 'string' && !isObject(opt)) { - throw new TypeError('options must be strings or key-value pairs'); - } else if (!isObject(map)) { - throw new TypeError('parseOptions() internal error: map must be an object'); - } else if (!isObject(errorOptions)) { - throw new TypeError( - 'parseOptions() internal error: errorOptions must be object', - ); - } - - if (opt === '--') { - // This means there are no options. - return {}; - } - - // All options are false by default - var options = {}; - Object.keys(map).forEach(function (letter) { - var optName = map[letter]; - if (optName[0] !== '!') { - options[optName] = false; - } - }); - - if (opt === '') return options; // defaults - - if (typeof opt === 'string') { - if (opt[0] !== '-') { - throw new Error("Options string must start with a '-'"); - } - - // e.g. chars = ['R', 'f'] - var chars = opt.slice(1).split(''); - - chars.forEach(function (c) { - if (c in map) { - var optionName = map[c]; - if (optionName[0] === '!') { - options[optionName.slice(1)] = false; - } else { - options[optionName] = true; - } - } else { - error('option not recognized: ' + c, errorOptions); - } - }); - } else { // opt is an Object - Object.keys(opt).forEach(function (key) { - if (key[0] === '-') { - // key is a string of the form '-r', '-d', etc. - var c = key[1]; - if (c in map) { - var optionName = map[c]; - options[optionName] = opt[key]; // assign the given value - } else { - error('option not recognized: ' + c, errorOptions); - } - } else if (key in options) { - // key is a "long option", so it should be the same - options[key] = opt[key]; - } else { - error('option not recognized: {' + key + ':...}', errorOptions); - } - }); - } - return options; -} -exports.parseOptions = parseOptions; - -function globOptions() { - // These options are just to make fast-glob be compatible with POSIX (bash) - // wildcard behavior. - var defaultGlobOptions = { - onlyFiles: false, - followSymbolicLinks: false, - }; - - var newGlobOptions = Object.assign({}, config.globOptions); - var optionRenames = { - // node-glob's 'nodir' is not quote the same as fast-glob's 'onlyFiles'. - // Compatibility for this is implemented at the call site. 
- mark: 'markDirectories', - matchBase: 'baseNameMatch', - }; - Object.keys(optionRenames).forEach(function (oldKey) { - var newKey = optionRenames[oldKey]; - if (oldKey in config.globOptions) { - newGlobOptions[newKey] = config.globOptions[oldKey]; - } - }); - var invertedOptionRenames = { - nobrace: 'braceExpansion', - noglobstar: 'globstar', - noext: 'extglob', - nocase: 'caseSensitiveMatch', - }; - Object.keys(invertedOptionRenames).forEach(function (oldKey) { - var newKey = invertedOptionRenames[oldKey]; - if (oldKey in config.globOptions) { - newGlobOptions[newKey] = !config.globOptions[oldKey]; - } - }); - return Object.assign({}, defaultGlobOptions, newGlobOptions); -} - -// Expands wildcards with matching (ie. existing) file names. -// For example: -// expand(['file*.js']) = ['file1.js', 'file2.js', ...] -// (if the files 'file1.js', 'file2.js', etc, exist in the current dir) -function expand(list) { - if (!Array.isArray(list)) { - throw new TypeError('must be an array'); - } - var expanded = []; - list.forEach(function (listEl) { - // Don't expand non-strings - if (typeof listEl !== 'string') { - expanded.push(listEl); - } else { - var ret; - var globOpts = globOptions(); - try { - ret = glob.sync(listEl, globOpts); - } catch (e) { - // if glob fails, interpret the string literally - ret = [listEl]; - } - // if nothing matched, interpret the string literally - ret = ret.length > 0 ? ret.sort() : [listEl]; - if (globOpts.nodir) { - ret = ret.filter(function (file) { - return !statNoFollowLinks(file).isDirectory(); - }); - } - expanded = expanded.concat(ret); - } - }); - return expanded; -} -exports.expand = expand; - -// Normalizes Buffer creation, using Buffer.alloc if possible. -// Also provides a good default buffer length for most use cases. -var buffer = typeof Buffer.alloc === 'function' ? - function (len) { - return Buffer.alloc(len || config.bufLength); - } : - function (len) { - return new Buffer(len || config.bufLength); - }; -exports.buffer = buffer; - -// Normalizes _unlinkSync() across platforms to match Unix behavior, i.e. -// file can be unlinked even if it's read-only, see https://github.com/joyent/node/issues/3006 -function unlinkSync(file) { - try { - fs.unlinkSync(file); - } catch (e) { - // Try to override file permission - /* istanbul ignore next */ - if (e.code === 'EPERM') { - fs.chmodSync(file, '0666'); - fs.unlinkSync(file); - } else { - throw e; - } - } -} -exports.unlinkSync = unlinkSync; - -// wrappers around common.statFollowLinks and common.statNoFollowLinks that clarify intent -// and improve readability -function statFollowLinks() { - return fs.statSync.apply(fs, arguments); -} -exports.statFollowLinks = statFollowLinks; - -function statNoFollowLinks() { - return fs.lstatSync.apply(fs, arguments); -} -exports.statNoFollowLinks = statNoFollowLinks; - -// e.g. 'shelljs_a5f185d0443ca...' 
-function randomFileName() { - function randomHash(count) { - if (count === 1) { - return parseInt(16 * Math.random(), 10).toString(16); - } - var hash = ''; - for (var i = 0; i < count; i++) { - hash += randomHash(1); - } - return hash; - } - - return 'shelljs_' + randomHash(20); -} -exports.randomFileName = randomFileName; - -// Common wrapper for all Unix-like commands that performs glob expansion, -// command-logging, and other nice things -function wrap(cmd, fn, options) { - options = options || {}; - return function () { - var retValue = null; - - state.currentCmd = cmd; - state.error = null; - state.errorCode = 0; - - try { - var args = [].slice.call(arguments, 0); - - // Log the command to stderr, if appropriate - if (config.verbose) { - console.error.apply(console, [cmd].concat(args)); - } - - // If this is coming from a pipe, let's set the pipedValue (otherwise, set - // it to the empty string) - state.pipedValue = (this && typeof this.stdout === 'string') ? this.stdout : ''; - - if (options.unix === false) { // this branch is for exec() - retValue = fn.apply(this, args); - } else { // and this branch is for everything else - if (isObject(args[0]) && args[0].constructor.name === 'Object') { - // a no-op, allowing the syntax `touch({'-r': file}, ...)` - } else if (args.length === 0 || typeof args[0] !== 'string' || args[0].length <= 1 || args[0][0] !== '-') { - args.unshift(''); // only add dummy option if '-option' not already present - } - - // flatten out arrays that are arguments, to make the syntax: - // `cp([file1, file2, file3], dest);` - // equivalent to: - // `cp(file1, file2, file3, dest);` - args = args.reduce(function (accum, cur) { - if (Array.isArray(cur)) { - return accum.concat(cur); - } - accum.push(cur); - return accum; - }, []); - - // Convert ShellStrings (basically just String objects) to regular strings - args = args.map(function (arg) { - if (isObject(arg) && arg.constructor.name === 'String') { - return arg.toString(); - } - return arg; - }); - - // Expand the '~' if appropriate - var homeDir = os.homedir(); - args = args.map(function (arg) { - if (typeof arg === 'string' && arg.slice(0, 2) === '~/' || arg === '~') { - return arg.replace(/^~/, homeDir); - } - return arg; - }); - - // Perform glob-expansion on all arguments after globStart, but preserve - // the arguments before it (like regexes for sed and grep) - if (!config.noglob && options.allowGlobbing === true) { - args = args.slice(0, options.globStart).concat(expand(args.slice(options.globStart))); - } - - try { - // parse options if options are provided - if (isObject(options.cmdOptions)) { - args[0] = parseOptions(args[0], options.cmdOptions); - } - - retValue = fn.apply(this, args); - } catch (e) { - /* istanbul ignore else */ - if (e instanceof CommandError) { - retValue = e.returnValue; - } else { - throw e; // this is probably a bug that should be thrown up the call stack - } - } - } - } catch (e) { - /* istanbul ignore next */ - if (!state.error) { - // If state.error hasn't been set it's an error thrown by Node, not us - probably a bug... 
- e.name = 'ShellJSInternalError'; - throw e; - } - if (config.fatal || options.handlesFatalDynamically) throw e; - } - - if (options.wrapOutput && - (typeof retValue === 'string' || Array.isArray(retValue))) { - retValue = new ShellString(retValue, state.error, state.errorCode); - } - - state.currentCmd = 'shell.js'; - return retValue; - }; -} // wrap -exports.wrap = wrap; - -// This returns all the input that is piped into the current command (or the -// empty string, if this isn't on the right-hand side of a pipe -function _readFromPipe() { - return state.pipedValue; -} -exports.readFromPipe = _readFromPipe; - -var DEFAULT_WRAP_OPTIONS = { - allowGlobbing: true, - canReceivePipe: false, - cmdOptions: null, - globStart: 1, - handlesFatalDynamically: false, - pipeOnly: false, - wrapOutput: true, - unix: true, -}; - -// This is populated during plugin registration -var pipeMethods = []; - -// Register a new ShellJS command -function _register(name, implementation, wrapOptions) { - wrapOptions = wrapOptions || {}; - - // Validate options - Object.keys(wrapOptions).forEach(function (option) { - if (!DEFAULT_WRAP_OPTIONS.hasOwnProperty(option)) { - throw new Error("Unknown option '" + option + "'"); - } - if (typeof wrapOptions[option] !== typeof DEFAULT_WRAP_OPTIONS[option]) { - throw new TypeError("Unsupported type '" + typeof wrapOptions[option] + - "' for option '" + option + "'"); - } - }); - - // If an option isn't specified, use the default - wrapOptions = Object.assign({}, DEFAULT_WRAP_OPTIONS, wrapOptions); - - if (shell.hasOwnProperty(name)) { - throw new Error('Command `' + name + '` already exists'); - } - - if (wrapOptions.pipeOnly) { - wrapOptions.canReceivePipe = true; - shellMethods[name] = wrap(name, implementation, wrapOptions); - } else { - shell[name] = wrap(name, implementation, wrapOptions); - } - - if (wrapOptions.canReceivePipe) { - pipeMethods.push(name); - } -} -exports.register = _register; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js deleted file mode 100644 index af4a0a1..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/cp.js +++ /dev/null @@ -1,314 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('cp', _cp, { - cmdOptions: { - 'f': '!no_force', - 'n': 'no_force', - 'u': 'update', - 'R': 'recursive', - 'r': 'recursive', - 'L': 'followsymlink', - 'P': 'noFollowsymlink', - 'p': 'preserve', - }, - wrapOutput: false, -}); - -// Buffered file copy, synchronous -// (Using readFileSync() + writeFileSync() could easily cause a memory overflow -// with large files) -function copyFileSync(srcFile, destFile, options) { - if (!fs.existsSync(srcFile)) { - common.error('copyFileSync: no such file or directory: ' + srcFile); - } - - var isWindows = process.platform === 'win32'; - - // Check the mtimes of the files if the '-u' flag is provided - try { - if (options.update && common.statFollowLinks(srcFile).mtime < fs.statSync(destFile).mtime) { - return; - } - } catch (e) { - // If we're here, destFile probably doesn't exist, so just do a normal copy - } - - if (common.statNoFollowLinks(srcFile).isSymbolicLink() && !options.followsymlink) { - try { - common.statNoFollowLinks(destFile); - common.unlinkSync(destFile); // re-link it - } catch (e) { - // it doesn't exist, so no work needs to be done - } - - var symlinkFull = fs.readlinkSync(srcFile); - 
fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); - } else { - var buf = common.buffer(); - var bufLength = buf.length; - var bytesRead = bufLength; - var pos = 0; - var fdr = null; - var fdw = null; - var srcStat = common.statFollowLinks(srcFile); - - try { - fdr = fs.openSync(srcFile, 'r'); - } catch (e) { - /* istanbul ignore next */ - common.error('copyFileSync: could not read src file (' + srcFile + ')'); - } - - try { - fdw = fs.openSync(destFile, 'w', srcStat.mode); - } catch (e) { - /* istanbul ignore next */ - common.error('copyFileSync: could not write to dest file (code=' + e.code + '):' + destFile); - } - - while (bytesRead === bufLength) { - bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); - fs.writeSync(fdw, buf, 0, bytesRead); - pos += bytesRead; - } - - if (options.preserve) { - fs.fchownSync(fdw, srcStat.uid, srcStat.gid); - // Note: utimesSync does not work (rounds to seconds), but futimesSync has - // millisecond precision. - fs.futimesSync(fdw, srcStat.atime, srcStat.mtime); - } - - fs.closeSync(fdr); - fs.closeSync(fdw); - } -} - -// Recursively copies 'sourceDir' into 'destDir' -// Adapted from https://github.com/ryanmcgrath/wrench-js -// -// Copyright (c) 2010 Ryan McGrath -// Copyright (c) 2012 Artur Adib -// -// Licensed under the MIT License -// http://www.opensource.org/licenses/mit-license.php -function cpdirSyncRecursive(sourceDir, destDir, currentDepth, opts) { - if (!opts) opts = {}; - - // Ensure there is not a run away recursive copy - if (currentDepth >= common.config.maxdepth) return; - currentDepth++; - - var isWindows = process.platform === 'win32'; - - // Create the directory where all our junk is moving to; read the mode/etc. of - // the source directory (we'll set this on the destDir at the end). - var checkDir = common.statFollowLinks(sourceDir); - try { - fs.mkdirSync(destDir); - } catch (e) { - // if the directory already exists, that's okay - if (e.code !== 'EEXIST') throw e; - } - - var files = fs.readdirSync(sourceDir); - - for (var i = 0; i < files.length; i++) { - var srcFile = sourceDir + '/' + files[i]; - var destFile = destDir + '/' + files[i]; - var srcFileStat = common.statNoFollowLinks(srcFile); - - var symlinkFull; - if (opts.followsymlink) { - if (cpcheckcycle(sourceDir, srcFile)) { - // Cycle link found. - console.error('Cycle link found.'); - symlinkFull = fs.readlinkSync(srcFile); - fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); - continue; - } - } - if (srcFileStat.isDirectory()) { - /* recursion this thing right on back. */ - cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); - } else if (srcFileStat.isSymbolicLink() && !opts.followsymlink) { - symlinkFull = fs.readlinkSync(srcFile); - try { - common.statNoFollowLinks(destFile); - common.unlinkSync(destFile); // re-link it - } catch (e) { - // it doesn't exist, so no work needs to be done - } - fs.symlinkSync(symlinkFull, destFile, isWindows ? 
'junction' : null); - } else if (srcFileStat.isSymbolicLink() && opts.followsymlink) { - srcFileStat = common.statFollowLinks(srcFile); - if (srcFileStat.isDirectory()) { - cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); - } else { - copyFileSync(srcFile, destFile, opts); - } - } else if (fs.existsSync(destFile) && opts.no_force) { - common.log('skipping existing file: ' + files[i]); - } else { - copyFileSync(srcFile, destFile, opts); - } - } // for files - - // finally change the mode for the newly created directory (otherwise, we - // couldn't add files to a read-only directory). - // var checkDir = common.statFollowLinks(sourceDir); - if (opts.preserve) { - fs.utimesSync(destDir, checkDir.atime, checkDir.mtime); - } - fs.chmodSync(destDir, checkDir.mode); -} // cpdirSyncRecursive - -// Checks if cureent file was created recently -function checkRecentCreated(sources, index) { - var lookedSource = sources[index]; - return sources.slice(0, index).some(function (src) { - return path.basename(src) === path.basename(lookedSource); - }); -} - -function cpcheckcycle(sourceDir, srcFile) { - var srcFileStat = common.statNoFollowLinks(srcFile); - if (srcFileStat.isSymbolicLink()) { - // Do cycle check. For example: - // $ mkdir -p 1/2/3/4 - // $ cd 1/2/3/4 - // $ ln -s ../../3 link - // $ cd ../../../.. - // $ cp -RL 1 copy - var cyclecheck = common.statFollowLinks(srcFile); - if (cyclecheck.isDirectory()) { - var sourcerealpath = fs.realpathSync(sourceDir); - var symlinkrealpath = fs.realpathSync(srcFile); - var re = new RegExp(symlinkrealpath); - if (re.test(sourcerealpath)) { - return true; - } - } - } - return false; -} - -//@ -//@ ### cp([options,] source [, source ...], dest) -//@ ### cp([options,] source_array, dest) -//@ -//@ Available options: -//@ -//@ + `-f`: force (default behavior) -//@ + `-n`: no-clobber -//@ + `-u`: only copy if `source` is newer than `dest` -//@ + `-r`, `-R`: recursive -//@ + `-L`: follow symlinks -//@ + `-P`: don't follow symlinks -//@ + `-p`: preserve file mode, ownership, and timestamps -//@ -//@ Examples: -//@ -//@ ```javascript -//@ cp('file1', 'dir1'); -//@ cp('-R', 'path/to/dir/', '~/newCopy/'); -//@ cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); -//@ cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above -//@ ``` -//@ -//@ Copies files. Returns a [ShellString](#shellstringstr) indicating success -//@ or failure. 
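-// Implementation note (editorial): the last argument is always the
-// destination; everything between the parsed options and `dest` is treated as
-// a source, so cp(opts, src1, src2, dest) and cp(opts, [src1, src2], dest)
-// are equivalent once wrap() has flattened array arguments.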
-function _cp(options, sources, dest) { - // If we're missing -R, it actually implies -L (unless -P is explicit) - if (options.followsymlink) { - options.noFollowsymlink = false; - } - if (!options.recursive && !options.noFollowsymlink) { - options.followsymlink = true; - } - - // Get sources, dest - if (arguments.length < 3) { - common.error('missing and/or '); - } else { - sources = [].slice.call(arguments, 1, arguments.length - 1); - dest = arguments[arguments.length - 1]; - } - - var destExists = fs.existsSync(dest); - var destStat = destExists && common.statFollowLinks(dest); - - // Dest is not existing dir, but multiple sources given - if ((!destExists || !destStat.isDirectory()) && sources.length > 1) { - common.error('dest is not a directory (too many sources)'); - } - - // Dest is an existing file, but -n is given - if (destExists && destStat.isFile() && options.no_force) { - return new common.ShellString('', '', 0); - } - - sources.forEach(function (src, srcIndex) { - if (!fs.existsSync(src)) { - if (src === '') src = "''"; // if src was empty string, display empty string - common.error('no such file or directory: ' + src, { continue: true }); - return; // skip file - } - var srcStat = common.statFollowLinks(src); - if (!options.noFollowsymlink && srcStat.isDirectory()) { - if (!options.recursive) { - // Non-Recursive - common.error("omitting directory '" + src + "'", { continue: true }); - } else { - // Recursive - // 'cp /a/source dest' should create 'source' in 'dest' - var newDest = (destStat && destStat.isDirectory()) ? - path.join(dest, path.basename(src)) : - dest; - - try { - common.statFollowLinks(path.dirname(dest)); - cpdirSyncRecursive(src, newDest, 0, options); - } catch (e) { - /* istanbul ignore next */ - common.error("cannot create directory '" + dest + "': No such file or directory"); - } - } - } else { - // If here, src is a file - - // When copying to '/path/dir': - // thisDest = '/path/dir/file1' - var thisDest = dest; - if (destStat && destStat.isDirectory()) { - thisDest = path.normalize(dest + '/' + path.basename(src)); - } - - var thisDestExists = fs.existsSync(thisDest); - if (thisDestExists && checkRecentCreated(sources, srcIndex)) { - // cannot overwrite file created recently in current execution, but we want to continue copying other files - if (!options.no_force) { - common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); - } - return; - } - - if (thisDestExists && options.no_force) { - return; // skip file - } - - if (path.relative(src, thisDest) === '') { - // a file cannot be copied to itself, but we want to continue copying other files - common.error("'" + thisDest + "' and '" + src + "' are the same file", { continue: true }); - return; - } - - copyFileSync(src, thisDest, options); - } - }); // forEach(src) - - return new common.ShellString('', common.state.error, common.state.errorCode); -} -module.exports = _cp; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js deleted file mode 100644 index 9b7251d..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/dirs.js +++ /dev/null @@ -1,210 +0,0 @@ -var path = require('path'); -var common = require('./common'); -var _cd = require('./cd'); - -common.register('dirs', _dirs, { - wrapOutput: false, -}); -common.register('pushd', _pushd, { - wrapOutput: false, -}); -common.register('popd', _popd, { - wrapOutput: 
false, -}); - -// Pushd/popd/dirs internals -var _dirStack = []; - -function _isStackIndex(index) { - return (/^[-+]\d+$/).test(index); -} - -function _parseStackIndex(index) { - if (_isStackIndex(index)) { - if (Math.abs(index) < _dirStack.length + 1) { // +1 for pwd - return (/^-/).test(index) ? Number(index) - 1 : Number(index); - } - common.error(index + ': directory stack index out of range'); - } else { - common.error(index + ': invalid number'); - } -} - -function _actualDirStack() { - return [process.cwd()].concat(_dirStack); -} - -//@ -//@ ### pushd([options,] [dir | '-N' | '+N']) -//@ -//@ Available options: -//@ -//@ + `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. -//@ + `-q`: Suppresses output to the console. -//@ -//@ Arguments: -//@ -//@ + `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. -//@ + `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. -//@ + `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ // process.cwd() === '/usr' -//@ pushd('/etc'); // Returns /etc /usr -//@ pushd('+1'); // Returns /usr /etc -//@ ``` -//@ -//@ Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. Returns an array of paths in the stack. -function _pushd(options, dir) { - if (_isStackIndex(options)) { - dir = options; - options = ''; - } - - options = common.parseOptions(options, { - 'n': 'no-cd', - 'q': 'quiet', - }); - - var dirs = _actualDirStack(); - - if (dir === '+0') { - return dirs; // +0 is a noop - } else if (!dir) { - if (dirs.length > 1) { - dirs = dirs.splice(1, 1).concat(dirs); - } else { - return common.error('no other directory'); - } - } else if (_isStackIndex(dir)) { - var n = _parseStackIndex(dir); - dirs = dirs.slice(n).concat(dirs.slice(0, n)); - } else if (options['no-cd']) { - dirs.splice(1, 0, dir); - } else { - dirs.unshift(dir); - } - - if (options['no-cd']) { - dirs = dirs.slice(1); - } else { - dir = path.resolve(dirs.shift()); - _cd('', dir); - } - - _dirStack = dirs; - return _dirs(options.quiet ? '-q' : ''); -} -exports.pushd = _pushd; - -//@ -//@ -//@ ### popd([options,] ['-N' | '+N']) -//@ -//@ Available options: -//@ -//@ + `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. -//@ + `-q`: Suppresses output to the console. -//@ -//@ Arguments: -//@ -//@ + `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. -//@ + `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ echo(process.cwd()); // '/usr' -//@ pushd('/etc'); // '/etc /usr' -//@ echo(process.cwd()); // '/etc' -//@ popd(); // '/usr' -//@ echo(process.cwd()); // '/usr' -//@ ``` -//@ -//@ When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. 
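The stack rotation described above is easiest to see end to end. A short sketch, assuming the standard `require('shelljs')` entry point and that `/usr` and `/etc` exist (directory names are illustrative):

```javascript
var shell = require('shelljs');

shell.cd('/usr');
shell.pushd('/etc');   // ['/etc', '/usr']  -- cwd is now /etc
shell.pushd('+1');     // ['/usr', '/etc']  -- rotate /usr back to the top, cwd is /usr
shell.popd();          // ['/etc']          -- drop /usr from the stack, cd back to /etc
console.log(shell.pwd().toString()); // '/etc'
```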
-function _popd(options, index) { - if (_isStackIndex(options)) { - index = options; - options = ''; - } - - options = common.parseOptions(options, { - 'n': 'no-cd', - 'q': 'quiet', - }); - - if (!_dirStack.length) { - return common.error('directory stack empty'); - } - - index = _parseStackIndex(index || '+0'); - - if (options['no-cd'] || index > 0 || _dirStack.length + index === 0) { - index = index > 0 ? index - 1 : index; - _dirStack.splice(index, 1); - } else { - var dir = path.resolve(_dirStack.shift()); - _cd('', dir); - } - - return _dirs(options.quiet ? '-q' : ''); -} -exports.popd = _popd; - -//@ -//@ -//@ ### dirs([options | '+N' | '-N']) -//@ -//@ Available options: -//@ -//@ + `-c`: Clears the directory stack by deleting all of the elements. -//@ + `-q`: Suppresses output to the console. -//@ -//@ Arguments: -//@ -//@ + `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. -//@ + `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. -//@ -//@ Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. -//@ -//@ See also: `pushd`, `popd` -function _dirs(options, index) { - if (_isStackIndex(options)) { - index = options; - options = ''; - } - - options = common.parseOptions(options, { - 'c': 'clear', - 'q': 'quiet', - }); - - if (options.clear) { - _dirStack = []; - return _dirStack; - } - - var stack = _actualDirStack(); - - if (index) { - index = _parseStackIndex(index); - - if (index < 0) { - index = stack.length + index; - } - - if (!options.quiet) { - common.log(stack[index]); - } - return stack[index]; - } - - if (!options.quiet) { - common.log(stack.join(' ')); - } - - return stack; -} -exports.dirs = _dirs; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js deleted file mode 100644 index da37f43..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/echo.js +++ /dev/null @@ -1,62 +0,0 @@ -var format = require('util').format; - -var common = require('./common'); - -common.register('echo', _echo, { - allowGlobbing: false, -}); - -//@ -//@ ### echo([options,] string [, string ...]) -//@ -//@ Available options: -//@ -//@ + `-e`: interpret backslash escapes (default) -//@ + `-n`: remove trailing newline from output -//@ -//@ Examples: -//@ -//@ ```javascript -//@ echo('hello world'); -//@ var str = echo('hello world'); -//@ echo('-n', 'no newline at end'); -//@ ``` -//@ -//@ Prints `string` to stdout, and returns a [ShellString](#shellstringstr). -function _echo(opts) { - // allow strings starting with '-', see issue #20 - var messages = [].slice.call(arguments, opts ? 0 : 1); - var options = {}; - - // If the first argument starts with '-', parse it as options string. - // If parseOptions throws, it wasn't an options string. - try { - options = common.parseOptions(messages[0], { - 'e': 'escapes', - 'n': 'no_newline', - }, { - silent: true, - }); - - // Allow null to be echoed - if (messages[0]) { - messages.shift(); - } - } catch (_) { - // Clear out error if an error occurred - common.state.error = null; - } - - var output = format.apply(null, messages); - - // Add newline if -n is not passed. 
- if (!options.no_newline) { - output += '\n'; - } - - process.stdout.write(output); - - return output; -} - -module.exports = _echo; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js deleted file mode 100644 index b0ed59e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/error.js +++ /dev/null @@ -1,15 +0,0 @@ -var common = require('./common'); - -//@ -//@ ### error() -//@ -//@ Tests if error occurred in the last command. Returns a truthy value if an -//@ error returned, or a falsy value otherwise. -//@ -//@ **Note**: do not rely on the -//@ return value to be an error message. If you need the last error message, use -//@ the `.stderr` attribute from the last command's return value instead. -function error() { - return common.state.error; -} -module.exports = error; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js deleted file mode 100644 index a1c7fd2..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/errorCode.js +++ /dev/null @@ -1,10 +0,0 @@ -var common = require('./common'); - -//@ -//@ ### errorCode() -//@ -//@ Returns the error code from the last command. -function errorCode() { - return common.state.errorCode; -} -module.exports = errorCode; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js deleted file mode 100644 index e8446f6..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec-child.js +++ /dev/null @@ -1,71 +0,0 @@ -var childProcess = require('child_process'); -var fs = require('fs'); - -function main() { - var paramFilePath = process.argv[2]; - - var serializedParams = fs.readFileSync(paramFilePath, 'utf8'); - var params = JSON.parse(serializedParams); - - var cmd = params.command; - var execOptions = params.execOptions; - var pipe = params.pipe; - var stdoutFile = params.stdoutFile; - var stderrFile = params.stderrFile; - - function isMaxBufferError(err) { - var maxBufferErrorPattern = /^.*\bmaxBuffer\b.*exceeded.*$/; - if (err instanceof Error && err.message && - err.message.match(maxBufferErrorPattern)) { - // < v10 - // Error: stdout maxBuffer exceeded - return true; - } else if (err instanceof RangeError && err.message && - err.message.match(maxBufferErrorPattern)) { - // >= v10 - // RangeError [ERR_CHILD_PROCESS_STDIO_MAXBUFFER]: stdout maxBuffer length - // exceeded - return true; - } - return false; - } - - var stdoutStream = fs.createWriteStream(stdoutFile); - var stderrStream = fs.createWriteStream(stderrFile); - - function appendError(message, code) { - stderrStream.write(message); - process.exitCode = code; - } - - var c = childProcess.exec(cmd, execOptions, function (err) { - if (!err) { - process.exitCode = 0; - } else if (isMaxBufferError(err)) { - appendError('maxBuffer exceeded', 1); - } else if (err.code === undefined && err.message) { - /* istanbul ignore next */ - appendError(err.message, 1); - } else if (err.code === undefined) { - /* istanbul ignore next */ - appendError('Unknown issue', 1); - } else { - process.exitCode = err.code; - } - }); - - c.stdout.pipe(stdoutStream); - c.stderr.pipe(stderrStream); - c.stdout.pipe(process.stdout); - c.stderr.pipe(process.stderr); - - if (pipe) { - 
c.stdin.end(pipe); - } -} - -// This file should only be executed. This module does not export anything. -/* istanbul ignore else */ -if (require.main === module) { - main(); -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js deleted file mode 100644 index 3907769..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/exec.js +++ /dev/null @@ -1,255 +0,0 @@ -var path = require('path'); -var fs = require('fs'); -var child = require('child_process'); -var common = require('./common'); -var _tempDir = require('./tempdir').tempDir; -var _pwd = require('./pwd'); - -var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; -var DEFAULT_ERROR_CODE = 1; - -common.register('exec', _exec, { - unix: false, - canReceivePipe: true, - wrapOutput: false, - handlesFatalDynamically: true, -}); - -// We use this function to run `exec` synchronously while also providing realtime -// output. -function execSync(cmd, opts, pipe) { - if (!common.config.execPath) { - try { - common.error('Unable to find a path to the node binary. Please manually set config.execPath'); - } catch (e) { - if (opts.fatal) { - throw e; - } - - return; - } - } - - var tempDir = _tempDir(); - var paramsFile = path.join(tempDir, common.randomFileName()); - var stderrFile = path.join(tempDir, common.randomFileName()); - var stdoutFile = path.join(tempDir, common.randomFileName()); - - opts = common.extend({ - silent: common.config.silent, - fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary - cwd: _pwd().toString(), - env: process.env, - maxBuffer: DEFAULT_MAXBUFFER_SIZE, - encoding: 'utf8', - }, opts); - - if (fs.existsSync(paramsFile)) common.unlinkSync(paramsFile); - if (fs.existsSync(stderrFile)) common.unlinkSync(stderrFile); - if (fs.existsSync(stdoutFile)) common.unlinkSync(stdoutFile); - - opts.cwd = path.resolve(opts.cwd); - - var paramsToSerialize = { - command: cmd, - execOptions: opts, - pipe, - stdoutFile, - stderrFile, - }; - - // Create the files and ensure these are locked down (for read and write) to - // the current user. The main concerns here are: - // - // * If we execute a command which prints sensitive output, then - // stdoutFile/stderrFile must not be readable by other users. - // * paramsFile must not be readable by other users, or else they can read it - // to figure out the path for stdoutFile/stderrFile and create these first - // (locked down to their own access), which will crash exec() when it tries - // to write to the files. - function writeFileLockedDown(filePath, data) { - fs.writeFileSync(filePath, data, { - encoding: 'utf8', - mode: parseInt('600', 8), - }); - } - writeFileLockedDown(stdoutFile, ''); - writeFileLockedDown(stderrFile, ''); - writeFileLockedDown(paramsFile, JSON.stringify(paramsToSerialize)); - - var execArgs = [ - path.join(__dirname, 'exec-child.js'), - paramsFile, - ]; - - /* istanbul ignore else */ - if (opts.silent) { - opts.stdio = 'ignore'; - } else { - opts.stdio = [0, 1, 2]; - } - - var code = 0; - - // Welcome to the future - try { - // Bad things if we pass in a `shell` option to child_process.execFileSync, - // so we need to explicitly remove it here. - delete opts.shell; - - child.execFileSync(common.config.execPath, execArgs, opts); - } catch (e) { - // Commands with non-zero exit code raise an exception. 
- code = e.status || DEFAULT_ERROR_CODE; - } - - // fs.readFileSync uses buffer encoding by default, so call - // it without the encoding option if the encoding is 'buffer'. - // Also, if the exec timeout is too short for node to start up, - // the files will not be created, so these calls will throw. - var stdout = ''; - var stderr = ''; - if (opts.encoding === 'buffer') { - stdout = fs.readFileSync(stdoutFile); - stderr = fs.readFileSync(stderrFile); - } else { - stdout = fs.readFileSync(stdoutFile, opts.encoding); - stderr = fs.readFileSync(stderrFile, opts.encoding); - } - - // No biggie if we can't erase the files now -- they're in a temp dir anyway - // and we locked down permissions (see the note above). - try { common.unlinkSync(paramsFile); } catch (e) {} - try { common.unlinkSync(stderrFile); } catch (e) {} - try { common.unlinkSync(stdoutFile); } catch (e) {} - - if (code !== 0) { - // Note: `silent` should be unconditionally true to avoid double-printing - // the command's stderr, and to avoid printing any stderr when the user has - // set `shell.config.silent`. - common.error(stderr, code, { continue: true, silent: true, fatal: opts.fatal }); - } - var obj = common.ShellString(stdout, stderr, code); - return obj; -} // execSync() - -// Wrapper around exec() to enable echoing output to console in real time -function execAsync(cmd, opts, pipe, callback) { - opts = common.extend({ - silent: common.config.silent, - fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary - cwd: _pwd().toString(), - env: process.env, - maxBuffer: DEFAULT_MAXBUFFER_SIZE, - encoding: 'utf8', - }, opts); - - var c = child.exec(cmd, opts, function (err, stdout, stderr) { - if (callback) { - if (!err) { - callback(0, stdout, stderr); - } else if (err.code === undefined) { - // See issue #536 - /* istanbul ignore next */ - callback(1, stdout, stderr); - } else { - callback(err.code, stdout, stderr); - } - } - }); - - if (pipe) c.stdin.end(pipe); - - if (!opts.silent) { - c.stdout.pipe(process.stdout); - c.stderr.pipe(process.stderr); - } - - return c; -} - -//@ -//@ ### exec(command [, options] [, callback]) -//@ -//@ Available options: -//@ -//@ + `async`: Asynchronous execution. If a callback is provided, it will be set to -//@ `true`, regardless of the passed value (default: `false`). -//@ + `fatal`: Exit upon error (default: `false`). -//@ + `silent`: Do not echo program output to console (default: `false`). -//@ + `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and -//@ what is written to stdout and stderr when not in silent mode (default: `'utf8'`). -//@ + and any option available to Node.js's -//@ [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var version = exec('node --version', {silent:true}).stdout; -//@ -//@ var child = exec('some_long_running_process', {async:true}); -//@ child.stdout.on('data', function(data) { -//@ /* ... do something with data ... */ -//@ }); -//@ -//@ exec('some_long_running_process', function(code, stdout, stderr) { -//@ console.log('Exit code:', code); -//@ console.log('Program output:', stdout); -//@ console.log('Program stderr:', stderr); -//@ }); -//@ ``` -//@ -//@ Executes the given `command` _synchronously_, unless otherwise specified. -//@ When in synchronous mode, this returns a [ShellString](#shellstringstr). 
-//@ Otherwise, this returns the child process object, and the `callback` -//@ receives the arguments `(code, stdout, stderr)`. -//@ -//@ Not seeing the behavior you want? `exec()` runs everything through `sh` -//@ by default (or `cmd.exe` on Windows), which differs from `bash`. If you -//@ need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. -//@ -//@ **Security note:** as `shell.exec()` executes an arbitrary string in the -//@ system shell, it is **critical** to properly sanitize user input to avoid -//@ **command injection**. For more context, consult the [Security -//@ Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). -function _exec(command, options, callback) { - options = options || {}; - - var pipe = common.readFromPipe(); - - // Callback is defined instead of options. - if (typeof options === 'function') { - callback = options; - options = { async: true }; - } - - // Callback is defined with options. - if (typeof options === 'object' && typeof callback === 'function') { - options.async = true; - } - - options = common.extend({ - silent: common.config.silent, - fatal: common.config.fatal, - async: false, - }, options); - - if (!command) { - try { - common.error('must specify command'); - } catch (e) { - if (options.fatal) { - throw e; - } - - return; - } - } - - if (options.async) { - return execAsync(command, options, pipe, callback); - } else { - return execSync(command, options, pipe); - } -} -module.exports = _exec; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js deleted file mode 100644 index 80db993..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/find.js +++ /dev/null @@ -1,66 +0,0 @@ -var path = require('path'); -var common = require('./common'); -var _ls = require('./ls'); - -common.register('find', _find, { - cmdOptions: { - 'L': 'link', - }, -}); - -//@ -//@ ### find(path [, path ...]) -//@ ### find(path_array) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ find('src', 'lib'); -//@ find(['src', 'lib']); // same as above -//@ find('.').filter(function(file) { return file.match(/\.js$/); }); -//@ ``` -//@ -//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all -//@ files (however deep) in the given paths. -//@ -//@ The main difference from `ls('-R', path)` is that the resulting file names -//@ include the base directories (e.g., `lib/resources/file1` instead of just `file1`). -function _find(options, paths) { - if (!paths) { - common.error('no path specified'); - } else if (typeof paths === 'string') { - paths = [].slice.call(arguments, 1); - } - - var list = []; - - function pushFile(file) { - if (process.platform === 'win32') { - file = file.replace(/\\/g, '/'); - } - list.push(file); - } - - // why not simply do `ls('-R', paths)`? 
because the output wouldn't give the base dirs - // to get the base dir in the output, we need instead `ls('-R', 'dir/*')` for every directory - - paths.forEach(function (file) { - var stat; - try { - stat = common.statFollowLinks(file); - } catch (e) { - common.error('no such file or directory: ' + file); - } - - pushFile(file); - - if (stat.isDirectory()) { - _ls({ recursive: true, all: true, link: options.link }, file).forEach(function (subfile) { - pushFile(path.join(file, subfile)); - }); - } - }); - - return list; -} -module.exports = _find; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js deleted file mode 100644 index cfc83e4..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/grep.js +++ /dev/null @@ -1,198 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('grep', _grep, { - globStart: 2, // don't glob-expand the regex - canReceivePipe: true, - cmdOptions: { - 'v': 'inverse', - 'l': 'nameOnly', - 'i': 'ignoreCase', - 'n': 'lineNumber', - 'B': 'beforeContext', - 'A': 'afterContext', - 'C': 'context', - }, -}); - -//@ -//@ ### grep([options,] regex_filter, file [, file ...]) -//@ ### grep([options,] regex_filter, file_array) -//@ -//@ Available options: -//@ -//@ + `-v`: Invert `regex_filter` (only print non-matching lines). -//@ + `-l`: Print only filenames of matching files. -//@ + `-i`: Ignore case. -//@ + `-n`: Print line numbers. -//@ + `-B `: Show `` lines before each result. -//@ + `-A `: Show `` lines after each result. -//@ + `-C `: Show `` lines before and after each result. -B and -A override this option. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ grep('-v', 'GLOBAL_VARIABLE', '*.js'); -//@ grep('GLOBAL_VARIABLE', '*.js'); -//@ grep('-B', 3, 'GLOBAL_VARIABLE', '*.js'); -//@ grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js'); -//@ grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js'); -//@ ``` -//@ -//@ Reads input string from given files and returns a -//@ [ShellString](#shellstringstr) containing all lines of the @ file that match -//@ the given `regex_filter`. 
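A brief sketch of the `grep()` options documented above, assuming the standard `require('shelljs')` entry point; the pattern and glob are illustrative:

```javascript
var shell = require('shelljs');

// Print matching lines prefixed with their line numbers (-n).
var hits = shell.grep('-n', 'TODO', 'lib/*.js');

// Only list the files that contain a match (-l).
var files = shell.grep('-l', 'TODO', 'lib/*.js');
console.log(files.trim().split('\n'));
```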
-function _grep(options, regex, files) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no paths given', 2); - - var idx = 2; - var contextError = ': invalid context length argument'; - // If the option has been found but not read, copy value from arguments - if (options.beforeContext === true) { - idx = 3; - options.beforeContext = Number(arguments[1]); - if (options.beforeContext < 0) { - common.error(options.beforeContext + contextError, 2); - } - } - if (options.afterContext === true) { - idx = 3; - options.afterContext = Number(arguments[1]); - if (options.afterContext < 0) { - common.error(options.afterContext + contextError, 2); - } - } - if (options.context === true) { - idx = 3; - options.context = Number(arguments[1]); - if (options.context < 0) { - common.error(options.context + contextError, 2); - } - } - // If before or after not given but context is, update values - if (typeof options.context === 'number') { - if (options.beforeContext === false) { - options.beforeContext = options.context; - } - if (options.afterContext === false) { - options.afterContext = options.context; - } - } - regex = arguments[idx - 1]; - files = [].slice.call(arguments, idx); - - if (pipe) { - files.unshift('-'); - } - - var grep = []; - if (options.ignoreCase) { - regex = new RegExp(regex, 'i'); - } - files.forEach(function (file) { - if (!fs.existsSync(file) && file !== '-') { - common.error('no such file or directory: ' + file, 2, { continue: true }); - return; - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - if (options.nameOnly) { - if (contents.match(regex)) { - grep.push(file); - } - } else { - var lines = contents.split('\n'); - var matches = []; - - lines.forEach(function (line, index) { - var matched = line.match(regex); - if ((options.inverse && !matched) || (!options.inverse && matched)) { - var lineNumber = index + 1; - var result = {}; - if (matches.length > 0) { - // If the last result intersects, combine them - var last = matches[matches.length - 1]; - var minimumLineNumber = Math.max( - 1, - lineNumber - options.beforeContext - 1, - ); - if ( - last.hasOwnProperty('' + lineNumber) || - last.hasOwnProperty('' + minimumLineNumber) - ) { - result = last; - } - } - result[lineNumber] = { - line, - match: true, - }; - if (options.beforeContext > 0) { - // Store the lines with their line numbers to check for overlap - lines - .slice(Math.max(index - options.beforeContext, 0), index) - .forEach(function (v, i, a) { - var lineNum = '' + (index - a.length + i + 1); - if (!result.hasOwnProperty(lineNum)) { - result[lineNum] = { line: v, match: false }; - } - }); - } - if (options.afterContext > 0) { - // Store the lines with their line numbers to check for overlap - lines - .slice( - index + 1, - Math.min(index + options.afterContext + 1, lines.length - 1), - ) - .forEach(function (v, i) { - var lineNum = '' + (index + 1 + i + 1); - if (!result.hasOwnProperty(lineNum)) { - result[lineNum] = { line: v, match: false }; - } - }); - } - // Only add the result if it's new - if (!matches.includes(result)) { - matches.push(result); - } - } - }); - - // Loop through the matches and add them to the output - Array.prototype.push.apply( - grep, - matches.map(function (result) { - return Object.entries(result) - .map(function (entry) { - var lineNumber = entry[0]; - var line = entry[1].line; - var match = entry[1].match; - return options.lineNumber - ? lineNumber + (match ? 
':' : '-') + line - : line; - }) - .join('\n'); - }), - ); - } - }); - - if (grep.length === 0 && common.state.errorCode !== 2) { - // We didn't hit the error above, but pattern didn't match - common.error('', { silent: true }); - } - - var separator = '\n'; - if ( - typeof options.beforeContext === 'number' || - typeof options.afterContext === 'number' - ) { - separator = '\n--\n'; - } - return grep.join(separator) + '\n'; -} -module.exports = _grep; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js deleted file mode 100644 index f3f4f22..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/head.js +++ /dev/null @@ -1,107 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('head', _head, { - canReceivePipe: true, - cmdOptions: { - 'n': 'numLines', - }, -}); - -// Reads |numLines| lines or the entire file, whichever is less. -function readSomeLines(file, numLines) { - var buf = common.buffer(); - var bufLength = buf.length; - var bytesRead = bufLength; - var pos = 0; - - var fdr = fs.openSync(file, 'r'); - var numLinesRead = 0; - var ret = ''; - while (bytesRead === bufLength && numLinesRead < numLines) { - bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); - var bufStr = buf.toString('utf8', 0, bytesRead); - numLinesRead += bufStr.split('\n').length - 1; - ret += bufStr; - pos += bytesRead; - } - - fs.closeSync(fdr); - return ret; -} - -//@ -//@ ### head([{'-n': \},] file [, file ...]) -//@ ### head([{'-n': \},] file_array) -//@ -//@ Available options: -//@ -//@ + `-n `: Show the first `` lines of the files -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var str = head({'-n': 1}, 'file*.txt'); -//@ var str = head('file1', 'file2'); -//@ var str = head(['file1', 'file2']); // same as above -//@ ``` -//@ -//@ Read the start of a `file`. Returns a [ShellString](#shellstringstr). 
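A minimal sketch of `head()`, assuming the standard `require('shelljs')` entry point; the file name is illustrative. The negative-count case falls out of the whole-file branch of `_head` below:

```javascript
var shell = require('shelljs');

var firstThree = shell.head({ '-n': 3 }, 'CHANGELOG.md');   // first three lines

// A negative count reads the whole file and lines.slice(0, numLines)
// then drops the trailing lines, mirroring GNU `head -n -3`.
var allButLastThree = shell.head({ '-n': -3 }, 'CHANGELOG.md');
```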
-function _head(options, files) { - var head = []; - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no paths given'); - - var idx = 1; - if (options.numLines === true) { - idx = 2; - options.numLines = Number(arguments[1]); - } else if (options.numLines === false) { - options.numLines = 10; - } - files = [].slice.call(arguments, idx); - - if (pipe) { - files.unshift('-'); - } - - var shouldAppendNewline = false; - files.forEach(function (file) { - if (file !== '-') { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file, { continue: true }); - return; - } else if (common.statFollowLinks(file).isDirectory()) { - common.error("error reading '" + file + "': Is a directory", { - continue: true, - }); - return; - } - } - - var contents; - if (file === '-') { - contents = pipe; - } else if (options.numLines < 0) { - contents = fs.readFileSync(file, 'utf8'); - } else { - contents = readSomeLines(file, options.numLines); - } - - var lines = contents.split('\n'); - var hasTrailingNewline = (lines[lines.length - 1] === ''); - if (hasTrailingNewline) { - lines.pop(); - } - shouldAppendNewline = (hasTrailingNewline || options.numLines < lines.length); - - head = head.concat(lines.slice(0, options.numLines)); - }); - - if (shouldAppendNewline) { - head.push(''); // to add a trailing newline once we join - } - return head.join('\n'); -} -module.exports = _head; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js deleted file mode 100644 index 1d3d0e7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ln.js +++ /dev/null @@ -1,75 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('ln', _ln, { - cmdOptions: { - 's': 'symlink', - 'f': 'force', - }, -}); - -//@ -//@ ### ln([options,] source, dest) -//@ -//@ Available options: -//@ -//@ + `-s`: symlink -//@ + `-f`: force -//@ -//@ Examples: -//@ -//@ ```javascript -//@ ln('file', 'newlink'); -//@ ln('-sf', 'file', 'existing'); -//@ ``` -//@ -//@ Links `source` to `dest`. Use `-f` to force the link, should `dest` already -//@ exist. Returns a [ShellString](#shellstringstr) indicating success or -//@ failure. -function _ln(options, source, dest) { - if (!source || !dest) { - common.error('Missing and/or '); - } - - source = String(source); - var sourcePath = path.normalize(source).replace(RegExp(path.sep + '$'), ''); - var isAbsolute = (path.resolve(source) === sourcePath); - dest = path.resolve(process.cwd(), String(dest)); - - if (fs.existsSync(dest)) { - if (!options.force) { - common.error('Destination file exists', { continue: true }); - } - - fs.unlinkSync(dest); - } - - if (options.symlink) { - var isWindows = process.platform === 'win32'; - var linkType = isWindows ? 'file' : null; - var resolvedSourcePath = isAbsolute ? sourcePath : path.resolve(process.cwd(), path.dirname(dest), source); - if (!fs.existsSync(resolvedSourcePath)) { - common.error('Source file does not exist', { continue: true }); - } else if (isWindows && common.statFollowLinks(resolvedSourcePath).isDirectory()) { - linkType = 'junction'; - } - - try { - fs.symlinkSync(linkType === 'junction' ? 
resolvedSourcePath : source, dest, linkType); - } catch (err) { - common.error(err.message); - } - } else { - if (!fs.existsSync(source)) { - common.error('Source file does not exist', { continue: true }); - } - try { - fs.linkSync(source, dest); - } catch (err) { - common.error(err.message); - } - } - return ''; -} -module.exports = _ln; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js deleted file mode 100644 index 7f32c6e..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/ls.js +++ /dev/null @@ -1,155 +0,0 @@ -var path = require('path'); -var fs = require('fs'); -var glob = require('fast-glob'); -var common = require('./common'); - -// glob patterns use the UNIX path seperator -var globPatternRecursive = '/**'; - -common.register('ls', _ls, { - cmdOptions: { - 'R': 'recursive', - 'A': 'all', - 'L': 'link', - 'a': 'all_deprecated', - 'd': 'directory', - 'l': 'long', - }, -}); - -//@ -//@ ### ls([options,] [path, ...]) -//@ ### ls([options,] path_array) -//@ -//@ Available options: -//@ -//@ + `-R`: recursive -//@ + `-A`: all files (include files beginning with `.`, except for `.` and `..`) -//@ + `-L`: follow symlinks -//@ + `-d`: list directories themselves, not their contents -//@ + `-l`: provides more details for each file. Specifically, each file is -//@ represented by a structured object with separate fields for file -//@ metadata (see -//@ [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The -//@ return value also overrides `.toString()` to resemble `ls -l`'s -//@ output format for human readability, but programmatic usage should -//@ depend on the stable object format rather than the `.toString()` -//@ representation. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ ls('projs/*.js'); -//@ ls('projs/**/*.js'); // Find all js files recursively in projs -//@ ls('-R', '/users/me', '/tmp'); -//@ ls('-R', ['/users/me', '/tmp']); // same as above -//@ ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} -//@ ``` -//@ -//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all -//@ the files in the given `path`, or files in the current directory if no -//@ `path` is provided. -function _ls(options, paths) { - if (options.all_deprecated) { - // We won't support the -a option as it's hard to image why it's useful - // (it includes '.' and '..' in addition to '.*' files) - // For backwards compatibility we'll dump a deprecated message and proceed as before - common.log('ls: Option -a is deprecated. Use -A instead'); - options.all = true; - } - - if (!paths) { - paths = ['.']; - } else { - paths = [].slice.call(arguments, 1); - } - - var list = []; - - function pushFile(abs, relName, stat) { - if (process.platform === 'win32') { - relName = relName.replace(/\\/g, '/'); - } - if (options.long) { - stat = stat || (options.link ? common.statFollowLinks(abs) : common.statNoFollowLinks(abs)); - list.push(addLsAttributes(relName, stat)); - } else { - // list.push(path.relative(rel || '.', file)); - list.push(relName); - } - } - - paths.forEach(function (p) { - var stat; - - try { - stat = options.link ? common.statFollowLinks(p) : common.statNoFollowLinks(p); - // follow links to directories by default - if (stat.isSymbolicLink()) { - /* istanbul ignore next */ - // workaround for https://github.com/shelljs/shelljs/issues/795 - // codecov seems to have a bug that miscalculate this block as uncovered. 
- // but according to nyc report this block does get covered. - try { - var _stat = common.statFollowLinks(p); - if (_stat.isDirectory()) { - stat = _stat; - } - } catch (_) {} // bad symlink, treat it like a file - } - } catch (e) { - common.error('no such file or directory: ' + p, 2, { continue: true }); - return; - } - - // If the stat succeeded - if (stat.isDirectory() && !options.directory) { - if (options.recursive) { - // use glob, because it's simple - glob.sync(p + globPatternRecursive, { - // These options are just to make fast-glob be compatible with POSIX - // (bash) wildcard behavior. - onlyFiles: false, - - // These options depend on the cmdOptions provided to ls. - dot: options.all, - followSymbolicLinks: options.link, - }).forEach(function (item) { - // Glob pattern returns the directory itself and needs to be filtered out. - if (path.relative(p, item)) { - pushFile(item, path.relative(p, item)); - } - }); - } else if (options.all) { - // use fs.readdirSync, because it's fast - fs.readdirSync(p).forEach(function (item) { - pushFile(path.join(p, item), item); - }); - } else { - // use fs.readdirSync and then filter out secret files - fs.readdirSync(p).forEach(function (item) { - if (item[0] !== '.') { - pushFile(path.join(p, item), item); - } - }); - } - } else { - pushFile(p, p, stat); - } - }); - - // Add methods, to make this more compatible with ShellStrings - return list; -} - -function addLsAttributes(pathName, stats) { - // Note: this object will contain more information than .toString() returns - stats.name = pathName; - stats.toString = function () { - // Return a string resembling unix's `ls -l` format - return [this.mode, this.nlink, this.uid, this.gid, this.size, this.mtime, this.name].join(' '); - }; - return stats; -} - -module.exports = _ls; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js deleted file mode 100644 index 021cad9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mkdir.js +++ /dev/null @@ -1,102 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('mkdir', _mkdir, { - cmdOptions: { - 'p': 'fullpath', - }, -}); - -// Recursively creates `dir` -function mkdirSyncRecursive(dir) { - var baseDir = path.dirname(dir); - - // Prevents some potential problems arising from malformed UNCs or - // insufficient permissions. - /* istanbul ignore next */ - if (baseDir === dir) { - common.error('dirname() failed: [' + dir + ']'); - } - - // Base dir does not exist, go recursive - if (!fs.existsSync(baseDir)) { - mkdirSyncRecursive(baseDir); - } - - try { - // Base dir created, can create dir - fs.mkdirSync(dir, parseInt('0777', 8)); - } catch (e) { - // swallow error if dir already exists - if (e.code !== 'EEXIST' || common.statNoFollowLinks(dir).isFile()) { throw e; } - } -} - -//@ -//@ ### mkdir([options,] dir [, dir ...]) -//@ ### mkdir([options,] dir_array) -//@ -//@ Available options: -//@ -//@ + `-p`: full path (and create intermediate directories, if necessary) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); -//@ mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above -//@ ``` -//@ -//@ Creates directories. Returns a [ShellString](#shellstringstr) indicating -//@ success or failure. 
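A short sketch of `mkdir()` with and without `-p`, assuming the standard `require('shelljs')` entry point and illustrative paths; `error()` is the helper documented earlier in this patch:

```javascript
var shell = require('shelljs');

shell.mkdir('-p', 'build/artifacts/logs');  // creates intermediate directories
if (shell.error()) {
  console.error('mkdir failed:', shell.error());
}

// Without -p, a missing parent directory is reported rather than created.
shell.mkdir('build2/nested');               // 'no such file or directory: build2'
```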
-function _mkdir(options, dirs) { - if (!dirs) common.error('no paths given'); - - if (typeof dirs === 'string') { - dirs = [].slice.call(arguments, 1); - } - // if it's array leave it as it is - - dirs.forEach(function (dir) { - try { - var stat = common.statNoFollowLinks(dir); - if (!options.fullpath) { - common.error('path already exists: ' + dir, { continue: true }); - } else if (stat.isFile()) { - common.error('cannot create directory ' + dir + ': File exists', { continue: true }); - } - return; // skip dir - } catch (e) { - // do nothing - } - - // Base dir does not exist, and no -p option given - var baseDir = path.dirname(dir); - if (!fs.existsSync(baseDir) && !options.fullpath) { - common.error('no such file or directory: ' + baseDir, { continue: true }); - return; // skip dir - } - - try { - if (options.fullpath) { - mkdirSyncRecursive(path.resolve(dir)); - } else { - fs.mkdirSync(dir, parseInt('0777', 8)); - } - } catch (e) { - var reason; - if (e.code === 'EACCES') { - reason = 'Permission denied'; - } else if (e.code === 'ENOTDIR' || e.code === 'ENOENT') { - reason = 'Not a directory'; - } else { - /* istanbul ignore next */ - throw e; - } - common.error('cannot create directory ' + dir + ': ' + reason, { continue: true }); - } - }); - return ''; -} // man arraykdir -module.exports = _mkdir; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js deleted file mode 100644 index 6e89e2f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/mv.js +++ /dev/null @@ -1,119 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); -var cp = require('./cp'); -var rm = require('./rm'); - -common.register('mv', _mv, { - cmdOptions: { - 'f': '!no_force', - 'n': 'no_force', - }, -}); - -// Checks if cureent file was created recently -function checkRecentCreated(sources, index) { - var lookedSource = sources[index]; - return sources.slice(0, index).some(function (src) { - return path.basename(src) === path.basename(lookedSource); - }); -} - -//@ -//@ ### mv([options ,] source [, source ...], dest') -//@ ### mv([options ,] source_array, dest') -//@ -//@ Available options: -//@ -//@ + `-f`: force (default behavior) -//@ + `-n`: no-clobber -//@ -//@ Examples: -//@ -//@ ```javascript -//@ mv('-n', 'file', 'dir/'); -//@ mv('file1', 'file2', 'dir/'); -//@ mv(['file1', 'file2'], 'dir/'); // same as above -//@ ``` -//@ -//@ Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) -//@ indicating success or failure. 
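A minimal sketch of `mv()` with the no-clobber flag, assuming the standard `require('shelljs')` entry point and illustrative file names; `errorCode()` is the helper documented earlier in this patch:

```javascript
var shell = require('shelljs');

shell.mkdir('-p', 'out');
shell.mv('report.txt', 'out/');        // move into an existing directory
shell.mv('-n', 'draft.txt', 'out/');   // -n: skip if out/draft.txt already exists
console.log(shell.errorCode());        // 0 if both moves succeeded
```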
-function _mv(options, sources, dest) { - // Get sources, dest - if (arguments.length < 3) { - common.error('missing and/or '); - } else if (arguments.length > 3) { - sources = [].slice.call(arguments, 1, arguments.length - 1); - dest = arguments[arguments.length - 1]; - } else if (typeof sources === 'string') { - sources = [sources]; - } else { - // TODO(nate): figure out if we actually need this line - common.error('invalid arguments'); - } - - var exists = fs.existsSync(dest); - var stats = exists && common.statFollowLinks(dest); - - // Dest is not existing dir, but multiple sources given - if ((!exists || !stats.isDirectory()) && sources.length > 1) { - common.error('dest is not a directory (too many sources)'); - } - - // Dest is an existing file, but no -f given - if (exists && stats.isFile() && options.no_force) { - common.error('dest file already exists: ' + dest); - } - - sources.forEach(function (src, srcIndex) { - if (!fs.existsSync(src)) { - common.error('no such file or directory: ' + src, { continue: true }); - return; // skip file - } - - // If here, src exists - - // When copying to '/path/dir': - // thisDest = '/path/dir/file1' - var thisDest = dest; - if (fs.existsSync(dest) && common.statFollowLinks(dest).isDirectory()) { - thisDest = path.normalize(dest + '/' + path.basename(src)); - } - - var thisDestExists = fs.existsSync(thisDest); - - if (thisDestExists && checkRecentCreated(sources, srcIndex)) { - // cannot overwrite file created recently in current execution, but we want to continue copying other files - if (!options.no_force) { - common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); - } - return; - } - - if (fs.existsSync(thisDest) && options.no_force) { - common.error('dest file already exists: ' + thisDest, { continue: true }); - return; // skip file - } - - if (path.resolve(src) === path.dirname(path.resolve(thisDest))) { - common.error('cannot move to self: ' + src, { continue: true }); - return; // skip file - } - - try { - fs.renameSync(src, thisDest); - } catch (e) { - /* istanbul ignore next */ - if (e.code === 'EXDEV') { - // If we're trying to `mv` to an external partition, we'll actually need - // to perform a copy and then clean up the original file. If either the - // copy or the rm fails with an exception, we should allow this - // exception to pass up to the top level. 
- cp({ recursive: true }, src, thisDest); - rm({ recursive: true, force: true }, src); - } - } - }); // forEach(src) - return ''; -} // mv -module.exports = _mv; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js deleted file mode 100644 index d9eac3f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/popd.js +++ /dev/null @@ -1 +0,0 @@ -// see dirs.js diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js deleted file mode 100644 index d9eac3f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pushd.js +++ /dev/null @@ -1 +0,0 @@ -// see dirs.js diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js deleted file mode 100644 index 8527d8b..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/pwd.js +++ /dev/null @@ -1,16 +0,0 @@ -var path = require('path'); -var common = require('./common'); - -common.register('pwd', _pwd, { - allowGlobbing: false, -}); - -//@ -//@ ### pwd() -//@ -//@ Returns the current directory as a [ShellString](#shellstringstr). -function _pwd() { - var pwd = path.resolve(process.cwd()); - return pwd; -} -module.exports = _pwd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js deleted file mode 100644 index 6bb5755..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/rm.js +++ /dev/null @@ -1,201 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('rm', _rm, { - cmdOptions: { - 'f': 'force', - 'r': 'recursive', - 'R': 'recursive', - }, -}); - -// Recursively removes 'dir' -// Adapted from https://github.com/ryanmcgrath/wrench-js -// -// Copyright (c) 2010 Ryan McGrath -// Copyright (c) 2012 Artur Adib -// -// Licensed under the MIT License -// http://www.opensource.org/licenses/mit-license.php -function rmdirSyncRecursive(dir, force, fromSymlink) { - var files; - - files = fs.readdirSync(dir); - - // Loop through and delete everything in the sub-tree after checking it - for (var i = 0; i < files.length; i++) { - var file = dir + '/' + files[i]; - var currFile = common.statNoFollowLinks(file); - - if (currFile.isDirectory()) { // Recursive function back to the beginning - rmdirSyncRecursive(file, force); - } else if (force || isWriteable(file)) { - // Assume it's a file - perhaps a try/catch belongs here? - try { - common.unlinkSync(file); - } catch (e) { - /* istanbul ignore next */ - common.error('could not remove file (code ' + e.code + '): ' + file, { - continue: true, - }); - } - } - } - - // if was directory was referenced through a symbolic link, - // the contents should be removed, but not the directory itself - if (fromSymlink) return; - - // Now that we know everything in the sub-tree has been deleted, we can delete the main directory. - // Huzzah for the shopkeep. 
- - var result; - try { - // Retry on windows, sometimes it takes a little time before all the files in the directory are gone - var start = Date.now(); - - // TODO: replace this with a finite loop - for (;;) { - try { - result = fs.rmdirSync(dir); - if (fs.existsSync(dir)) throw { code: 'EAGAIN' }; - break; - } catch (er) { - /* istanbul ignore next */ - // In addition to error codes, also check if the directory still exists and loop again if true - if (process.platform === 'win32' && (er.code === 'ENOTEMPTY' || er.code === 'EBUSY' || er.code === 'EPERM' || er.code === 'EAGAIN')) { - if (Date.now() - start > 1000) throw er; - } else if (er.code === 'ENOENT') { - // Directory did not exist, deletion was successful - break; - } else { - throw er; - } - } - } - } catch (e) { - common.error('could not remove directory (code ' + e.code + '): ' + dir, { continue: true }); - } - - return result; -} // rmdirSyncRecursive - -// Hack to determine if file has write permissions for current user -// Avoids having to check user, group, etc, but it's probably slow -function isWriteable(file) { - var writePermission = true; - try { - var __fd = fs.openSync(file, 'a'); - fs.closeSync(__fd); - } catch (e) { - writePermission = false; - } - - return writePermission; -} - -function handleFile(file, options) { - if (options.force || isWriteable(file)) { - // -f was passed, or file is writable, so it can be removed - common.unlinkSync(file); - } else { - common.error('permission denied: ' + file, { continue: true }); - } -} - -function handleDirectory(file, options) { - if (options.recursive) { - // -r was passed, so directory can be removed - rmdirSyncRecursive(file, options.force); - } else { - common.error('path is a directory', { continue: true }); - } -} - -function handleSymbolicLink(file, options) { - var stats; - try { - stats = common.statFollowLinks(file); - } catch (e) { - // symlink is broken, so remove the symlink itself - common.unlinkSync(file); - return; - } - - if (stats.isFile()) { - common.unlinkSync(file); - } else if (stats.isDirectory()) { - if (file[file.length - 1] === '/') { - // trailing separator, so remove the contents, not the link - if (options.recursive) { - // -r was passed, so directory can be removed - var fromSymlink = true; - rmdirSyncRecursive(file, options.force, fromSymlink); - } else { - common.error('path is a directory', { continue: true }); - } - } else { - // no trailing separator, so remove the link - common.unlinkSync(file); - } - } -} - -function handleFIFO(file) { - common.unlinkSync(file); -} - -//@ -//@ ### rm([options,] file [, file ...]) -//@ ### rm([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-f`: force -//@ + `-r, -R`: recursive -//@ -//@ Examples: -//@ -//@ ```javascript -//@ rm('-rf', '/tmp/*'); -//@ rm('some_file.txt', 'another_file.txt'); -//@ rm(['some_file.txt', 'another_file.txt']); // same as above -//@ ``` -//@ -//@ Removes files. Returns a [ShellString](#shellstringstr) indicating success -//@ or failure. -function _rm(options, files) { - if (!files) common.error('no paths given'); - - // Convert to array - files = [].slice.call(arguments, 1); - - files.forEach(function (file) { - var lstats; - try { - var filepath = (file[file.length - 1] === '/') - ? 
file.slice(0, -1) // remove the '/' so lstatSync can detect symlinks - : file; - lstats = common.statNoFollowLinks(filepath); // test for existence - } catch (e) { - // Path does not exist, no force flag given - if (!options.force) { - common.error('no such file or directory: ' + file, { continue: true }); - } - return; // skip file - } - - // If here, path exists - if (lstats.isFile()) { - handleFile(file, options); - } else if (lstats.isDirectory()) { - handleDirectory(file, options); - } else if (lstats.isSymbolicLink()) { - handleSymbolicLink(file, options); - } else if (lstats.isFIFO()) { - handleFIFO(file); - } - }); // forEach(file) - return ''; -} // rm -module.exports = _rm; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js deleted file mode 100644 index 6936523..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sed.js +++ /dev/null @@ -1,95 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('sed', _sed, { - globStart: 3, // don't glob-expand regexes - canReceivePipe: true, - cmdOptions: { - 'i': 'inplace', - }, -}); - -//@ -//@ ### sed([options,] search_regex, replacement, file [, file ...]) -//@ ### sed([options,] search_regex, replacement, file_array) -//@ -//@ Available options: -//@ -//@ + `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ -//@ -//@ Examples: -//@ -//@ ```javascript -//@ sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); -//@ ``` -//@ -//@ Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on -//@ each of the lines from the input string using the given `search_regex` and `replacement` string or -//@ function. Returns the new [ShellString](#shellstringstr) after replacement. -//@ -//@ Note: -//@ -//@ Like unix `sed`, ShellJS `sed` supports capture groups. Capture groups are specified -//@ using the `$n` syntax: -//@ -//@ ```javascript -//@ sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); -//@ ``` -//@ -//@ Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file -//@ (split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') -//@ will not match anything and nothing will be replaced. -function _sed(options, regex, replacement, files) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (typeof replacement !== 'string' && typeof replacement !== 'function') { - if (typeof replacement === 'number') { - replacement = replacement.toString(); // fallback - } else { - common.error('invalid replacement string'); - } - } - - // Convert all search strings to RegExp - if (typeof regex === 'string') { - regex = RegExp(regex); - } - - if (!files && !pipe) { - common.error('no files given'); - } - - files = [].slice.call(arguments, 3); - - if (pipe) { - files.unshift('-'); - } - - var sed = []; - files.forEach(function (file) { - if (!fs.existsSync(file) && file !== '-') { - common.error('no such file or directory: ' + file, 2, { continue: true }); - return; - } - - var contents = file === '-' ? 
pipe : fs.readFileSync(file, 'utf8'); - var lines = contents.split('\n'); - var result = lines.map(function (line) { - return line.replace(regex, replacement); - }).join('\n'); - - sed.push(result); - - if (options.inplace) { - fs.writeFileSync(file, result, 'utf8'); - } - }); - - if (options.inplace) { - return ''; - } else { - return sed.join('\n'); - } -} -module.exports = _sed; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js deleted file mode 100644 index 6f37bc9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/set.js +++ /dev/null @@ -1,55 +0,0 @@ -var common = require('./common'); - -common.register('set', _set, { - allowGlobbing: false, - wrapOutput: false, -}); - -//@ -//@ ### set(options) -//@ -//@ Available options: -//@ -//@ + `+/-e`: exit upon error (`config.fatal`) -//@ + `+/-v`: verbose: show all commands (`config.verbose`) -//@ + `+/-f`: disable filename expansion (globbing) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ set('-e'); // exit upon first error -//@ set('+e'); // this undoes a "set('-e')" -//@ ``` -//@ -//@ Sets global configuration variables. -function _set(options) { - if (!options) { - var args = [].slice.call(arguments, 0); - if (args.length < 2) common.error('must provide an argument'); - options = args[1]; - } - var negate = (options[0] === '+'); - if (negate) { - options = '-' + options.slice(1); // parseOptions needs a '-' prefix - } - options = common.parseOptions(options, { - 'e': 'fatal', - 'v': 'verbose', - 'f': 'noglob', - }); - - if (negate) { - Object.keys(options).forEach(function (key) { - options[key] = !options[key]; - }); - } - - Object.keys(options).forEach(function (key) { - // Only change the global config if `negate` is false and the option is true - // or if `negate` is true and the option is false (aka negate !== option) - if (negate !== options[key]) { - common.config[key] = options[key]; - } - }); -} -module.exports = _set; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js deleted file mode 100644 index 66b042c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/sort.js +++ /dev/null @@ -1,98 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('sort', _sort, { - canReceivePipe: true, - cmdOptions: { - 'r': 'reverse', - 'n': 'numerical', - }, -}); - -// parse out the number prefix of a line -function parseNumber(str) { - var match = str.match(/^\s*(\d*)\s*(.*)$/); - return { num: Number(match[1]), value: match[2] }; -} - -// compare two strings case-insensitively, but examine case for strings that are -// case-insensitive equivalent -function unixCmp(a, b) { - var aLower = a.toLowerCase(); - var bLower = b.toLowerCase(); - return (aLower === bLower ? - -1 * a.localeCompare(b) : // unix sort treats case opposite how javascript does - aLower.localeCompare(bLower)); -} - -// compare two strings in the fashion that unix sort's -n option works -function numericalCmp(a, b) { - var objA = parseNumber(a); - var objB = parseNumber(b); - if (objA.hasOwnProperty('num') && objB.hasOwnProperty('num')) { - return ((objA.num !== objB.num) ? 
- (objA.num - objB.num) : - unixCmp(objA.value, objB.value)); - } else { - return unixCmp(objA.value, objB.value); - } -} - -//@ -//@ ### sort([options,] file [, file ...]) -//@ ### sort([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-r`: Reverse the results -//@ + `-n`: Compare according to numerical value -//@ -//@ Examples: -//@ -//@ ```javascript -//@ sort('foo.txt', 'bar.txt'); -//@ sort('-r', 'foo.txt'); -//@ ``` -//@ -//@ Return the contents of the `file`s, sorted line-by-line as a -//@ [ShellString](#shellstringstr). Sorting multiple files mixes their content -//@ (just as unix `sort` does). -function _sort(options, files) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no files given'); - - files = [].slice.call(arguments, 1); - - if (pipe) { - files.unshift('-'); - } - - var lines = files.reduce(function (accum, file) { - if (file !== '-') { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file, { continue: true }); - return accum; - } else if (common.statFollowLinks(file).isDirectory()) { - common.error('read failed: ' + file + ': Is a directory', { - continue: true, - }); - return accum; - } - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - return accum.concat(contents.trimRight().split('\n')); - }, []); - - var sorted = lines.sort(options.numerical ? numericalCmp : unixCmp); - - if (options.reverse) { - sorted = sorted.reverse(); - } - - return sorted.join('\n') + '\n'; -} - -module.exports = _sort; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js deleted file mode 100644 index eee75c5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tail.js +++ /dev/null @@ -1,90 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('tail', _tail, { - canReceivePipe: true, - cmdOptions: { - 'n': 'numLines', - }, -}); - -//@ -//@ ### tail([{'-n': \},] file [, file ...]) -//@ ### tail([{'-n': \},] file_array) -//@ -//@ Available options: -//@ -//@ + `-n `: Show the last `` lines of `file`s -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var str = tail({'-n': 1}, 'file*.txt'); -//@ var str = tail('file1', 'file2'); -//@ var str = tail(['file1', 'file2']); // same as above -//@ ``` -//@ -//@ Read the end of a `file`. Returns a [ShellString](#shellstringstr). 
-function _tail(options, files) { - var tail = []; - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no paths given'); - - var idx = 1; - var plusOption = false; - if (options.numLines === true) { - idx = 2; - if (arguments[1][0] === '+') { - plusOption = true; - } - options.numLines = Number(arguments[1]); - } else if (options.numLines === false) { - options.numLines = 10; - } - // arguments[0] is a json object - if (arguments[0].numLines[0] === '+') { - plusOption = true; - } - options.numLines = -1 * Math.abs(options.numLines); - files = [].slice.call(arguments, idx); - - if (pipe) { - files.unshift('-'); - } - - var shouldAppendNewline = false; - files.forEach(function (file) { - if (file !== '-') { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file, { continue: true }); - return; - } else if (common.statFollowLinks(file).isDirectory()) { - common.error("error reading '" + file + "': Is a directory", { - continue: true, - }); - return; - } - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - - var lines = contents.split('\n'); - if (lines[lines.length - 1] === '') { - lines.pop(); - shouldAppendNewline = true; - } else { - shouldAppendNewline = false; - } - - tail = tail.concat(plusOption ? lines.slice(-options.numLines - 1) : lines.slice(options.numLines)); - }); - - if (shouldAppendNewline) { - tail.push(''); // to add a trailing newline once we join - } - - return tail.join('\n'); -} - -module.exports = _tail; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js deleted file mode 100644 index b6f7796..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/tempdir.js +++ /dev/null @@ -1,75 +0,0 @@ -var os = require('os'); -var fs = require('fs'); -var common = require('./common'); - -common.register('tempdir', _tempDir, { - allowGlobbing: false, - wrapOutput: false, -}); - -// Returns false if 'dir' is not a writeable directory, 'dir' otherwise -function writeableDir(dir) { - if (!dir || !fs.existsSync(dir)) return false; - - if (!common.statFollowLinks(dir).isDirectory()) return false; - - var testFile = dir + '/' + common.randomFileName(); - try { - fs.writeFileSync(testFile, ' '); - common.unlinkSync(testFile); - return dir; - } catch (e) { - /* istanbul ignore next */ - return false; - } -} - -// Variable to cache the tempdir value for successive lookups. -var cachedTempDir; - -//@ -//@ ### tempdir() -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var tmp = tempdir(); // "/tmp" for most *nix platforms -//@ ``` -//@ -//@ Searches and returns string containing a writeable, platform-dependent temporary directory. -//@ Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). -function _tempDir() { - if (cachedTempDir) return cachedTempDir; - - cachedTempDir = writeableDir(os.tmpdir()) || - writeableDir(process.env.TMPDIR) || - writeableDir(process.env.TEMP) || - writeableDir(process.env.TMP) || - writeableDir(process.env.Wimp$ScrapDir) || // RiscOS - writeableDir('C:\\TEMP') || // Windows - writeableDir('C:\\TMP') || // Windows - writeableDir('\\TEMP') || // Windows - writeableDir('\\TMP') || // Windows - writeableDir('/tmp') || - writeableDir('/var/tmp') || - writeableDir('/usr/tmp') || - writeableDir('.'); // last resort - - return cachedTempDir; -} - -// Indicates if the tempdir value is currently cached. 
This is exposed for tests -// only. The return value should only be tested for truthiness. -function isCached() { - return cachedTempDir; -} - -// Clears the cached tempDir value, if one is cached. This is exposed for tests -// only. -function clearCache() { - cachedTempDir = undefined; -} - -module.exports.tempDir = _tempDir; -module.exports.isCached = isCached; -module.exports.clearCache = clearCache; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js deleted file mode 100644 index 7e76908..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/test.js +++ /dev/null @@ -1,86 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('test', _test, { - cmdOptions: { - 'b': 'block', - 'c': 'character', - 'd': 'directory', - 'e': 'exists', - 'f': 'file', - 'L': 'link', - 'p': 'pipe', - 'S': 'socket', - }, - wrapOutput: false, - allowGlobbing: false, -}); - - -//@ -//@ ### test(expression) -//@ -//@ Available expression primaries: -//@ -//@ + `'-b', 'path'`: true if path is a block device -//@ + `'-c', 'path'`: true if path is a character device -//@ + `'-d', 'path'`: true if path is a directory -//@ + `'-e', 'path'`: true if path exists -//@ + `'-f', 'path'`: true if path is a regular file -//@ + `'-L', 'path'`: true if path is a symbolic link -//@ + `'-p', 'path'`: true if path is a pipe (FIFO) -//@ + `'-S', 'path'`: true if path is a socket -//@ -//@ Examples: -//@ -//@ ```javascript -//@ if (test('-d', path)) { /* do something with dir */ }; -//@ if (!test('-f', path)) continue; // skip if it's not a regular file -//@ ``` -//@ -//@ Evaluates `expression` using the available primaries and returns -//@ corresponding boolean value. 
-function _test(options, path) { - if (!path) common.error('no path given'); - - var canInterpret = false; - Object.keys(options).forEach(function (key) { - if (options[key] === true) { - canInterpret = true; - } - }); - - if (!canInterpret) common.error('could not interpret expression'); - - if (options.link) { - try { - return common.statNoFollowLinks(path).isSymbolicLink(); - } catch (e) { - return false; - } - } - - if (!fs.existsSync(path)) return false; - - if (options.exists) return true; - - var stats = common.statFollowLinks(path); - - if (options.block) return stats.isBlockDevice(); - - if (options.character) return stats.isCharacterDevice(); - - if (options.directory) return stats.isDirectory(); - - if (options.file) return stats.isFile(); - - /* istanbul ignore next */ - if (options.pipe) return stats.isFIFO(); - - /* istanbul ignore next */ - if (options.socket) return stats.isSocket(); - - /* istanbul ignore next */ - return false; // fallback -} // test -module.exports = _test; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js deleted file mode 100644 index e4b064f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/to.js +++ /dev/null @@ -1,38 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('to', _to, { - pipeOnly: true, - wrapOutput: false, -}); - -//@ -//@ ### ShellString.prototype.to(file) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ cat('input.txt').to('output.txt'); -//@ ``` -//@ -//@ Analogous to the redirection operator `>` in Unix, but works with -//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix -//@ redirections, `to()` will overwrite any existing file!_ Returns the same -//@ [ShellString](#shellstringstr) this operated on, to support chaining. -function _to(options, file) { - if (!file) common.error('wrong arguments'); - - if (!fs.existsSync(path.dirname(file))) { - common.error('no such file or directory: ' + path.dirname(file)); - } - - try { - fs.writeFileSync(file, this.stdout || this.toString(), 'utf8'); - return this; - } catch (e) { - /* istanbul ignore next */ - common.error('could not write to file (code ' + e.code + '): ' + file, { continue: true }); - } -} -module.exports = _to; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js deleted file mode 100644 index dc30e62..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/toEnd.js +++ /dev/null @@ -1,37 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('toEnd', _toEnd, { - pipeOnly: true, - wrapOutput: false, -}); - -//@ -//@ ### ShellString.prototype.toEnd(file) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ cat('input.txt').toEnd('output.txt'); -//@ ``` -//@ -//@ Analogous to the redirect-and-append operator `>>` in Unix, but works with -//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the -//@ same [ShellString](#shellstringstr) this operated on, to support chaining. 
-function _toEnd(options, file) { - if (!file) common.error('wrong arguments'); - - if (!fs.existsSync(path.dirname(file))) { - common.error('no such file or directory: ' + path.dirname(file)); - } - - try { - fs.appendFileSync(file, this.stdout || this.toString(), 'utf8'); - return this; - } catch (e) { - /* istanbul ignore next */ - common.error('could not append to file (code ' + e.code + '): ' + file, { continue: true }); - } -} -module.exports = _toEnd; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js deleted file mode 100644 index a268586..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/touch.js +++ /dev/null @@ -1,117 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('touch', _touch, { - cmdOptions: { - 'a': 'atime_only', - 'c': 'no_create', - 'd': 'date', - 'm': 'mtime_only', - 'r': 'reference', - }, -}); - -//@ -//@ ### touch([options,] file [, file ...]) -//@ ### touch([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-a`: Change only the access time -//@ + `-c`: Do not create any files -//@ + `-m`: Change only the modification time -//@ + `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) -//@ instead of current time -//@ + `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current -//@ time -//@ -//@ Examples: -//@ -//@ ```javascript -//@ touch('source.js'); -//@ touch('-c', 'path/to/file.js'); -//@ touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); -//@ touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); -//@ touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); -//@ ``` -//@ -//@ Update the access and modification times of each file to the current time. -//@ A file argument that does not exist is created empty, unless `-c` is supplied. -//@ This is a partial implementation of -//@ [`touch(1)`](http://linux.die.net/man/1/touch). Returns a -//@ [ShellString](#shellstringstr) indicating success or failure. -function _touch(opts, files) { - if (!files) { - common.error('no files given'); - } else if (typeof files === 'string') { - files = [].slice.call(arguments, 1); - } else { - common.error('file arg should be a string file path or an Array of string file paths'); - } - - files.forEach(function (f) { - touchFile(opts, f); - }); - return ''; -} - -function touchFile(opts, file) { - var stat = tryStatFile(file); - - if (stat && stat.isDirectory()) { - // don't error just exit - return; - } - - // if the file doesn't already exist and the user has specified --no-create then - // this script is finished - if (!stat && opts.no_create) { - return; - } - - // open the file and then close it. 
this will create it if it doesn't exist but will - // not truncate the file - fs.closeSync(fs.openSync(file, 'a')); - - // - // Set timestamps - // - - // setup some defaults - var now = new Date(); - var mtime = opts.date || now; - var atime = opts.date || now; - - // use reference file - if (opts.reference) { - var refStat = tryStatFile(opts.reference); - if (!refStat) { - common.error('failed to get attributess of ' + opts.reference); - } - mtime = refStat.mtime; - atime = refStat.atime; - } else if (opts.date) { - mtime = opts.date; - atime = opts.date; - } - - if (opts.atime_only && opts.mtime_only) { - // keep the new values of mtime and atime like GNU - } else if (opts.atime_only) { - mtime = stat.mtime; - } else if (opts.mtime_only) { - atime = stat.atime; - } - - fs.utimesSync(file, atime, mtime); -} - -module.exports = _touch; - -function tryStatFile(filePath) { - try { - return common.statFollowLinks(filePath); - } catch (e) { - return null; - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js deleted file mode 100644 index 5802706..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/uniq.js +++ /dev/null @@ -1,93 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -// add c spaces to the left of str -function lpad(c, str) { - var res = '' + str; - if (res.length < c) { - res = Array((c - res.length) + 1).join(' ') + res; - } - return res; -} - -common.register('uniq', _uniq, { - canReceivePipe: true, - cmdOptions: { - 'i': 'ignoreCase', - 'c': 'count', - 'd': 'duplicates', - }, -}); - -//@ -//@ ### uniq([options,] [input, [output]]) -//@ -//@ Available options: -//@ -//@ + `-i`: Ignore case while comparing -//@ + `-c`: Prefix lines by the number of occurrences -//@ + `-d`: Only print duplicate lines, one for each group of identical lines -//@ -//@ Examples: -//@ -//@ ```javascript -//@ uniq('foo.txt'); -//@ uniq('-i', 'foo.txt'); -//@ uniq('-cd', 'foo.txt', 'bar.txt'); -//@ ``` -//@ -//@ Filter adjacent matching lines from `input`. Returns a -//@ [ShellString](#shellstringstr). -function _uniq(options, input, output) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (!pipe) { - if (!input) common.error('no input given'); - - if (!fs.existsSync(input)) { - common.error(input + ': No such file or directory'); - } else if (common.statFollowLinks(input).isDirectory()) { - common.error("error reading '" + input + "'"); - } - } - if (output && fs.existsSync(output) && common.statFollowLinks(output).isDirectory()) { - common.error(output + ': Is a directory'); - } - - var lines = (input ? fs.readFileSync(input, 'utf8') : pipe) - .trimRight() - .split('\n'); - - var compare = function (a, b) { - return options.ignoreCase ? - a.toLocaleLowerCase().localeCompare(b.toLocaleLowerCase()) : - a.localeCompare(b); - }; - var uniqed = lines.reduceRight(function (res, e) { - // Perform uniq -c on the input - if (res.length === 0) { - return [{ count: 1, ln: e }]; - } else if (compare(res[0].ln, e) === 0) { - return [{ count: res[0].count + 1, ln: e }].concat(res.slice(1)); - } else { - return [{ count: 1, ln: e }].concat(res); - } - }, []).filter(function (obj) { - // Do we want only duplicated objects? - return options.duplicates ? obj.count > 1 : true; - }).map(function (obj) { - // Are we tracking the counts of each line? - return (options.count ? 
(lpad(7, obj.count) + ' ') : '') + obj.ln; - }).join('\n') + '\n'; - - if (output) { - (new common.ShellString(uniqed)).to(output); - // if uniq writes to output, nothing is passed to the next command in the pipeline (if any) - return ''; - } else { - return uniqed; - } -} - -module.exports = _uniq; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js deleted file mode 100644 index 8ac7b77..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/shelljs/src/which.js +++ /dev/null @@ -1,119 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('which', _which, { - allowGlobbing: false, - cmdOptions: { - 'a': 'all', - }, -}); - -// XP's system default value for `PATHEXT` system variable, just in case it's not -// set on Windows. -var XP_DEFAULT_PATHEXT = '.com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh'; - -// For earlier versions of NodeJS that doesn't have a list of constants (< v6) -var FILE_EXECUTABLE_MODE = 1; - -function isWindowsPlatform() { - return process.platform === 'win32'; -} - -// Cross-platform method for splitting environment `PATH` variables -function splitPath(p) { - return p ? p.split(path.delimiter) : []; -} - -// Tests are running all cases for this func but it stays uncovered by codecov due to unknown reason -/* istanbul ignore next */ -function isExecutable(pathName) { - try { - // TODO(node-support): replace with fs.constants.X_OK once remove support for node < v6 - fs.accessSync(pathName, FILE_EXECUTABLE_MODE); - } catch (err) { - return false; - } - return true; -} - -function checkPath(pathName) { - return fs.existsSync(pathName) && !common.statFollowLinks(pathName).isDirectory() - && (isWindowsPlatform() || isExecutable(pathName)); -} - -//@ -//@ ### which(command) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var nodeExec = which('node'); -//@ ``` -//@ -//@ Searches for `command` in the system's `PATH`. On Windows, this uses the -//@ `PATHEXT` variable to append the extension if it's not already executable. -//@ Returns a [ShellString](#shellstringstr) containing the absolute path to -//@ `command`. -function _which(options, cmd) { - if (!cmd) common.error('must specify command'); - - var isWindows = isWindowsPlatform(); - var pathArray = splitPath(process.env.PATH); - - var queryMatches = []; - - // No relative/absolute paths provided? - if (!cmd.includes('/')) { - // Assume that there are no extensions to append to queries (this is the - // case for unix) - var pathExtArray = ['']; - if (isWindows) { - // In case the PATHEXT variable is somehow not set (e.g. - // child_process.spawn with an empty environment), use the XP default. 
- var pathExtEnv = process.env.PATHEXT || XP_DEFAULT_PATHEXT; - pathExtArray = splitPath(pathExtEnv.toUpperCase()); - } - - // Search for command in PATH - for (var k = 0; k < pathArray.length; k++) { - // already found it - if (queryMatches.length > 0 && !options.all) break; - - var attempt = path.resolve(pathArray[k], cmd); - - if (isWindows) { - attempt = attempt.toUpperCase(); - } - - var match = attempt.match(/\.[^<>:"/|?*.]+$/); - if (match && pathExtArray.includes(match[0])) { // this is Windows-only - // The user typed a query with the file extension, like - // `which('node.exe')` - if (checkPath(attempt)) { - queryMatches.push(attempt); - break; - } - } else { // All-platforms - // Cycle through the PATHEXT array, and check each extension - // Note: the array is always [''] on Unix - for (var i = 0; i < pathExtArray.length; i++) { - var ext = pathExtArray[i]; - var newAttempt = attempt + ext; - if (checkPath(newAttempt)) { - queryMatches.push(newAttempt); - break; - } - } - } - } - } else if (checkPath(cmd)) { // a valid absolute or relative path - queryMatches.push(path.resolve(cmd)); - } - - if (queryMatches.length > 0) { - return options.all ? queryMatches : queryMatches[0]; - } - return options.all ? [] : null; -} -module.exports = _which; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt deleted file mode 100644 index eead04a..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/LICENSE.txt +++ /dev/null @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) 2015, Contributors - -Permission to use, copy, modify, and/or distribute this software -for any purpose with or without fee is hereby granted, provided -that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE -LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES -OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, -ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md deleted file mode 100644 index f9c7c00..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# signal-exit - -[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) -[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) -[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) -[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) - -When you want to fire an event no matter how a process exits: - -* reaching the end of execution. -* explicitly having `process.exit(code)` called. -* having `process.kill(pid, sig)` called. -* receiving a fatal signal from outside the process - -Use `signal-exit`. 
- -```js -var onExit = require('signal-exit') - -onExit(function (code, signal) { - console.log('process exited!') -}) -``` - -## API - -`var remove = onExit(function (code, signal) {}, options)` - -The return value of the function is a function that will remove the -handler. - -Note that the function *only* fires for signals if the signal would -cause the process to exit. That is, there are no other listeners, and -it is a fatal signal. - -## Options - -* `alwaysLast`: Run this handler after any other signal or exit - handlers. This causes `process.emit` to be monkeypatched. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js deleted file mode 100644 index 93703f3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/index.js +++ /dev/null @@ -1,202 +0,0 @@ -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. -// grab a reference to node's real process object right away -var process = global.process - -const processOk = function (process) { - return process && - typeof process === 'object' && - typeof process.removeListener === 'function' && - typeof process.emit === 'function' && - typeof process.reallyExit === 'function' && - typeof process.listeners === 'function' && - typeof process.kill === 'function' && - typeof process.pid === 'number' && - typeof process.on === 'function' -} - -// some kind of non-node environment, just no-op -/* istanbul ignore if */ -if (!processOk(process)) { - module.exports = function () { - return function () {} - } -} else { - var assert = require('assert') - var signals = require('./signals.js') - var isWin = /^win/i.test(process.platform) - - var EE = require('events') - /* istanbul ignore if */ - if (typeof EE !== 'function') { - EE = EE.EventEmitter - } - - var emitter - if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ - } else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} - } - - // Because this emitter is a global, we have to check to see if a - // previous version of this library failed to enable infinite listeners. - // I know what you're about to say. But literally everything about - // signal-exit is a compromise with evil. Get used to it. 
- if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true - } - - module.exports = function (cb, opts) { - /* istanbul ignore if */ - if (!processOk(global.process)) { - return function () {} - } - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') - - if (loaded === false) { - load() - } - - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } - - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() - } - } - emitter.on(ev, cb) - - return remove - } - - var unload = function unload () { - if (!loaded || !processOk(global.process)) { - return - } - loaded = false - - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 - } - module.exports.unload = unload - - var emit = function emit (event, code, signal) { - /* istanbul ignore if */ - if (emitter.emitted[event]) { - return - } - emitter.emitted[event] = true - emitter.emit(event, code, signal) - } - - // { : , ... } - var sigListeners = {} - signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - /* istanbul ignore if */ - if (!processOk(global.process)) { - return - } - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. - var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' - } - /* istanbul ignore next */ - process.kill(process.pid, sig) - } - } - }) - - module.exports.signals = function () { - return signals - } - - var loaded = false - - var load = function load () { - if (loaded || !processOk(global.process)) { - return - } - loaded = true - - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. 
- emitter.count += 1 - - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false - } - }) - - process.emit = processEmit - process.reallyExit = processReallyExit - } - module.exports.load = load - - var originalProcessReallyExit = process.reallyExit - var processReallyExit = function processReallyExit (code) { - /* istanbul ignore if */ - if (!processOk(global.process)) { - return - } - process.exitCode = code || /* istanbul ignore next */ 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) - } - - var originalProcessEmit = process.emit - var processEmit = function processEmit (ev, arg) { - if (ev === 'exit' && processOk(global.process)) { - /* istanbul ignore else */ - if (arg !== undefined) { - process.exitCode = arg - } - var ret = originalProcessEmit.apply(this, arguments) - /* istanbul ignore next */ - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - return ret - } else { - return originalProcessEmit.apply(this, arguments) - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json deleted file mode 100644 index e1a0031..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "signal-exit", - "version": "3.0.7", - "description": "when you want to fire an event no matter how a process exits.", - "main": "index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "files": [ - "index.js", - "signals.js" - ], - "repository": { - "type": "git", - "url": "https://github.com/tapjs/signal-exit.git" - }, - "keywords": [ - "signal", - "exit" - ], - "author": "Ben Coe ", - "license": "ISC", - "bugs": { - "url": "https://github.com/tapjs/signal-exit/issues" - }, - "homepage": "https://github.com/tapjs/signal-exit", - "devDependencies": { - "chai": "^3.5.0", - "coveralls": "^3.1.1", - "nyc": "^15.1.0", - "standard-version": "^9.3.1", - "tap": "^15.1.1" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js deleted file mode 100644 index 3bd67a8..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/signal-exit/signals.js +++ /dev/null @@ -1,53 +0,0 @@ -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. 
-// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] - -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) -} - -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js deleted file mode 100644 index 78fc0c5..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/index.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -module.exports = input => { - const LF = typeof input === 'string' ? '\n' : '\n'.charCodeAt(); - const CR = typeof input === 'string' ? '\r' : '\r'.charCodeAt(); - - if (input[input.length - 1] === LF) { - input = input.slice(0, input.length - 1); - } - - if (input[input.length - 1] === CR) { - input = input.slice(0, input.length - 1); - } - - return input; -}; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json deleted file mode 100644 index d9f2a6c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "strip-final-newline", - "version": "2.0.0", - "description": "Strip the final newline character from a string/buffer", - "license": "MIT", - "repository": "sindresorhus/strip-final-newline", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "test": "xo && ava" - }, - "files": [ - "index.js" - ], - "keywords": [ - "strip", - "trim", - "remove", - "delete", - "final", - "last", - "end", - "file", - "newline", - "linebreak", - "character", - "string", - "buffer" - ], - "devDependencies": { - "ava": "^0.25.0", - "xo": "^0.23.0" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md deleted file mode 100644 index 32dfd50..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/strip-final-newline/readme.md +++ /dev/null @@ -1,30 +0,0 @@ -# strip-final-newline [![Build Status](https://travis-ci.com/sindresorhus/strip-final-newline.svg?branch=master)](https://travis-ci.com/sindresorhus/strip-final-newline) - -> Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from a string/buffer - -Can be useful when parsing the output of, for example, `ChildProcess#execFile`, as [binaries usually output a newline at the end](https://stackoverflow.com/questions/729692/why-should-text-files-end-with-a-newline). Normally, you would use `stdout.trim()`, but that would also remove newlines at the start and whitespace. - - -## Install - -``` -$ npm install strip-final-newline -``` - - -## Usage - -```js -const stripFinalNewline = require('strip-final-newline'); - -stripFinalNewline('foo\nbar\n\n'); -//=> 'foo\nbar\n' - -stripFinalNewline(Buffer.from('foo\nbar\n\n')).toString(); -//=> 'foo\nbar\n' -``` - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE deleted file mode 100644 index 7cccaf9..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md deleted file mode 100644 index 38887da..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/README.md +++ /dev/null @@ -1,305 +0,0 @@ -# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) - -> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save to-regex-range -``` - -
-What does this do? - -
- -This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. - -**Example** - -```js -const toRegexRange = require('to-regex-range'); -const regex = new RegExp(toRegexRange('15', '95')); -``` - -A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). - -
- -
- -
-Why use this library? - -
- -### Convenience - -Creating regular expressions for matching numbers gets deceptively complicated pretty fast. - -For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: - -* regex for matching `1` => `/1/` (easy enough) -* regex for matching `1` through `5` => `/[1-5]/` (not bad...) -* regex for matching `1` or `5` => `/(1|5)/` (still easy...) -* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) -* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) -* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) -* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) - -The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. - -**Learn more** - -If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. - -### Heavily tested - -As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. - -Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. - -### Optimized - -Generated regular expressions are optimized: - -* duplicate sequences and character classes are reduced using quantifiers -* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative -* uses fragment caching to avoid processing the same exact string more than once - -
- -
- -## Usage - -Add this library to your javascript application with the following line of code - -```js -const toRegexRange = require('to-regex-range'); -``` - -The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). - -```js -const source = toRegexRange('15', '95'); -//=> 1[5-9]|[2-8][0-9]|9[0-5] - -const regex = new RegExp(`^${source}$`); -console.log(regex.test('14')); //=> false -console.log(regex.test('50')); //=> true -console.log(regex.test('94')); //=> true -console.log(regex.test('96')); //=> false -``` - -## Options - -### options.capture - -**Type**: `boolean` - -**Deafault**: `undefined` - -Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. - -```js -console.log(toRegexRange('-10', '10')); -//=> -[1-9]|-?10|[0-9] - -console.log(toRegexRange('-10', '10', { capture: true })); -//=> (-[1-9]|-?10|[0-9]) -``` - -### options.shorthand - -**Type**: `boolean` - -**Deafault**: `undefined` - -Use the regex shorthand for `[0-9]`: - -```js -console.log(toRegexRange('0', '999999')); -//=> [0-9]|[1-9][0-9]{1,5} - -console.log(toRegexRange('0', '999999', { shorthand: true })); -//=> \d|[1-9]\d{1,5} -``` - -### options.relaxZeros - -**Type**: `boolean` - -**Default**: `true` - -This option relaxes matching for leading zeros when when ranges are zero-padded. - -```js -const source = toRegexRange('-0010', '0010'); -const regex = new RegExp(`^${source}$`); -console.log(regex.test('-10')); //=> true -console.log(regex.test('-010')); //=> true -console.log(regex.test('-0010')); //=> true -console.log(regex.test('10')); //=> true -console.log(regex.test('010')); //=> true -console.log(regex.test('0010')); //=> true -``` - -When `relaxZeros` is false, matching is strict: - -```js -const source = toRegexRange('-0010', '0010', { relaxZeros: false }); -const regex = new RegExp(`^${source}$`); -console.log(regex.test('-10')); //=> false -console.log(regex.test('-010')); //=> false -console.log(regex.test('-0010')); //=> true -console.log(regex.test('10')); //=> false -console.log(regex.test('010')); //=> false -console.log(regex.test('0010')); //=> true -``` - -## Examples - -| **Range** | **Result** | **Compile time** | -| --- | --- | --- | -| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ | -| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ | -| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ | -| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ | -| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ | -| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ | -| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ | -| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ | -| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ | -| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ | -| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ | -| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ | -| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ | -| `toRegexRange(5, 5)` | `5` | _8μs_ | -| `toRegexRange(5, 6)` | `5\|6` | _11μs_ | -| `toRegexRange(1, 2)` | `1\|2` | _6μs_ | -| 
`toRegexRange(1, 5)` | `[1-5]` | _15μs_ | -| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ | -| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ | -| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ | -| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ | -| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ | -| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ | -| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ | - -## Heads up! - -**Order of arguments** - -When the `min` is larger than the `max`, values will be flipped to create a valid range: - -```js -toRegexRange('51', '29'); -``` - -Is effectively flipped to: - -```js -toRegexRange('29', '51'); -//=> 29|[3-4][0-9]|5[0-1] -``` - -**Steps / increments** - -This library does not support steps (increments). A pr to add support would be welcome. - -## History - -### v2.0.0 - 2017-04-21 - -**New features** - -Adds support for zero-padding! - -### v1.0.0 - -**Optimizations** - -Repeating ranges are now grouped using quantifiers. rocessing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. - -## Attribution - -Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") -* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") -* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") -* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") -* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 63 | [jonschlinkert](https://github.com/jonschlinkert) | -| 3 | [doowb](https://github.com/doowb) | -| 2 | [realityking](https://github.com/realityking) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! - - - - - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js deleted file mode 100644 index 77fbace..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/index.js +++ /dev/null @@ -1,288 +0,0 @@ -/*! - * to-regex-range - * - * Copyright (c) 2015-present, Jon Schlinkert. - * Released under the MIT License. 
- */ - -'use strict'; - -const isNumber = require('is-number'); - -const toRegexRange = (min, max, options) => { - if (isNumber(min) === false) { - throw new TypeError('toRegexRange: expected the first argument to be a number'); - } - - if (max === void 0 || min === max) { - return String(min); - } - - if (isNumber(max) === false) { - throw new TypeError('toRegexRange: expected the second argument to be a number.'); - } - - let opts = { relaxZeros: true, ...options }; - if (typeof opts.strictZeros === 'boolean') { - opts.relaxZeros = opts.strictZeros === false; - } - - let relax = String(opts.relaxZeros); - let shorthand = String(opts.shorthand); - let capture = String(opts.capture); - let wrap = String(opts.wrap); - let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; - - if (toRegexRange.cache.hasOwnProperty(cacheKey)) { - return toRegexRange.cache[cacheKey].result; - } - - let a = Math.min(min, max); - let b = Math.max(min, max); - - if (Math.abs(a - b) === 1) { - let result = min + '|' + max; - if (opts.capture) { - return `(${result})`; - } - if (opts.wrap === false) { - return result; - } - return `(?:${result})`; - } - - let isPadded = hasPadding(min) || hasPadding(max); - let state = { min, max, a, b }; - let positives = []; - let negatives = []; - - if (isPadded) { - state.isPadded = isPadded; - state.maxLen = String(state.max).length; - } - - if (a < 0) { - let newMin = b < 0 ? Math.abs(b) : 1; - negatives = splitToPatterns(newMin, Math.abs(a), state, opts); - a = state.a = 0; - } - - if (b >= 0) { - positives = splitToPatterns(a, b, state, opts); - } - - state.negatives = negatives; - state.positives = positives; - state.result = collatePatterns(negatives, positives, opts); - - if (opts.capture === true) { - state.result = `(${state.result})`; - } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { - state.result = `(?:${state.result})`; - } - - toRegexRange.cache[cacheKey] = state; - return state.result; -}; - -function collatePatterns(neg, pos, options) { - let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; - let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; - let intersected = filterPatterns(neg, pos, '-?', true, options) || []; - let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); - return subpatterns.join('|'); -} - -function splitToRanges(min, max) { - let nines = 1; - let zeros = 1; - - let stop = countNines(min, nines); - let stops = new Set([max]); - - while (min <= stop && stop <= max) { - stops.add(stop); - nines += 1; - stop = countNines(min, nines); - } - - stop = countZeros(max + 1, zeros) - 1; - - while (min < stop && stop <= max) { - stops.add(stop); - zeros += 1; - stop = countZeros(max + 1, zeros) - 1; - } - - stops = [...stops]; - stops.sort(compare); - return stops; -} - -/** - * Convert a range to a regex pattern - * @param {Number} `start` - * @param {Number} `stop` - * @return {String} - */ - -function rangeToPattern(start, stop, options) { - if (start === stop) { - return { pattern: start, count: [], digits: 0 }; - } - - let zipped = zip(start, stop); - let digits = zipped.length; - let pattern = ''; - let count = 0; - - for (let i = 0; i < digits; i++) { - let [startDigit, stopDigit] = zipped[i]; - - if (startDigit === stopDigit) { - pattern += startDigit; - - } else if (startDigit !== '0' || stopDigit !== '9') { - pattern += toCharacterClass(startDigit, stopDigit, options); - - } else { - count++; - } - } - - if (count) { - pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; - } - - return { pattern, count: [count], digits }; -} - -function splitToPatterns(min, max, tok, options) { - let ranges = splitToRanges(min, max); - let tokens = []; - let start = min; - let prev; - - for (let i = 0; i < ranges.length; i++) { - let max = ranges[i]; - let obj = rangeToPattern(String(start), String(max), options); - let zeros = ''; - - if (!tok.isPadded && prev && prev.pattern === obj.pattern) { - if (prev.count.length > 1) { - prev.count.pop(); - } - - prev.count.push(obj.count[0]); - prev.string = prev.pattern + toQuantifier(prev.count); - start = max + 1; - continue; - } - - if (tok.isPadded) { - zeros = padZeros(max, tok, options); - } - - obj.string = zeros + obj.pattern + toQuantifier(obj.count); - tokens.push(obj); - start = max + 1; - prev = obj; - } - - return tokens; -} - -function filterPatterns(arr, comparison, prefix, intersection, options) { - let result = []; - - for (let ele of arr) { - let { string } = ele; - - // only push if _both_ are negative... - if (!intersection && !contains(comparison, 'string', string)) { - result.push(prefix + string); - } - - // or _both_ are positive - if (intersection && contains(comparison, 'string', string)) { - result.push(prefix + string); - } - } - return result; -} - -/** - * Zip strings - */ - -function zip(a, b) { - let arr = []; - for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); - return arr; -} - -function compare(a, b) { - return a > b ? 1 : b > a ? -1 : 0; -} - -function contains(arr, key, val) { - return arr.some(ele => ele[key] === val); -} - -function countNines(min, len) { - return Number(String(min).slice(0, -len) + '9'.repeat(len)); -} - -function countZeros(integer, zeros) { - return integer - (integer % Math.pow(10, zeros)); -} - -function toQuantifier(digits) { - let [start = 0, stop = ''] = digits; - if (stop || start > 1) { - return `{${start + (stop ? ',' + stop : '')}}`; - } - return ''; -} - -function toCharacterClass(a, b, options) { - return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; -} - -function hasPadding(str) { - return /^-?(0+)\d/.test(str); -} - -function padZeros(value, tok, options) { - if (!tok.isPadded) { - return value; - } - - let diff = Math.abs(tok.maxLen - String(value).length); - let relax = options.relaxZeros !== false; - - switch (diff) { - case 0: - return ''; - case 1: - return relax ? '0?' : '0'; - case 2: - return relax ? '0{0,2}' : '00'; - default: { - return relax ? `0{0,${diff}}` : `0{${diff}}`; - } - } -} - -/** - * Cache - */ - -toRegexRange.cache = {}; -toRegexRange.clearCache = () => (toRegexRange.cache = {}); - -/** - * Expose `toRegexRange` - */ - -module.exports = toRegexRange; diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json deleted file mode 100644 index 4ef194f..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/to-regex-range/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "to-regex-range", - "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", - "version": "5.0.1", - "homepage": "https://github.com/micromatch/to-regex-range", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Rouven Weßling (www.rouvenwessling.de)" - ], - "repository": "micromatch/to-regex-range", - "bugs": { - "url": "https://github.com/micromatch/to-regex-range/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8.0" - }, - "scripts": { - "test": "mocha" - }, - "dependencies": { - "is-number": "^7.0.0" - }, - "devDependencies": { - "fill-range": "^6.0.0", - "gulp-format-md": "^2.0.0", - "mocha": "^6.0.2", - "text-table": "^0.2.0", - "time-diff": "^0.3.1" - }, - "keywords": [ - "bash", - "date", - "expand", - "expansion", - "expression", - "glob", - "match", - "match date", - "match number", - "match numbers", - "match year", - "matches", - "matching", - "number", - "numbers", - "numerical", - "range", - "ranges", - "regex", - "regexp", - "regular", - "regular expression", - "sequence" - ], - "verb": { - "layout": "default", - "toc": false, - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "helpers": { - "examples": { - "displayName": "examples" - } - }, - "related": { - "list": [ - "expand-range", - "fill-range", - "micromatch", - "repeat-element", - "repeat-string" - ] - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md deleted file mode 100644 index 7fb1f20..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/CHANGELOG.md +++ /dev/null @@ -1,166 +0,0 @@ -# Changes - - -## 2.0.2 - -* Rename bin to `node-which` - -## 2.0.1 - -* generate changelog and publish on version bump -* enforce 100% test coverage -* Promise interface - -## 2.0.0 - -* Parallel tests, modern JavaScript, and drop support for node < 8 - -## 1.3.1 - -* update deps -* update travis - -## v1.3.0 - -* Add nothrow option to which.sync -* update tap - -## v1.2.14 - -* appveyor: drop node 5 and 0.x -* travis-ci: add node 6, drop 0.x - -## v1.2.13 - -* test: Pass missing option to pass on windows -* update tap -* update isexe to 2.0.0 -* neveragain.tech pledge request - -## v1.2.12 - -* Removed unused require - -## v1.2.11 - -* Prevent changelog script from being included in package - -## v1.2.10 - -* Use env.PATH only, not env.Path - -## v1.2.9 - -* fix for paths starting with ../ -* Remove unused `is-absolute` module - -## v1.2.8 - -* bullet items in changelog that contain (but don't start with) # - -## v1.2.7 - -* strip 'update changelog' changelog entries out of changelog - -## v1.2.6 - -* make the changelog bulleted - -## v1.2.5 - -* make a changelog, and keep it up to date -* don't include tests in package -* Properly handle relative-path executables -* appveyor -* Attach error code to Not Found error -* Make tests pass on Windows - -## v1.2.4 - -* Fix typo - -## v1.2.3 - -* update isexe, fix regression in pathExt handling - -## v1.2.2 - -* update deps, use isexe module, test windows - -## v1.2.1 - -* Sometimes windows PATH entries are quoted -* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode. 
-* doc cli - -## v1.2.0 - -* Add support for opt.all and -as cli flags -* test the bin -* update travis -* Allow checking for multiple programs in bin/which -* tap 2 - -## v1.1.2 - -* travis -* Refactored and fixed undefined error on Windows -* Support strict mode - -## v1.1.1 - -* test +g exes against secondary groups, if available -* Use windows exe semantics on cygwin & msys -* cwd should be first in path on win32, not last -* Handle lower-case 'env.Path' on Windows -* Update docs -* use single-quotes - -## v1.1.0 - -* Add tests, depend on is-absolute - -## v1.0.9 - -* which.js: root is allowed to execute files owned by anyone - -## v1.0.8 - -* don't use graceful-fs - -## v1.0.7 - -* add license to package.json - -## v1.0.6 - -* isc license - -## 1.0.5 - -* Awful typo - -## 1.0.4 - -* Test for path absoluteness properly -* win: Allow '' as a pathext if cmd has a . in it - -## 1.0.3 - -* Remove references to execPath -* Make `which.sync()` work on Windows by honoring the PATHEXT variable. -* Make `isExe()` always return true on Windows. -* MIT - -## 1.0.2 - -* Only files can be exes - -## 1.0.1 - -* Respect the PATHEXT env for win32 support -* should 0755 the bin -* binary -* guts -* package -* 1st diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE deleted file mode 100644 index 19129e3..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md deleted file mode 100644 index cd83350..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# which - -Like the unix `which` utility. - -Finds the first instance of a specified executable in the PATH -environment variable. Does not cache the results, so `hash -r` is not -needed when the PATH changes. - -## USAGE - -```javascript -var which = require('which') - -// async usage -which('node', function (er, resolvedPath) { - // er is returned if no "node" is found on the PATH - // if it is found, then the absolute path to the exec is returned -}) - -// or promise -which('node').then(resolvedPath => { ... }).catch(er => { ... not found ... }) - -// sync usage -// throws if not found -var resolved = which.sync('node') - -// if nothrow option is used, returns null if not found -resolved = which.sync('node', {nothrow: true}) - -// Pass options to override the PATH and PATHEXT environment vars. 
-which('node', { path: someOtherPath }, function (er, resolved) { - if (er) - throw er - console.log('found at %j', resolved) -}) -``` - -## CLI USAGE - -Same as the BSD `which(1)` binary. - -``` -usage: which [-as] program ... -``` - -## OPTIONS - -You may pass an options object as the second argument. - -- `path`: Use instead of the `PATH` environment variable. -- `pathExt`: Use instead of the `PATHEXT` environment variable. -- `all`: Return all matches, instead of just the first one. Note that - this means the function returns an array of strings instead of a - single string. diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which deleted file mode 100755 index 7cee372..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/bin/node-which +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env node -var which = require("../") -if (process.argv.length < 3) - usage() - -function usage () { - console.error('usage: which [-as] program ...') - process.exit(1) -} - -var all = false -var silent = false -var dashdash = false -var args = process.argv.slice(2).filter(function (arg) { - if (dashdash || !/^-/.test(arg)) - return true - - if (arg === '--') { - dashdash = true - return false - } - - var flags = arg.substr(1).split('') - for (var f = 0; f < flags.length; f++) { - var flag = flags[f] - switch (flag) { - case 's': - silent = true - break - case 'a': - all = true - break - default: - console.error('which: illegal option -- ' + flag) - usage() - } - } - return false -}) - -process.exit(args.reduce(function (pv, current) { - try { - var f = which.sync(current, { all: all }) - if (all) - f = f.join('\n') - if (!silent) - console.log(f) - return pv; - } catch (e) { - return 1; - } -}, 0)) diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json deleted file mode 100644 index 97ad7fb..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "author": "Isaac Z. Schlueter (http://blog.izs.me)", - "name": "which", - "description": "Like which(1) unix command. 
Find the first instance of an executable in the PATH.", - "version": "2.0.2", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-which.git" - }, - "main": "which.js", - "bin": { - "node-which": "./bin/node-which" - }, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "devDependencies": { - "mkdirp": "^0.5.0", - "rimraf": "^2.6.2", - "tap": "^14.6.9" - }, - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublish": "npm run changelog", - "prechangelog": "bash gen-changelog.sh", - "changelog": "git add CHANGELOG.md", - "postchangelog": "git commit -m 'update changelog - '${npm_package_version}", - "postpublish": "git push origin --follow-tags" - }, - "files": [ - "which.js", - "bin/node-which" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js b/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js deleted file mode 100644 index 82afffd..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/node_modules/which/which.js +++ /dev/null @@ -1,125 +0,0 @@ -const isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' - -const path = require('path') -const COLON = isWindows ? ';' : ':' -const isexe = require('isexe') - -const getNotFoundError = (cmd) => - Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) - -const getPathInfo = (cmd, opt) => { - const colon = opt.colon || COLON - - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? [''] - : ( - [ - // windows always checks the cwd first - ...(isWindows ? [process.cwd()] : []), - ...(opt.path || process.env.PATH || - /* istanbul ignore next: very unusual */ '').split(colon), - ] - ) - const pathExtExe = isWindows - ? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' - : '' - const pathExt = isWindows ? pathExtExe.split(colon) : [''] - - if (isWindows) { - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } - - return { - pathEnv, - pathExt, - pathExtExe, - } -} - -const which = (cmd, opt, cb) => { - if (typeof opt === 'function') { - cb = opt - opt = {} - } - if (!opt) - opt = {} - - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) - const found = [] - - const step = i => new Promise((resolve, reject) => { - if (i === pathEnv.length) - return opt.all && found.length ? resolve(found) - : reject(getNotFoundError(cmd)) - - const ppRaw = pathEnv[i] - const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw - - const pCmd = path.join(pathPart, cmd) - const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd - : pCmd - - resolve(subStep(p, i, 0)) - }) - - const subStep = (p, i, ii) => new Promise((resolve, reject) => { - if (ii === pathExt.length) - return resolve(step(i + 1)) - const ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return resolve(p + ext) - } - return resolve(subStep(p, i, ii + 1)) - }) - }) - - return cb ? 
step(0).then(res => cb(null, res), cb) : step(0) -} - -const whichSync = (cmd, opt) => { - opt = opt || {} - - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) - const found = [] - - for (let i = 0; i < pathEnv.length; i ++) { - const ppRaw = pathEnv[i] - const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw - - const pCmd = path.join(pathPart, cmd) - const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd - : pCmd - - for (let j = 0; j < pathExt.length; j ++) { - const cur = p + pathExt[j] - try { - const is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur - } - } catch (ex) {} - } - } - - if (opt.all && found.length) - return found - - if (opt.nothrow) - return null - - throw getNotFoundError(cmd) -} - -module.exports = which -which.sync = whichSync diff --git a/node_modules/npm-mas-mas/cmaki_identifier/npm-do b/node_modules/npm-mas-mas/cmaki_identifier/npm-do deleted file mode 100644 index 4452ece..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/npm-do +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -function npm-do { (PATH=$(npm bin):$PATH; eval $@;) } -# set -x PATH ./node_modules/.bin $PATH diff --git a/node_modules/npm-mas-mas/cmaki_identifier/package-lock.json b/node_modules/npm-mas-mas/cmaki_identifier/package-lock.json deleted file mode 100644 index 0c82187..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/package-lock.json +++ /dev/null @@ -1,480 +0,0 @@ -{ - "name": "cmaki_identifier", - "version": "1.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "cmaki_identifier", - "version": "1.0.0", - "hasInstallScript": true, - "license": "MIT", - "devDependencies": { - "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": 
"sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/fast-glob": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", - "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.8" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastq": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", - "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true, - "license": "ISC" - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true, - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/npm-mas-mas": { - "version": "0.0.1", - "resolved": "git+ssh://git@github.com/makiolo/npm-mas-mas.git#461824400908b1147f63240c96a4eb52b3e434bb", - "dev": true, - "license": "MIT", - "dependencies": { - "shelljs": ">=0.8.5" - }, - "bin": { - "cmaki": "cmaki_scripts/cmaki.js" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/reusify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", - "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/shelljs": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.10.0.tgz", - "integrity": "sha512-Jex+xw5Mg2qMZL3qnzXIfaxEtBaC4n7xifqaqtrZDdlheR70OGkydrPJWT0V1cA1k3nanC86x9FwAmQl6w3Klw==", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "execa": "^5.1.1", - "fast-glob": "^3.3.2" - }, - "engines": { - "node": 
">=18" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true, - "license": "ISC" - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - } - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/package.json b/node_modules/npm-mas-mas/cmaki_identifier/package.json deleted file mode 100644 index ecdd629..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/package.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "cmaki_identifier", - "version": "1.0.0", - "description": "identify your platform", - "scripts": { - "clean": "cmaki clean", - "setup": "cmaki setup", - "compile": "cmaki compile", - "install": "cmaki setup && cmaki compile", - "test": "cmaki test", - "upload": "cmaki upload" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/makiolo/cmaki_identifier.git" - }, - "keywords": [ - "c++", - "identifier" - ], - "author": "Ricardo Marmolejo García", - "license": "MIT", - "bugs": { - "url": "https://github.com/makiolo/cmaki_identifier/issues" - }, - "homepage": "https://github.com/makiolo/cmaki_identifier#readme", - "devDependencies": { - "npm-mas-mas": "git+https://github.com/makiolo/npm-mas-mas.git" - } -} diff --git a/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd b/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd deleted file mode 100644 index 36bd277..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/setup.cmd +++ /dev/null @@ -1,7 +0,0 @@ -@echo off -if exist "boostorg_predef" ( - rmdir /s /q boostorg_predef -) -git clone -q https://github.com/boostorg/predef.git boostorg_predef - -..\cmaki_scripts\setup.cmd diff --git a/node_modules/npm-mas-mas/cmaki_identifier/setup.sh b/node_modules/npm-mas-mas/cmaki_identifier/setup.sh deleted file mode 100644 index 4e1af5c..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/setup.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -if [ -d "boostorg_predef" ]; then - rm -Rf boostorg_predef -fi -git clone -q https://github.com/boostorg/predef.git boostorg_predef - -../cmaki_scripts/setup.sh diff --git a/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt b/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt deleted file mode 100644 index b806a9b..0000000 --- 
a/node_modules/npm-mas-mas/cmaki_identifier/tests/CMakeLists.txt +++ /dev/null @@ -1,33 +0,0 @@ -if(${CMAKE_SYSTEM_NAME} MATCHES "Android") - set(CMAKE_EXE_LINKER_FLAGS "-static-libgcc -static-libstdc++ -static") -endif() - -add_executable(cmaki_identifier cmaki_identifier.cpp) - -install(TARGETS cmaki_identifier DESTINATION $ENV{CMAKI_INSTALL}) -install(FILES ../cmaki_identifier.cmake DESTINATION $ENV{CMAKI_INSTALL}) -install(PROGRAMS ../cmaki_identifier.sh DESTINATION $ENV{CMAKI_INSTALL}) -install(PROGRAMS ../cmaki_emulator.sh DESTINATION $ENV{CMAKI_INSTALL}) -add_test( - NAME all - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -add_test( - NAME os - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -add_test( - NAME arch - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -add_test( - NAME compiler - COMMAND cmaki_identifier - WORKING_DIRECTORY $ENV{CMAKI_INSTALL} - ) -set_tests_properties(os PROPERTIES ENVIRONMENT "CMAKI_INFO=OS") -set_tests_properties(arch PROPERTIES ENVIRONMENT "CMAKI_INFO=ARCH") -set_tests_properties(compiler PROPERTIES ENVIRONMENT "CMAKI_INFO=COMPILER") diff --git a/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp b/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp deleted file mode 100644 index 6cb91e7..0000000 --- a/node_modules/npm-mas-mas/cmaki_identifier/tests/cmaki_identifier.cpp +++ /dev/null @@ -1,345 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#ifdef __EMSCRIPTEN__ -#include -#endif - -#define STR_HELPER(x) #x -#define STR(x) STR_HELPER(x) - -#ifdef _WIN32 - -// problems with variadic in windows -std::string get_environment(const char* varname, const char* default_) -{ - char* varname_str = getenv(varname); - std::string value_str; - if(varname_str == NULL) - value_str = default_; - else - value_str = varname_str; - return value_str; -} - -#else - -template -std::string get_environment(T default_) -{ - return default_; -} - -template -std::string get_environment(T varname, Args ... others) -{ - char* varname_str = getenv(varname); - std::string value_str; - if(varname_str == NULL) - value_str = get_environment(others...); - else - value_str = varname_str; - return value_str; -} - -#endif - -int main() -{ -#ifdef __EMSCRIPTEN__ - #define OPERATIVE_SYSTEM "javascript" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_WINDOWS - #define OPERATIVE_SYSTEM "windows" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_ANDROID - #define OPERATIVE_SYSTEM "android" - #define OPERATIVE_RESTRICTION "_api_" STR(__ANDROID_API__) -#elif BOOST_OS_LINUX - #define OPERATIVE_SYSTEM "linux" - #ifdef __GLIBC__ - #define OPERATIVE_RESTRICTION "_glibc_" STR(__GLIBC__) "." 
STR(__GLIBC_MINOR__) - #else - #define OPERATIVE_RESTRICTION "" - #endif -#elif BOOST_OS_MACOS - #define OPERATIVE_SYSTEM "macos" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_AIX - #define OPERATIVE_SYSTEM "aix" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_AMIGAOS - #define OPERATIVE_SYSTEM "amigaos" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_BEOS - #define OPERATIVE_SYSTEM "beos" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_BSD - #if BOOST_OS_BSD_DRAGONFLY - #define OPERATIVE_SYSTEM "dragonfly_bsd" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_FREE - #define OPERATIVE_SYSTEM "freebsd" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_BSDI - #define OPERATIVE_SYSTEM "bsdios" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_NET - #define OPERATIVE_SYSTEM "netbsd" - #define OPERATIVE_RESTRICTION "" - #elif BOOST_OS_BSD_OPEN - #define OPERATIVE_SYSTEM "openbsd" - #define OPERATIVE_RESTRICTION "" - #endif -#elif BOOST_OS_CYGWIN - #define OPERATIVE_SYSTEM "cygwin" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_HPUX - #define OPERATIVE_SYSTEM "hpux" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_IRIX - #define OPERATIVE_SYSTEM "irix" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_OS400 - #define OPERATIVE_SYSTEM "os400" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_QNX - #define OPERATIVE_SYSTEM "qnx" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_SOLARIS - #define OPERATIVE_SYSTEM "solaris" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_UNIX - #define OPERATIVE_SYSTEM "unix" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_SVR4 - #define OPERATIVE_SYSTEM "svr4" - #define OPERATIVE_RESTRICTION "" -#elif BOOST_OS_VMS - #define OPERATIVE_SYSTEM "vms" - #define OPERATIVE_RESTRICTION "" -#else - #define OPERATIVE_SYSTEM "unknown_so" - #define OPERATIVE_RESTRICTION "" -#endif - -#if BOOST_ARCH_X86 - #if BOOST_ARCH_X86_32 - #define ARCHITECTURE "32" - #elif BOOST_ARCH_X86_64 - #define ARCHITECTURE "64" - #else - #define ARCHITECTURE "unknown_arch" - #endif -#elif BOOST_ARCH_ARM - #define ARCHITECTURE "arm" -#elif BOOST_ARCH_ALPHA - #define ARCHITECTURE "alpha" -#elif BOOST_ARCH_BLACKFIN - #define ARCHITECTURE "blackfin" -#elif BOOST_ARCH_CONVEX - #define ARCHITECTURE "convex" -#elif BOOST_ARCH_IA64 - #define ARCHITECTURE "ia64" -#elif BOOST_ARCH_M68K - #define ARCHITECTURE "m68k" -#elif BOOST_ARCH_MIPS - #define ARCHITECTURE "mips" -#elif BOOST_ARCH_PARISK - #define ARCHITECTURE "parisk" -#elif BOOST_ARCH_PPC - #define ARCHITECTURE "ppc" -#elif BOOST_ARCH_PYRAMID - #define ARCHITECTURE "pyramid" -#elif BOOST_ARCH_RS6000 - #define ARCHITECTURE "rs6000" -#elif BOOST_ARCH_SPARC - #define ARCHITECTURE "sparc" -#elif BOOST_ARCH_SH - #define ARCHITECTURE "sh" -#elif BOOST_ARCH_SYS370 - #define ARCHITECTURE "sys370" -#elif BOOST_ARCH_SYS390 - #define ARCHITECTURE "sys390" -#elif BOOST_ARCH_Z - #define ARCHITECTURE "z" -#else - #define ARCHITECTURE "unknown_arch" -#endif - -#if BOOST_COMP_MSVC - #define COMPILER "msvc" - #if _MSC_VER == 1911 - #define COMPILER_RESTRICTION "_2017" - #elif _MSC_VER == 1910 - #define COMPILER_RESTRICTION "_2017" - #elif _MSC_VER == 1900 - #define COMPILER_RESTRICTION "_2015" - #elif _MSC_VER == 1800 - #define COMPILER_RESTRICTION "_2013" - #elif _MSC_VER == 1700 - #define COMPILER_RESTRICTION "_2012" - #elif _MSC_VER == 1600 - #define COMPILER_RESTRICTION "_2010" - #elif _MSC_VER == 1500 - #define COMPILER_RESTRICTION "_2008" - #elif _MSC_VER == 1400 - #define COMPILER_RESTRICTION "_2005" - #elif 
_MSC_VER == 1310 - #define COMPILER_RESTRICTION "_2003" - #else - #define COMPILER_RESTRICTION "_msc_ver_" STR(_MSC_VER) - #endif -#elif BOOST_COMP_GNUC - #define COMPILER "gcc" - #define COMPILER_RESTRICTION "_" STR(__GNUC__) -#elif BOOST_COMP_CLANG - #define COMPILER "clang" - #define COMPILER_RESTRICTION "_" STR(__clang_major__) -#elif BOOST_COMP_BORLAND - #define COMPILER "borland" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_COMO - #define COMPILER "comeau" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_DEC - #define COMPILER "dec" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_DIAB - #define COMPILER "diab" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_DMC - #define COMPILER "dmc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_SYSC - #define COMPILER "sysc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_EDG - #define COMPILER "edg" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_PATH - #define COMPILER "path" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_GCCXML - #define COMPILER "gccxml" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_GHS - #define COMPILER "ghs" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_HPACC - #define COMPILER "hpacc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_IAR - #define COMPILER "iar" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_IBM - #define COMPILER "ibm" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_INTEL - #define COMPILER "intel" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_KCC - #define COMPILER "kcc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_LLVM - #define COMPILER "llvm" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_HIGHC - #define COMPILER "highc" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_MWERKS - #define COMPILER "mwerks" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_MRI - #define COMPILER "mri" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_MPW - #define COMPILER "mrw" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_PALM - #define COMPILER "palm" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_PGI - #define COMPILER "pgi" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_SGI - #define COMPILER "sgi" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_SUNPRO - #define COMPILER "sunpro" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_TENDRA - #define COMPILER "tendra" - #define COMPILER_RESTRICTION "" -#elif BOOST_COMP_WATCOM - #define COMPILER "watcom" - #define COMPILER_RESTRICTION "" -#else - #define COMPILER "unknown_compiler" - #define COMPILER_RESTRICTION "" -#endif - - // structure (3 chunks joined with "-"): - // 1. platform (2 or 3 chunks joined with "_") - // 1.1. operative system (string but forbidden "_" and "-") - // 1.2. architecture (string but forbidden "_" and "-") - // 1.3. (optional) operative system restriction (is explanation and version joined with "_") - // 1.3.1. what is this restriction (string but forbidden "_" and "-") - // 1.3.2. version (1-4 chunks joined with ".") - // 2. compiler (1 or 2 chunks joined with "_") - // 2.1. compiler (string but forbidden "_" and "-") - // 2.2. (optional) compiler restriction (is version) - // 2.2.1. version (1-4 chunks joined with ".") - // 3. build mode (1 or 2 chunks joined with "_") - // 3.1. build_mode (string but forbidden "_" and "-") - // 3.2. 
(optional) build mode restrictions - - std::string build_mode = get_environment("MODE", "Debug"); - std::string cmaki_entropy = get_environment("CMAKI_ENTROPY", ""); - std::string cmaki_info = get_environment("CMAKI_INFO", "ALL"); - - std::transform(build_mode.begin(), build_mode.end(), build_mode.begin(), ::tolower); - std::transform(cmaki_entropy.begin(), cmaki_entropy.end(), cmaki_entropy.begin(), ::tolower); - - // TODO: mas consultas - // Arquitectura, sólo el numero: 32 o 64 - // Compilador: COMPILER + COMPILER_RESTRICTION - // Todo: OPERATIVE_SYSTEM + "_" + ARCHITECTURE + OPERATIVE_RESTRICTION + "-" + COMPILER + COMPILER_RESTRICTION + "-" + build_mode + cmaki_entropy - if(cmaki_info == "OS") - { - std::cout << OPERATIVE_SYSTEM - << std::endl; - } - else if(cmaki_info == "ARCH") - { - std::cout << ARCHITECTURE - << std::endl; - } - else if(cmaki_info == "COMPILER") - { - std::cout << COMPILER - << COMPILER_RESTRICTION - << std::endl; - } - else // if(cmaki_info == "ALL") - { - std::cout << OPERATIVE_SYSTEM - << "_" << ARCHITECTURE - << OPERATIVE_RESTRICTION - << "-" << COMPILER - << COMPILER_RESTRICTION - << "-" << build_mode; - if(cmaki_entropy.length() > 0) - { - std::cout << "-" << cmaki_entropy; - } - std::cout << std::endl; - } -} diff --git a/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml b/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml deleted file mode 100644 index 44de95c..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/.travis.yml +++ /dev/null @@ -1,5 +0,0 @@ -language: c -services: docker -os: linux -script: - - bash <(curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh) diff --git a/node_modules/npm-mas-mas/cmaki_scripts/LICENSE b/node_modules/npm-mas-mas/cmaki_scripts/LICENSE deleted file mode 100644 index 53546c1..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Ricardo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm-mas-mas/cmaki_scripts/README.md b/node_modules/npm-mas-mas/cmaki_scripts/README.md deleted file mode 100644 index e227c42..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# cmaki_scripts -scripts for cmaki: compile, tests, upload .... 
- -# windows problems -``` -$ set PATH=%CD%\node_modules\cmaki_scripts;%PATH% -$ echo %PATHEXT% -.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC;.PY;.JS; -``` diff --git a/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd b/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd deleted file mode 100644 index 72202c8..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/bootstrap.cmd +++ /dev/null @@ -1,15 +0,0 @@ -@echo off -IF EXIST node_modules\cmaki ( - echo . -) else ( - md node_modules\cmaki - cd node_modules && git clone -q https://github.com/makiolo/cmaki.git && cd .. - cd node_modules/cmaki && rm -Rf .git && cd ..\.. -) -IF EXIST node_modules\cmaki_generator ( - echo . -) else ( - md node_modules\cmaki_generator - cd node_modules && git clone -q https://github.com/makiolo/cmaki_generator.git && cd .. - cd node_modules/cmaki_generator && rm -Rf .git && cd ..\.. -) diff --git a/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd b/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd deleted file mode 100644 index 0a2db63..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/ci.cmd +++ /dev/null @@ -1,40 +0,0 @@ -@echo off - -echo [0/3] preinstall -set PATH=%CMAKI_PWD%\node_modules\cmaki_scripts;%PATH% -env | sort - -powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/cmaki_depends.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" -if %errorlevel% neq 0 exit /b %errorlevel% - -if exist package.json ( - - echo [1/3] prepare - :: call ncu -u - npm cache clean --force - - echo [2/3] compile - npm install - if %errorlevel% neq 0 exit /b %errorlevel% - - echo [3/3] run tests - npm test - if %errorlevel% neq 0 exit /b %errorlevel% - -) else ( - - echo [1/3] prepare - if exist node_modules\cmaki_scripts (rmdir /s /q node_modules\cmaki_scripts) - powershell -c "$source = 'https://raw.githubusercontent.com/makiolo/npm-mas-mas/master/cmaki_scripts/bootstrap.cmd'; $dest = $env:TEMP + '\bootstrap.cmd'; $WebClient = New-Object System.Net.WebClient; $WebClient.DownloadFile($source,$dest); Invoke-Expression $dest" - if %errorlevel% neq 0 exit /b %errorlevel% - - echo [2/3] compile - call node_modules\cmaki_scripts\install.cmd - if %errorlevel% neq 0 exit /b %errorlevel% - - echo [3/3] run tests - call node_modules\cmaki_scripts\test.cmd - if %errorlevel% neq 0 exit /b %errorlevel% - -) - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/ci.sh b/node_modules/npm-mas-mas/cmaki_scripts/ci.sh deleted file mode 100644 index 066caae..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/ci.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash -set -e - -export NPP_CACHE="${NPP_CACHE:-FALSE}" - -env | sort - -if [[ -d "bin" ]]; then - rm -Rf bin -fi - -if [[ -d "artifacts" ]]; then - rm -Rf artifacts -fi - -if [[ -d "node_modules" ]]; then - rm -Rf node_modules -fi - -if [ -f "artifacts.json" ]; then - rm artifacts.json -fi - -if [ -f "package.json" ]; then - - echo [1/2] compile - npm install - - echo [2/2] run tests - npm test -else - echo [1/2] compile - ./node_modules/cmaki_scripts/setup.sh && ./node_modules/cmaki_scripts/compile.sh - - echo [2/2] run tests - ./node_modules/cmaki_scripts/test.sh -fi - -if [ -f "cmaki.yml" ]; then - echo [3/3] upload artifact - if [ -f "package.json" ]; then - npm run upload - else - ./node_modules/cmaki_scripts/upload.sh - fi -fi diff --git a/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd 
b/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd deleted file mode 100644 index 5f83632..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/clean.cmd +++ /dev/null @@ -1,3 +0,0 @@ -@echo off -rd /s /q artifacts 2> NUL -rd /s /q coverage 2> NUL diff --git a/node_modules/npm-mas-mas/cmaki_scripts/clean.sh b/node_modules/npm-mas-mas/cmaki_scripts/clean.sh deleted file mode 100755 index b204603..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/clean.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export CC="${CC:-gcc}" -export MODE="${MODE:-Debug}" -export COMPILER_BASENAME=$(basename ${CC}) - -if [ -d $COMPILER_BASENAME/$MODE ]; then - rm -Rf $COMPILER_BASENAME/$MODE -fi -if [ "$NPP_CACHE" == "FALSE" ]; then - rm -Rf artifacts 2> /dev/null -fi -rm -Rf coverage 2> /dev/null -rm -Rf gcc 2> /dev/null -rm -Rf clang 2> /dev/null - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd b/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd deleted file mode 100644 index 674bfd5..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/cmaki.cmd +++ /dev/null @@ -1,22 +0,0 @@ -@ECHO OFF -SET DIRWORK=%~dp0 - -IF NOT EXIST "%NODE%" ( - IF DEFINED NODEHOME ( - IF EXIST "%NODEHOME%\node.exe" ( - SET NODE="%NODEHOME%\node.exe" - ) ELSE ( - ECHO Error: Missing node.exe from node home: "%NODEHOME%" - ) - ) ELSE ( - IF EXIST "C:\Program Files\nodejs\node.exe" ( - ECHO WARNING: Defaulting NODE configuration - SET NODE=C:\Program Files\nodejs\node.exe - SET NODEHOME=C:\Program Files\nodejs - ) ELSE ( - ECHO ERROR: NODE configuration unavailable! - ) - ) -) - -"%NODE%" %DIRWORK%\cmaki.js %* diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js b/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js deleted file mode 100755 index e204fd7..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/cmaki.js +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env node - -var os = require('os') -var fs = require('fs'); -var path = require('path') -var shelljs = require('shelljs'); -var is_win = (os.platform() === 'win32'); - -if(!process.env.CMAKI_PWD) -{ - if (fs.existsSync(path.join("..", "..", "node_modules", "npm-mas-mas"))) { - shelljs.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); - process.env['CMAKI_PWD'] = path.join(process.cwd(), '..', '..'); - } else { - shelljs.env['CMAKI_PWD'] = path.join(process.cwd()); - process.env['CMAKI_PWD'] = path.join(process.cwd()); - } -} -else -{ - shelljs.env['CMAKI_PWD'] = process.env['CMAKI_PWD']; -} - -if(!process.env.CMAKI_INSTALL) -{ - shelljs.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); - process.env['CMAKI_INSTALL'] = path.join(process.env['CMAKI_PWD'], 'bin'); -} -else -{ - shelljs.env['CMAKI_INSTALL'] = process.env['CMAKI_INSTALL']; -} - -if(!process.env.NPP_SERVER) -{ - shelljs.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' - process.env['NPP_SERVER'] = 'http://artifacts.myftp.biz' -} -else -{ - shelljs.env['NPP_SERVER'] = process.env['NPP_SERVER']; -} - -if(!process.env.NPP_CACHE) -{ - shelljs.env['NPP_CACHE'] = 'TRUE' - process.env['NPP_CACHE'] = 'TRUE' -} -else -{ - shelljs.env['NPP_CACHE'] = process.env['NPP_CACHE']; -} - -if(is_win) -{ - cmaki_identifier = 'cmaki_identifier.cmd' -} -else -{ - cmaki_identifier = 'cmaki_identifier.sh' -} - - -// no check in cmaki_identifier for avoid recursion -if( process.cwd().replace(/\\/g, "/").search("/cmaki_identifier") == -1 ) -{ - if(!fs.existsSync( path.join( process.env['CMAKI_INSTALL'], cmaki_identifier) )) - { - dir_identifier = 
path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_identifier'); - - backup1 = shelljs.env['CMAKI_PWD']; - backup2 = process.env['CMAKI_PWD']; - - shelljs.env['CMAKI_PWD'] = dir_identifier; - process.env['CMAKI_PWD'] = dir_identifier; - - shelljs.cd( dir_identifier ); - - if (shelljs.exec('npm install').code !== 0) { - shelljs.echo('Error detecting compiler (compiling cmaki_identifier ...)'); - shelljs.exit(1); - } - - shelljs.env['CMAKI_PWD'] = backup1; - process.env['CMAKI_PWD'] = backup2; - } -} - -if(!process.env.MODE) -{ - shelljs.env['MODE'] = 'Debug'; - process.env['MODE'] = 'Debug'; -} -else -{ - shelljs.env['MODE'] = process.env['MODE']; -} - -function trim(s) -{ - return ( s || '' ).replace( /^\s+|\s+$/g, '' ); -} - -var environment_vars = []; -next_is_environment_var = false; -process.argv.forEach(function(val, index, array) -{ - if(next_is_environment_var) - { - environment_vars.push(val); - } - next_is_environment_var = (val == '-e'); -}); -environment_vars.forEach(function(val, index, array) -{ - var chunks = val.split("="); - if( chunks.length == 2 ) - { - shelljs.env[chunks[0]] = chunks[1]; - process.env[chunks[0]] = chunks[1]; - } - else - { - console.log("Error in -e with value: " + val); - } -}); - -//////////////////////////////////////////////////////////////////////////////// -// change cwd -shelljs.cd( process.env['CMAKI_PWD'] ); -//////////////////////////////////////////////////////////////////////////////// - - -var dir_script; -var script = process.argv[2]; -if (is_win) -{ - if(fs.existsSync(path.join(process.cwd(), script+".cmd"))) - { - dir_script = process.cwd(); - } - else - { - dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); - } -} -else -{ - if(fs.existsSync(path.join(process.cwd(), script+".sh"))) - { - dir_script = process.cwd(); - } - else - { - dir_script = path.join(process.env['CMAKI_PWD'], 'node_modules', 'npm-mas-mas', 'cmaki_scripts'); - } -} - -if (is_win) -{ - script_execute = path.join(dir_script, script+".cmd"); - exists = fs.existsSync(script_execute); - caller_execute = "cmd /c "; - script_execute = script_execute.replace(/\//g, "\\"); -} -else -{ - script_execute = path.join(dir_script, script+".sh"); - exists = fs.existsSync(script_execute); - caller_execute = "bash "; - script_execute = script_execute.replace(/\\/g, "/"); -} - -console.log("Execute: " + caller_execute + script_execute); - -if(exists) -{ - var child = shelljs.exec(caller_execute + script_execute, {async:true, silent:true}, function(err, stdout, stderr) { - process.exit(err); - }); - child.stdout.on('data', function(data) { - console.log(trim(data)); - }); - child.stderr.on('data', function(data) { - console.log(trim(data)); - }); -} -else -{ - console.log("[error] dont exits: " + script_execute); - process.exit(1); -} - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd b/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd deleted file mode 100644 index 2b6cea5..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.cmd +++ /dev/null @@ -1,7 +0,0 @@ -@echo off - -pip install pyyaml -if %errorlevel% neq 0 exit /b %errorlevel% - -pip install poster -if %errorlevel% neq 0 exit /b %errorlevel% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh b/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh deleted file mode 100644 index e52dc93..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/cmaki_depends.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - 
-if [[ "$OSTYPE" =~ ^linux ]]; then - curl -sL https://deb.nodesource.com/setup_8.x | sudo bash - - sudo apt install -y nodejs - sudo npm install -g npm - - # echo 'export PATH=$HOME/local/bin:$PATH' >> ~/.bashrc - # . ~/.bashrc - # mkdir ~/local - # mkdir ~/node-latest-install - # cd ~/node-latest-install - # curl http://nodejs.org/dist/node-latest.tar.gz | tar xz --strip-components=1 - # ./configure --prefix=~/local - # make install # ok, fine, this step probably takes more than 30 seconds... - # curl https://www.npmjs.org/install.sh | sh - # cd - - - sudo apt install -y lcov - sudo apt install -y cppcheck - sudo apt install -y libxaw7-dev # for OIS - sudo apt install -y libgl1-mesa-dev # flow glew - sudo apt install -y freeglut3 freeglut3-dev # for glu (needed for bullet2) - - # cmake 3.5 precompiled - DEPS_DIR=$(pwd)/deps - if [[ -d "$DEPS_DIR" ]]; then - rm -Rf $DEPS_DIR - fi - CMAKE_FILE=cmake-3.5.2-Linux-x86_64.tar.gz - CMAKE_URL=http://www.cmake.org/files/v3.5/${CMAKE_FILE} - wget ${CMAKE_URL} --quiet --no-check-certificate - mkdir -p cmake - tar -xzf ${CMAKE_FILE} -C cmake --strip-components 1 - mv cmake ${DEPS_DIR} - export PATH=${DEPS_DIR}/cmake/bin:${PATH} - cmake --version -else - /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" - brew update - brew doctor - export PATH="/usr/local/bin:$PATH" - brew install node - brew install cmake - brew install lcov - brew install cppcheck -fi -pip install --user pyyaml -pip install --user poster -pip install --user codecov diff --git a/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd b/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd deleted file mode 100644 index 178869f..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/compile.cmd +++ /dev/null @@ -1,14 +0,0 @@ -@echo off - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -echo running in mode %MODE% ... -cd %MODE% -cmake --build . --config %MODE% --target install -set lasterror=%errorlevel% -cd .. -exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/compile.sh b/node_modules/npm-mas-mas/cmaki_scripts/compile.sh deleted file mode 100755 index 084a6ef..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/compile.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_TARGET="${CMAKI_TARGET:-install}" -export COMPILER_BASENAME=$(basename ${CC}) - -echo "running in mode $MODE ... ($COMPILER_BASENAME)" -cd $COMPILER_BASENAME/$MODE - -# CORES=$(grep -c ^processor /proc/cpuinfo) -CORES=12 -cmake --build . --config $MODE --target $CMAKI_TARGET -- -j$CORES -k VERBOSE=1 || cmake --build . --config $MODE --target $CMAKI_TARGET -- -j1 VERBOSE=1 -code=$? -exit $code diff --git a/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd b/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd deleted file mode 100644 index ae010cb..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/create_package.cmd +++ /dev/null @@ -1,28 +0,0 @@ -@echo off - -if DEFINED COMPILER ( - echo Using COMPILER: %COMPILER% -) else ( - set COMPILER="Visual Studio" - echo Env var COMPILER is not defined. Using by default: %COMPILER% -) - -if DEFINED COMPILER_VERSION ( - echo Using COMPILER_VERSION: %COMPILER_VERSION% -) else ( - set COMPILER_VERSION=16 - echo Env var COMPILER_VERSION is not defined. 
Using by default: %COMPILER_VERSION% -) - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -if "%NPP_CI%" == "FALSE" ( - conan install . --build missing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% -) - -conan create . npm-mas-mas/testing -s compiler=%COMPILER% -s build_type=%MODE% -s compiler.version=%COMPILER_VERSION% -tf None - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh b/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh deleted file mode 100644 index 8e84f01..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/create_package.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -e - -export MODE="${MODE:-Debug}" -export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" - -if [ "$NPP_CI" == "FALSE" ]; then - conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -fi - -conan create . npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/docker.sh b/node_modules/npm-mas-mas/cmaki_scripts/docker.sh deleted file mode 100755 index 2b760f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/docker.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -export IMAGE="${IMAGE:-linux-x64}" -export MODE="${MODE:-Debug}" -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export PACKAGE="${PACKAGE:-undefined}" - -docker run --rm makiolo/$IMAGE > ./dockcross-$IMAGE -sed -e "s#DEFAULT_DOCKCROSS_IMAGE=dockcross/$IMAGE#DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE#g" dockcross-$IMAGE > makiolo-$IMAGE -chmod +x ./makiolo-$IMAGE -if [ "$PACKAGE" == "undefined" ]; then - # CI - ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/ci.sh | bash' -else - # build package - ./makiolo-$IMAGE -a "-e MODE=$MODE -e NPP_CACHE=$NPP_CACHE -e DEFAULT_DOCKCROSS_IMAGE=makiolo/$IMAGE -e PACKAGE=$PACKAGE" bash -c 'curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.sh | CMAKI_INSTALL=$(pwd)/bin bash' -fi -error=$? 
- -# clean container -docker rmi -f makiolo/$IMAGE - -exit $error diff --git a/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd b/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd deleted file mode 100644 index 7b70325..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/head_detached.cmd +++ /dev/null @@ -1,6 +0,0 @@ -@echo off -git checkout -b tmp -git checkout master -git merge master -git pull - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh b/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh deleted file mode 100755 index 48c48f1..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/head_detached.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -set -e -git checkout -b tmp -git checkout master -git merge master -git pull - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/init.sh b/node_modules/npm-mas-mas/cmaki_scripts/init.sh deleted file mode 100755 index ec6e0f3..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/init.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh -PWD="`dirname \"$0\"`" - -cp -v $PWD/init/.travis.yml . -git add .travis.yml - -cp -v $PWD/init/appveyor.yml . -git add appveyor.yml - -cp -v $PWD/init/.clang-format . -git add .clang-format - -cp -v $PWD/init/.gitignore . -git add .gitignore - -cp -v $PWD/init/cmaki.yml . -git add cmaki.yml - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd b/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd deleted file mode 100644 index 3366ec8..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.cmd +++ /dev/null @@ -1,30 +0,0 @@ -@echo off - -:: IF DEFINED CMAKI_PWD ( -:: set CMAKI_PWD=%CMAKI_PWD% -:: ) else ( -:: set CMAKI_PWD=%CD% -:: ) -:: -:: IF DEFINED CMAKI_INSTALL ( -:: set CMAKI_INSTALL=%CMAKI_INSTALL% -:: ) else ( -:: set CMAKI_INSTALL=%CMAKI_PWD%/bin -:: ) - -IF DEFINED MODE ( - set MODE=%MODE% -) else ( - set MODE=Debug -) - -IF DEFINED YMLFILE ( - build --yaml=%YMLFILE% -d -) else ( - IF DEFINED PACKAGE ( - build %PACKAGE% -d - ) else ( - echo Error: must define env var YMLFILE or PACKAGE - ) -) - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh b/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh deleted file mode 100755 index a0fd049..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/make_artifact.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -export MODE="${MODE:-Debug}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export PACKAGE="${PACKAGE:-undefined}" -export YMLFILE="${YMLFILE:-undefined}" - -if [ "$YMLFILE" == "undefined" ]; then - if [ "$PACKAGE" == "undefined" ]; then - echo Error: must define env var YMLFILE or PACKAGE - else - echo building $PACKAGE ... - ./build $PACKAGE --no-back-yaml --no-run-tests -d - fi -else - echo building from yaml file: ${YMLFILE} ... 
- ./build --yaml=${YMLFILE} --no-run-tests -d -fi diff --git a/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd b/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd deleted file mode 100644 index 87c7d0c..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/publish.cmd +++ /dev/null @@ -1,3 +0,0 @@ -@echo off -git push && npm publish - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/publish.sh b/node_modules/npm-mas-mas/cmaki_scripts/publish.sh deleted file mode 100755 index c74a96f..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/publish.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash -git push && npm publish - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/replace.sh b/node_modules/npm-mas-mas/cmaki_scripts/replace.sh deleted file mode 100755 index 97884f3..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/replace.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -MV="git mv" - -if [[ $3 == "run" ]]; -then - # do sed implace - run=" -i" -else - run="" -fi - -command="ag -w $1 -l --ignore artifacts --ignore node_modules --ignore gcc --ignore clang --ignore bin" -command_search_files="$command | grep -e $1.cpp$ -e $1.h$" -command_search_files_count="$command_search_files | xargs -I{} grep -h -e ^#include {} | grep -h $1 | wc -l" -count=$(eval $command_search_files_count) - -if [[ $count -gt 0 ]]; -then - echo "se renonbrara los siguientes ficheros (utilizando $MV):" - for file in $(eval $command_search_files); - do - destiny=$(echo $file | sed "s/\<$1\>/$2/g") - if [[ $3 == "run" ]]; - then - echo run: $MV $file $destiny - $MV $file $destiny - else - echo dry-run: $MV $file $destiny - fi - done -else - echo "No es necesario renombrar ficheros" -fi - -if [[ $3 == "run" ]]; -then - # echo run: "$command | xargs sed "s/\<$1\>/$2/g" $run" - echo replacing ... -else - echo replace in dry-run -fi -eval $command | xargs -I{} sed "s@\<$1\>@$2@g" $run {} - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/run.cmd b/node_modules/npm-mas-mas/cmaki_scripts/run.cmd deleted file mode 100644 index 2acc40d..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/run.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off -call node_modules\cmaki\setup.cmd -call node_modules\cmaki\compile.cmd -call node_modules\cmaki\test.cmd - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/search.sh b/node_modules/npm-mas-mas/cmaki_scripts/search.sh deleted file mode 100755 index 0e436b4..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/search.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -ag -w --cpp $1 --ignore cmaki --ignore depends --ignore build --ignore cmaki_generator --ignore baul - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd b/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd deleted file mode 100644 index 8ac63c5..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/setup.cmd +++ /dev/null @@ -1,64 +0,0 @@ -@echo off - -setlocal enableextensions - - -:: export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -:: export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" - -if DEFINED COMPILER ( - echo Using COMPILER: %COMPILER% -) else ( - set COMPILER="Visual Studio" - echo Env var COMPILER is not defined. Using by default: %COMPILER% -) - -if DEFINED COMPILER_VERSION ( - echo Using COMPILER_VERSION: %COMPILER_VERSION% -) else ( - set COMPILER_VERSION=16 - echo Env var COMPILER_VERSION is not defined. 
Using by default: %COMPILER_VERSION% -) - -if DEFINED GENERATOR ( - echo Using Visual Studio generator: %GENERATOR% -) else ( - set GENERATOR=Visual Studio 16 2019 - echo Env var GENERATOR is not defined. Using by default: %GENERATOR% -) - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -if "%Platform%" == "x86" ( - set ARCH=x86 -) else ( - set GENERATOR=%GENERATOR% Win64 - set ARCH=x86_64 -) - -echo running in mode %COMPILER% %COMPILER_VERSION% %ARCH% %MODE% ... -if exist %MODE% (rmdir /s /q %MODE%) -md %MODE% - -:: setup -cd %MODE% - -conan install %CMAKI_PWD% --build never -s build_type=%MODE% -s arch=%ARCH% -s arch_build=%ARCH% -s compiler=%COMPILER% -s compiler.version=%COMPILER_VERSION% - -IF DEFINED Configuration ( - IF DEFINED Platform ( - cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -G"%GENERATOR%" -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% - ) ELSE ( - cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% - ) -) ELSE ( - cmake %CMAKI_PWD% -DWITH_CONAN=1 -DCMAKE_BUILD_TYPE=%MODE% -DCMAKE_INSTALL_PREFIX=%CMAKI_INSTALL% -) - -set lasterror=%errorlevel% -cd %CMAKI_PWD% -exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/setup.sh b/node_modules/npm-mas-mas/cmaki_scripts/setup.sh deleted file mode 100755 index 404e5a9..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/setup.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash - -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export COMPILER="${COMPILER:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" -export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" -export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show | grep -e "\=" | cut -d"=" -f2 | tail -n1)}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export CMAKI_GENERATOR="${CMAKI_GENERATOR:-Unix Makefiles}" -export COVERAGE="${COVERAGE:-FALSE}" -export TESTS_VALGRIND="${TESTS_VALGRIND:-FALSE}" -export COMPILER_BASENAME=$(basename ${CC}) -export CMAKE_TOOLCHAIN_FILE="${CMAKE_TOOLCHAIN_FILE:-"no cross compile"}" -export BUILD_DIR="${BUILD_DIR:-${COMPILER_BASENAME}/${MODE}}" - -if [ "$CMAKE_TOOLCHAIN_FILE" == "no cross compile" ]; then - export CMAKE_TOOLCHAIN_FILE_FILEPATH="" -else - export CMAKE_TOOLCHAIN_FILE_FILEPATH=" -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}" -fi - -echo "running in mode ${MODE} ... ($COMPILER_BASENAME) (${CC} / ${CXX})" - -# setup -if [ ! -d ${BUILD_DIR} ]; then - mkdir -p ${BUILD_DIR} -fi -echo BUILD_DIR=${BUILD_DIR} -cd ${BUILD_DIR} - -if [ -f "CMakeCache.txt" ]; then - rm CMakeCache.txt -fi - -export WITH_CONAN=0 -if [ -f "$CMAKI_PWD/conanfile.txt" ] || [ -f "$CMAKI_PWD/conanfile.py" ]; then - - if [ "$NPP_CI" == "FALSE" ]; then - conan install $CMAKI_PWD --build missing -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} - fi - - echo conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION} - if ! 
conan install $CMAKI_PWD --build never -s compiler=${COMPILER} -s build_type=${MODE} -s compiler.libcxx=${COMPILER_LIBCXX} -s compiler.version=${COMPILER_VERSION}; then - echo Error conan - exit 1 - fi - export WITH_CONAN=1 -fi - -cmake $CMAKI_PWD ${CMAKE_TOOLCHAIN_FILE_FILEPATH} -DCMAKE_MODULE_PATH=${CMAKI_PWD}/node_modules/npm-mas-mas/cmaki -DCMAKE_INSTALL_PREFIX=${CMAKI_INSTALL} -DCMAKE_BUILD_TYPE=${MODE} -DFIRST_ERROR=1 -G"${CMAKI_GENERATOR}" -DCMAKE_C_COMPILER="${CC}" -DCMAKE_CXX_COMPILER="${CXX}" -DNPP_CACHE=${NPP_CACHE} -DCOVERAGE=${COVERAGE} -DTESTS_VALGRIND=${TESTS_VALGRIND} -DWITH_CONAN=${WITH_CONAN} -code=$? -exit ${code} diff --git a/node_modules/npm-mas-mas/cmaki_scripts/test.cmd b/node_modules/npm-mas-mas/cmaki_scripts/test.cmd deleted file mode 100644 index 33ee4fa..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/test.cmd +++ /dev/null @@ -1,15 +0,0 @@ -@echo off - -if "%Configuration%" == "Release" ( - set MODE=Release -) else ( - set MODE=Debug -) - -echo running in mode %MODE% ... -cd %MODE% -ctest . --no-compress-output --output-on-failure -T Test -C %MODE% -V -set lasterror=%errorlevel% -cd .. - -if %lasterror% neq 0 exit /b %lasterror% diff --git a/node_modules/npm-mas-mas/cmaki_scripts/test.sh b/node_modules/npm-mas-mas/cmaki_scripts/test.sh deleted file mode 100755 index 30ddf60..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/test.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash -export NPP_CACHE="${NPP_CACHE:-FALSE}" -export NOCODECOV="${NOCODECOV:-FALSE}" -export COVERAGE="${COVERAGE:-FALSE}" -export CPPCHECK="${CPPCHECK:-FALSE}" -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export COMPILER_BASENAME=$(basename ${CC}) - -echo "running in mode $MODE ... ($COMPILER_BASENAME)" -mkdir -p $COMPILER_BASENAME/$MODE -cd $COMPILER_BASENAME/$MODE - -# tests -ctest . --no-compress-output --output-on-failure -T Test -C $MODE -V -code=$? - -# posttests -if [ "$COVERAGE" == "TRUE" ]; then - if [[ "$CC" == "gcc" ]]; then - if [[ "$MODE" == "Debug" ]]; then - find ../.. -name "*.gcno" -o -name "*.gcda" - lcov -c -i -d ../.. -o coverage.base - # aggregate coverage - lcov -c -d ../.. -o coverage.run - # merge pre & run - lcov -d ../.. -a coverage.base -a coverage.run -o coverage.info - lcov -r coverage.info '/usr/*' -o coverage.info - lcov -r coverage.info 'tests/*' -o coverage.info - lcov -r coverage.info 'gtest/*' -o coverage.info - lcov -r coverage.info 'gmock/*' -o coverage.info - lcov -r coverage.info 'node_modules/*' -o coverage.info - # lcov -l coverage.info - genhtml --no-branch-coverage -o ../../coverage/ coverage.info - if [ "$NOCODECOV" == "FALSE" ]; then - bash <(curl -s https://codecov.io/bash) || echo "Codecov did not collect coverage reports" - fi - rm -f coverage.base coverage.run coverage.info - fi - fi -fi - -if [ "$CPPCHECK" == "TRUE" ]; then - if [[ "$CC" == "gcc" ]]; then - if [[ "$MODE" == "Debug" ]]; then - cppcheck -i ../../node_modules -i ../../$COMPILER_BASENAME --inconclusive --check-config --max-configs=10 --enable=all -UDEBUG --inline-suppr ../.. 
- fi - fi -fi - -exit $code diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd b/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd deleted file mode 100644 index 74063e4..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/upload.cmd +++ /dev/null @@ -1,29 +0,0 @@ -@echo off - -IF DEFINED CMAKI_PWD ( - set CMAKI_PWD=%CMAKI_PWD% -) else ( - set CMAKI_PWD=%CD% -) - -IF DEFINED CMAKI_INSTALL ( - set CMAKI_INSTALL=%CMAKI_INSTALL% -) else ( - set CMAKI_INSTALL=%CMAKI_PWD%/bin -) - -IF DEFINED MODE ( - set MODE=%MODE% -) else ( - set MODE=Debug -) - -set YMLFILE=%CMAKI_PWD%/cmaki.yml - -:: warning, TODO: detectar si hay cambios locales y avisar -git diff %CMAKI_PWD% - -cd %CMAKI_PWD%/node_modules/cmaki_generator -curl -s https://raw.githubusercontent.com/makiolo/cmaki_scripts/master/make_artifact.cmd > __make_artifact.cmd -call __make_artifact.cmd -del __make_artifact.cmd diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload.sh b/node_modules/npm-mas-mas/cmaki_scripts/upload.sh deleted file mode 100755 index a088a9e..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/upload.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -e - -export CC="${CC:-gcc}" -export CXX="${CXX:-g++}" -export MODE="${MODE:-Debug}" -export CMAKI_INSTALL="${CMAKI_INSTALL:-$CMAKI_PWD/bin}" -export YMLFILE=$CMAKI_PWD/cmaki.yml - -git diff $CMAKI_PWD -cd $CMAKI_PWD/node_modules/npm-mas-mas/cmaki_generator -../cmaki_scripts/make_artifact.sh - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd b/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd deleted file mode 100644 index 7d4bb06..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/upload_package.cmd +++ /dev/null @@ -1,5 +0,0 @@ -@echo off - -# upload package -conan upload '*' -r npm-mas-mas --all -c - diff --git a/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh b/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh deleted file mode 100644 index f62d19d..0000000 --- a/node_modules/npm-mas-mas/cmaki_scripts/upload_package.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -set -e - -# upload package -conan upload '*' -r npm-mas-mas --all -c - diff --git a/node_modules/npm-mas-mas/docker-compose.yml b/node_modules/npm-mas-mas/docker-compose.yml deleted file mode 100644 index 8c0ae81..0000000 --- a/node_modules/npm-mas-mas/docker-compose.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: '3' -services: - linux64: - build: - context: . - dockerfile: ./docker/Dockerfile.linux-x64 - environment: - - NPP_SERVER=http://servfactor/cpp - command: make clean build - volumes: - - .:/work - - windows64: - build: - context: . - dockerfile: ./docker/Dockerfile.windows-x64 - environment: - - NPP_SERVER=http://servfactor/cpp - command: make clean build - volumes: - - .:/work - - android64: - build: - context: . 
- dockerfile: ./docker/Dockerfile.android-arm64 - environment: - - NPP_SERVER=http://servfactor/cpp - command: make clean build - volumes: - - .:/work - diff --git a/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 b/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 deleted file mode 100644 index e5b726a..0000000 --- a/node_modules/npm-mas-mas/docker/Dockerfile.android-arm64 +++ /dev/null @@ -1,9 +0,0 @@ -FROM dockcross/android-arm64 -ENV PYTHONUNBUFFERED 1 -RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - -RUN apt install -y nodejs -RUN npm install -g npm -WORKDIR /work -ADD requirements.txt /work -RUN pip install -r requirements.txt - diff --git a/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 b/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 deleted file mode 100644 index 4a132bd..0000000 --- a/node_modules/npm-mas-mas/docker/Dockerfile.linux-x64 +++ /dev/null @@ -1,16 +0,0 @@ -FROM dockcross/linux-x64 -ENV PYTHONUNBUFFERED 1 -RUN echo 'deb http://ftp.us.debian.org/debian testing main contrib non-free' > /etc/apt/sources.list.d/gcc.testing.list -RUN apt-get update -RUN apt-get install -y -t testing g++ -RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - -RUN apt install -y nodejs -RUN npm install -g npm -RUN apt install -y libgl1-mesa-dev -RUN apt install -y libx11-dev -RUN apt install -y python3-pip -WORKDIR /work -RUN pip3 install conan==1.6.1 -RUN pip3 install pyyaml==3.13 -RUN pip3 install requests==2.19.1 - diff --git a/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 b/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 deleted file mode 100644 index d30d465..0000000 --- a/node_modules/npm-mas-mas/docker/Dockerfile.windows-x64 +++ /dev/null @@ -1,9 +0,0 @@ -FROM dockcross/windows-x64 -ENV PYTHONUNBUFFERED 1 -RUN curl -sL https://deb.nodesource.com/setup_8.x | bash - -RUN apt install -y nodejs -RUN npm install -g npm -WORKDIR /work -ADD requirements.txt /work -RUN pip install -r requirements.txt - diff --git a/node_modules/npm-mas-mas/docker/entrypoint.sh b/node_modules/npm-mas-mas/docker/entrypoint.sh deleted file mode 100755 index 122cdaf..0000000 --- a/node_modules/npm-mas-mas/docker/entrypoint.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!//bin/bash - -export MODE="${MODE:-Debug}" -export COMPILER="${COMPILER:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_LIBCXX="${COMPILER_LIBCXX:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" -export COMPILER_VERSION="${COMPILER_VERSION:-$(conan profile show default | grep -e "\=" | cut -d"=" -f2)}" - -if [ "$(uname)" == "Darwin" ]; then - # mac - export COMPILER=apple-clang COMPILER_VERSION=10.0 COMPILER_LIBCXX=libc++ -fi - -# compile 3rd parties -# conan install . --build missing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -# conan create . npm-mas-mas/testing --build $PACKAGE -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None -# conan upload $PACKAGE/*@npm-mas-mas/testing -r npm-mas-mas --all -c - -# compile only $PACKAGE -conan create . 
npm-mas-mas/testing -s compiler=$COMPILER -s build_type=$MODE -s compiler.libcxx=$COMPILER_LIBCXX -s compiler.version=$COMPILER_VERSION -tf None -conan upload *@npm-mas-mas/testing -r npm-mas-mas --all -c - diff --git a/node_modules/npm-mas-mas/package.json b/node_modules/npm-mas-mas/package.json deleted file mode 100644 index 72449de..0000000 --- a/node_modules/npm-mas-mas/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "npm-mas-mas", - "version": "0.0.1", - "description": "npm extension for use packing system with C++", - "bin": { - "cmaki": "./cmaki_scripts/cmaki.js" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/makiolo/npm-mas-mas.git" - }, - "keywords": [ - "cmake", - "c++", - "artifacts" - ], - "author": "Ricardo Marmolejo García", - "license": "MIT", - "bugs": { - "url": "https://github.com/makiolo/npm-mas-mas/issues" - }, - "homepage": "https://github.com/makiolo/npm-mas-mas#readme", - "dependencies": { - "shelljs": ">=0.8.5" - } -} - - - diff --git a/node_modules/npm-mas-mas/servfactor/Dockerfile b/node_modules/npm-mas-mas/servfactor/Dockerfile deleted file mode 100644 index 883467b..0000000 --- a/node_modules/npm-mas-mas/servfactor/Dockerfile +++ /dev/null @@ -1,15 +0,0 @@ -FROM nimmis/apache-php5 - -MAINTAINER Ricardo Marmolejo García - -RUN echo "upload_max_filesize=800M" >> /etc/php5/apache2/php.ini -RUN echo "post_max_size=800M" >> /etc/php5/apache2/php.ini -RUN echo "max_input_time=300" >> /etc/php5/apache2/php.ini -RUN echo "max_execution_time=300" >> /etc/php5/apache2/php.ini -RUN echo "error_reporting = E_ALL" >> /etc/php5/apache2/php.ini -RUN echo "display_errors = On" >> /etc/php5/apache2/php.ini - -WORKDIR /var/www/html/cpp -RUN mkdir -p /var/www/html/packages -RUN chmod -R 777 /var/www/html/packages - diff --git a/node_modules/npm-mas-mas/servfactor/Makefile b/node_modules/npm-mas-mas/servfactor/Makefile deleted file mode 100644 index efbcbeb..0000000 --- a/node_modules/npm-mas-mas/servfactor/Makefile +++ /dev/null @@ -1,3 +0,0 @@ -all: - docker-compose up -d --build - diff --git a/node_modules/npm-mas-mas/servfactor/NOTES.md b/node_modules/npm-mas-mas/servfactor/NOTES.md deleted file mode 100644 index 31554b7..0000000 --- a/node_modules/npm-mas-mas/servfactor/NOTES.md +++ /dev/null @@ -1,4 +0,0 @@ -need edit php.ini: - -upload_max_filesize = 500M -post_max_size = 500M diff --git a/node_modules/npm-mas-mas/servfactor/README.md b/node_modules/npm-mas-mas/servfactor/README.md deleted file mode 100644 index 7f52707..0000000 --- a/node_modules/npm-mas-mas/servfactor/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# servfactor -- default artifacts path is $(pwd)/packages (can use symbolic links) -- chmod o+w packages/ -- chmod o+w packages/stats.txt - -# php.ini -- upload_max_filesize=800M -- post_max_size=800M -- max_input_time=300 -- max_execution_time=300 diff --git a/node_modules/npm-mas-mas/servfactor/docker-compose.yml b/node_modules/npm-mas-mas/servfactor/docker-compose.yml deleted file mode 100644 index 9d85ed5..0000000 --- a/node_modules/npm-mas-mas/servfactor/docker-compose.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: '3' -services: - servfactor: - build: . 
- volumes: - - .:/var/www/html/cpp - - ./packages:/var/www/html/packages - ports: - - "8080:80" - restart: always - diff --git a/node_modules/npm-mas-mas/servfactor/download.php b/node_modules/npm-mas-mas/servfactor/download.php deleted file mode 100755 index 6f536d5..0000000 --- a/node_modules/npm-mas-mas/servfactor/download.php +++ /dev/null @@ -1,58 +0,0 @@ - diff --git a/node_modules/npm-mas-mas/servfactor/index.php b/node_modules/npm-mas-mas/servfactor/index.php deleted file mode 100755 index 6881558..0000000 --- a/node_modules/npm-mas-mas/servfactor/index.php +++ /dev/null @@ -1,227 +0,0 @@ -= 1024 && $i < ( count( $types ) -1 ); $bytes /= 1024, $i++ ); - return( round( $bytes, 2 ) . " " . $types[$i] ); -} - -if(!$quiet_mode) -{ - -/* get disk space free (in bytes) */ -$df = disk_free_space($packages_dir); -/* and get disk space total (in bytes) */ -$dt = disk_total_space($packages_dir); -/* now we calculate the disk space used (in bytes) */ -$du = $dt - $df; -/* percentage of disk used - this will be used to also set the width % of the progress bar */ -$dp = sprintf('%.2f',($du / $dt) * 100); - -/* and we formate the size from bytes to MB, GB, etc. */ -$df = formatSize($df); -$du = formatSize($du); -$dt = formatSize($dt); - -?> - - - - -
-
% Disk Used
-
-
- - - -
-
-format("c")); - } -} -arsort($arr); -$arr = array_keys($arr); - -if(!$quiet_mode) -{ - if($dp > 95) - { - for ($i = 1; $i <= 10; $i++) { - $last_file = array_pop($arr); - if(u::ends_with($last_file, "-cmake.tar.gz")) - { - $big_file = str_replace("-cmake.tar.gz", ".tar.gz", $last_file); - if(!unlink($dir . $last_file)) - { - echo "error removing ".$last_file."
"; - } - else - { - echo "removed ".$last_file."
"; - } - - if(!unlink($dir . $big_file)) - { - echo "error removing ".$dir.$big_file."
"; - } - else - { - echo "removed ".$dir.$big_file."
"; - } - break; - } - } - } -} - -foreach($arr as $file) -{ - // bug si el package tiene "-" - if(u::ends_with($file, "-cmake.tar.gz")) - { - // $substance = $file; - - preg_match('/([\w-]+)-([0-9\.]+)-([\w-\.]+)-cmake.tar.gz/', $file, $matches); - $package = $matches[1]; - $version = $matches[2]; - $platform = $matches[3]; - - // $substance = substr($substance, 0, strrpos($substance, "-")); - // $platform = substr($substance, strrpos($substance, "-")+1); - // $substance = substr($substance, 0, strrpos($substance, "-")); - // $version = substr($substance, strrpos($substance, "-")+1); - // $substance = substr($substance, 0, strrpos($substance, "-")); - // $package = $substance; - if(!isset($_REQUEST['platform']) || ($_REQUEST['platform'] == $platform)) - { - $hits_info = get_hits($data, $file); - $hits = $hits_info[0]; - $last_download = $hits_info[1]; - if($last_download === NULL) - { - if(!$quiet_mode) - { - $formatted = "never downloaded"; - } - else - { - $formatted = "---"; - } - } - else - { - if(!$quiet_mode) - { - $formatted = $last_download->format("d-m-Y H:i"); - } - else - { - $formatted = $last_download->format("c"); - } - } - if(!$quiet_mode) - { - echo "" . $package ." (" . $version . ") "; - if($hits > 0) - { - echo "$platform (".$hits." hits, last use: ".$formatted.")"; - } - else - { - echo "$platform (".$hits." hits)"; - } - echo "
"; - } - else - { - print $package.";".$version.";".$platform.";"."download.php?file=".$file.";".$hits.";".$formatted."\n"; - } - } - } -} - -?> - diff --git a/node_modules/npm-mas-mas/servfactor/packages/README.md b/node_modules/npm-mas-mas/servfactor/packages/README.md deleted file mode 100644 index 734fc3d..0000000 --- a/node_modules/npm-mas-mas/servfactor/packages/README.md +++ /dev/null @@ -1,2 +0,0 @@ -packages dir - diff --git a/node_modules/npm-mas-mas/servfactor/stats.php b/node_modules/npm-mas-mas/servfactor/stats.php deleted file mode 100644 index 1ab9900..0000000 --- a/node_modules/npm-mas-mas/servfactor/stats.php +++ /dev/null @@ -1,68 +0,0 @@ -
"; - }; - $f = fopen($stats, 'r'); - $data = fread($f, filesize($stats)); - $data = unserialize($data); - fclose($f); - } - else - { - $data = array(); - } - - return $data; -} - -function inc_stats($data, $key) -{ - $key = basename($key); - - if(array_key_exists($key, $data)) - { - $data[$key][0] = $data[$key][0] + 1; - $data[$key][1] = new DateTime('NOW'); - } - else - { - $data[$key] = array(1, new DateTime('NOW')); - } - return $data; -} - -function get_hits($data, $key) -{ - $key = basename($key); - - if(array_key_exists($key, $data)) - { - return $data[$key]; - } - else - { - return array(0, NULL); - } -} - -function write_stats($data) -{ - global $stats; - $f = fopen($stats, 'w'); - $data = serialize($data); - fwrite($f, $data); - fclose($f); -} - -?> - diff --git a/node_modules/npm-mas-mas/servfactor/upload.php b/node_modules/npm-mas-mas/servfactor/upload.php deleted file mode 100644 index f57bc22..0000000 --- a/node_modules/npm-mas-mas/servfactor/upload.php +++ /dev/null @@ -1,76 +0,0 @@ -" . $artifacts . ""; - -if(!is_writable($artifacts)) -{ - echo "I don't have permission
"; - exit(1); -} - -$uploaded_file = $artifacts . "/" . basename($_FILES['uploaded']['name']); - -// if(isset($_FILES['uploaded']) && file_exists($uploaded_file)) -// { -// echo "file: ".$uploaded_file." already esxists!"; -// exit(1); -// } - -if (move_uploaded_file($_FILES['uploaded']['tmp_name'], $uploaded_file)) -{ - echo "El fichero es valido y se subio con exito: ". $uploaded_file .".\n"; -} -else -{ -?> -
- Enviar este fichero: - -
-
- - diff --git a/node_modules/npm-mas-mas/servfactor/util.php b/node_modules/npm-mas-mas/servfactor/util.php deleted file mode 100755 index ac69f78..0000000 --- a/node_modules/npm-mas-mas/servfactor/util.php +++ /dev/null @@ -1,2584 +0,0 @@ - - * @link http://github.com/brandonwamboldt/utilphp/ Official Documentation - */ -class util -{ - /** - * A constant representing the number of seconds in a minute, for - * making code more verbose - * - * @var integer - */ - const SECONDS_IN_A_MINUTE = 60; - - /** - * A constant representing the number of seconds in an hour, for making - * code more verbose - * - * @var integer - */ - const SECONDS_IN_A_HOUR = 3600; - const SECONDS_IN_AN_HOUR = 3600; - - /** - * A constant representing the number of seconds in a day, for making - * code more verbose - * - * @var integer - */ - const SECONDS_IN_A_DAY = 86400; - - /** - * A constant representing the number of seconds in a week, for making - * code more verbose - * - * @var integer - */ - const SECONDS_IN_A_WEEK = 604800; - - /** - * A constant representing the number of seconds in a month (30 days), - * for making code more verbose - * - * @var integer - */ - const SECONDS_IN_A_MONTH = 2592000; - - /** - * A constant representing the number of seconds in a year (365 days), - * for making code more verbose - * - * @var integer - */ - const SECONDS_IN_A_YEAR = 31536000; - - /** - * URL constants as defined in the PHP Manual under "Constants usable with - * http_build_url()". - * - * @see http://us2.php.net/manual/en/http.constants.php#http.constants.url - */ - const HTTP_URL_REPLACE = 1; - const HTTP_URL_JOIN_PATH = 2; - const HTTP_URL_JOIN_QUERY = 4; - const HTTP_URL_STRIP_USER = 8; - const HTTP_URL_STRIP_PASS = 16; - const HTTP_URL_STRIP_AUTH = 32; - const HTTP_URL_STRIP_PORT = 64; - const HTTP_URL_STRIP_PATH = 128; - const HTTP_URL_STRIP_QUERY = 256; - const HTTP_URL_STRIP_FRAGMENT = 512; - const HTTP_URL_STRIP_ALL = 1024; - - /** - * A collapse icon, using in the dump_var function to allow collapsing - * an array or object - * - * @var string - */ - public static $icon_collapse = 
'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFNzFDNDQyNEMyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFNzFDNDQyM0MyQzkxMUUxOTU4MEM4M0UxRDA0MUVGNSIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3NDlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MjlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PuF4AWkAAAA2UExURU9t2DBStczM/1h16DNmzHiW7iNFrypMvrnD52yJ4ezs7Onp6ejo6P///+Tk5GSG7D9h5SRGq0Q2K74AAAA/SURBVHjaLMhZDsAgDANRY3ZISnP/y1ZWeV+jAeuRSky6cKL4ryDdSggP8UC7r6GvR1YHxjazPQDmVzI/AQYAnFQDdVSJ80EAAAAASUVORK5CYII='; - - /** - * A collapse icon, using in the dump_var function to allow collapsing - * an array or object - * - * @var string - */ - public static $icon_expand = 'iVBORw0KGgoAAAANSUhEUgAAAAkAAAAJCAMAAADXT/YiAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAA2RpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMC1jMDYwIDYxLjEzNDc3NywgMjAxMC8wMi8xMi0xNzozMjowMCAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtcE1NOk9yaWdpbmFsRG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpFQzZERTJDNEMyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpFQzZERTJDM0MyQzkxMUUxODRCQzgyRUNDMzZEQkZFQiIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IFdpbmRvd3MiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDo3MzlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDo3MTlFRjQ2NkM5QzJFMTExOTA0MzkwRkI0M0ZCODY4RCIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PkmDvWIAAABIUExURU9t2MzM/3iW7ubm59/f5urq85mZzOvr6////9ra38zMzObm5rfB8FZz5myJ4SNFrypMvjBStTNmzOvr+mSG7OXl8T9h5SRGq/OfqCEAAABKSURBVHjaFMlbEoAwCEPRULXF2jdW9r9T4czcyUdA4XWB0IgdNSybxU9amMzHzDlPKKu7Fd1e6+wY195jW0ARYZECxPq5Gn8BBgCr0gQmxpjKAwAAAABJRU5ErkJggg=='; - - private static $hasArray = false; - - /** - * Map of special non-ASCII characters and suitable ASCII replacement - * characters. 
- * - * Part of the URLify.php Project - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - public static $maps = array( - 'de' => array(/* German */ - 'Ä' => 'Ae', 'Ö' => 'Oe', 'Ü' => 'Ue', 'ä' => 'ae', 'ö' => 'oe', 'ü' => 'ue', 'ß' => 'ss', - 'ẞ' => 'SS' - ), - 'latin' => array( - 'À' => 'A', 'Á' => 'A', 'Â' => 'A', 'Ã' => 'A', 'Ä' => 'A', 'Å' => 'A','Ă' => 'A', 'Æ' => 'AE', 'Ç' => - 'C', 'È' => 'E', 'É' => 'E', 'Ê' => 'E', 'Ë' => 'E', 'Ì' => 'I', 'Í' => 'I', 'Î' => 'I', - 'Ï' => 'I', 'Ð' => 'D', 'Ñ' => 'N', 'Ò' => 'O', 'Ó' => 'O', 'Ô' => 'O', 'Õ' => 'O', 'Ö' => - 'O', 'Ő' => 'O', 'Ø' => 'O','Ș' => 'S','Ț' => 'T', 'Ù' => 'U', 'Ú' => 'U', 'Û' => 'U', 'Ü' => 'U', 'Ű' => 'U', - 'Ý' => 'Y', 'Þ' => 'TH', 'ß' => 'ss', 'à' => 'a', 'á' => 'a', 'â' => 'a', 'ã' => 'a', 'ä' => - 'a', 'å' => 'a', 'ă' => 'a', 'æ' => 'ae', 'ç' => 'c', 'è' => 'e', 'é' => 'e', 'ê' => 'e', 'ë' => 'e', - 'ì' => 'i', 'í' => 'i', 'î' => 'i', 'ï' => 'i', 'ð' => 'd', 'ñ' => 'n', 'ò' => 'o', 'ó' => - 'o', 'ô' => 'o', 'õ' => 'o', 'ö' => 'o', 'ő' => 'o', 'ø' => 'o', 'ș' => 's', 'ț' => 't', 'ù' => 'u', 'ú' => 'u', - 'û' => 'u', 'ü' => 'u', 'ű' => 'u', 'ý' => 'y', 'þ' => 'th', 'ÿ' => 'y' - ), - 'latin_symbols' => array( - '©' => '(c)', - '®' => '(r)' - ), - 'el' => array(/* Greek */ - 'α' => 'a', 'β' => 'b', 'γ' => 'g', 'δ' => 'd', 'ε' => 'e', 'ζ' => 'z', 'η' => 'h', 'θ' => '8', - 'ι' => 'i', 'κ' => 'k', 'λ' => 'l', 'μ' => 'm', 'ν' => 'n', 'ξ' => '3', 'ο' => 'o', 'π' => 'p', - 'ρ' => 'r', 'σ' => 's', 'τ' => 't', 'υ' => 'y', 'φ' => 'f', 'χ' => 'x', 'ψ' => 'ps', 'ω' => 'w', - 'ά' => 'a', 'έ' => 'e', 'ί' => 'i', 'ό' => 'o', 'ύ' => 'y', 'ή' => 'h', 'ώ' => 'w', 'ς' => 's', - 'ϊ' => 'i', 'ΰ' => 'y', 'ϋ' => 'y', 'ΐ' => 'i', - 'Α' => 'A', 'Β' => 'B', 'Γ' => 'G', 'Δ' => 'D', 'Ε' => 'E', 'Ζ' => 'Z', 'Η' => 'H', 'Θ' => '8', - 'Ι' => 'I', 'Κ' => 'K', 'Λ' => 'L', 'Μ' => 'M', 'Ν' => 'N', 'Ξ' => '3', 'Ο' => 'O', 'Π' => 'P', - 'Ρ' => 'R', 'Σ' => 'S', 'Τ' => 'T', 'Υ' => 'Y', 'Φ' => 'F', 'Χ' => 'X', 'Ψ' => 'PS', 'Ω' => 'W', - 'Ά' => 'A', 'Έ' => 'E', 'Ί' => 'I', 'Ό' => 'O', 'Ύ' => 'Y', 'Ή' => 'H', 'Ώ' => 'W', 'Ϊ' => 'I', - 'Ϋ' => 'Y' - ), - 'tr' => array(/* Turkish */ - 'ş' => 's', 'Ş' => 'S', 'ı' => 'i', 'İ' => 'I', 'ç' => 'c', 'Ç' => 'C', 'ü' => 'u', 'Ü' => 'U', - 'ö' => 'o', 'Ö' => 'O', 'ğ' => 'g', 'Ğ' => 'G' - ), - 'ru' => array(/* Russian */ - 'а' => 'a', 'б' => 'b', 'в' => 'v', 'г' => 'g', 'д' => 'd', 'е' => 'e', 'ё' => 'yo', 'ж' => 'zh', - 'з' => 'z', 'и' => 'i', 'й' => 'j', 'к' => 'k', 'л' => 'l', 'м' => 'm', 'н' => 'n', 'о' => 'o', - 'п' => 'p', 'р' => 'r', 'с' => 's', 'т' => 't', 'у' => 'u', 'ф' => 'f', 'х' => 'h', 'ц' => 'c', - 'ч' => 'ch', 'ш' => 'sh', 'щ' => 'sh', 'ъ' => '', 'ы' => 'y', 'ь' => '', 'э' => 'e', 'ю' => 'yu', - 'я' => 'ya', - 'А' => 'A', 'Б' => 'B', 'В' => 'V', 'Г' => 'G', 'Д' => 'D', 'Е' => 'E', 'Ё' => 'Yo', 'Ж' => 'Zh', - 'З' => 'Z', 'И' => 'I', 'Й' => 'J', 'К' => 'K', 'Л' => 'L', 'М' => 'M', 'Н' => 'N', 'О' => 'O', - 'П' => 'P', 'Р' => 'R', 'С' => 'S', 'Т' => 'T', 'У' => 'U', 'Ф' => 'F', 'Х' => 'H', 'Ц' => 'C', - 'Ч' => 'Ch', 'Ш' => 'Sh', 'Щ' => 'Sh', 'Ъ' => '', 'Ы' => 'Y', 'Ь' => '', 'Э' => 'E', 'Ю' => 'Yu', - 'Я' => 'Ya', - '№' => '' - ), - 'uk' => array(/* Ukrainian */ - 'Є' => 'Ye', 'І' => 'I', 'Ї' => 'Yi', 'Ґ' => 'G', 'є' => 'ye', 'і' => 'i', 'ї' => 'yi', 'ґ' => 'g' - ), - 'cs' => array(/* Czech */ - 'č' => 'c', 'ď' => 'd', 'ě' => 'e', 'ň' => 'n', 'ř' => 'r', 'š' => 's', 'ť' => 't', 'ů' => 'u', - 'ž' => 'z', 'Č' => 'C', 'Ď' => 'D', 'Ě' => 'E', 'Ň' => 'N', 'Ř' => 'R', 'Š' => 'S', 'Ť' => 'T', 
- 'Ů' => 'U', 'Ž' => 'Z' - ), - 'pl' => array(/* Polish */ - 'ą' => 'a', 'ć' => 'c', 'ę' => 'e', 'ł' => 'l', 'ń' => 'n', 'ó' => 'o', 'ś' => 's', 'ź' => 'z', - 'ż' => 'z', 'Ą' => 'A', 'Ć' => 'C', 'Ę' => 'e', 'Ł' => 'L', 'Ń' => 'N', 'Ó' => 'O', 'Ś' => 'S', - 'Ź' => 'Z', 'Ż' => 'Z' - ), - 'ro' => array(/* Romanian */ - 'ă' => 'a', 'â' => 'a', 'î' => 'i', 'ș' => 's', 'ț' => 't', 'Ţ' => 'T', 'ţ' => 't' - ), - 'lv' => array(/* Latvian */ - 'ā' => 'a', 'č' => 'c', 'ē' => 'e', 'ģ' => 'g', 'ī' => 'i', 'ķ' => 'k', 'ļ' => 'l', 'ņ' => 'n', - 'š' => 's', 'ū' => 'u', 'ž' => 'z', 'Ā' => 'A', 'Č' => 'C', 'Ē' => 'E', 'Ģ' => 'G', 'Ī' => 'i', - 'Ķ' => 'k', 'Ļ' => 'L', 'Ņ' => 'N', 'Š' => 'S', 'Ū' => 'u', 'Ž' => 'Z' - ), - 'lt' => array(/* Lithuanian */ - 'ą' => 'a', 'č' => 'c', 'ę' => 'e', 'ė' => 'e', 'į' => 'i', 'š' => 's', 'ų' => 'u', 'ū' => 'u', 'ž' => 'z', - 'Ą' => 'A', 'Č' => 'C', 'Ę' => 'E', 'Ė' => 'E', 'Į' => 'I', 'Š' => 'S', 'Ų' => 'U', 'Ū' => 'U', 'Ž' => 'Z' - ), - 'vn' => array(/* Vietnamese */ - 'Á' => 'A', 'À' => 'A', 'Ả' => 'A', 'Ã' => 'A', 'Ạ' => 'A', 'Ă' => 'A', 'Ắ' => 'A', 'Ằ' => 'A', 'Ẳ' => 'A', 'Ẵ' => 'A', 'Ặ' => 'A', 'Â' => 'A', 'Ấ' => 'A', 'Ầ' => 'A', 'Ẩ' => 'A', 'Ẫ' => 'A', 'Ậ' => 'A', - 'á' => 'a', 'à' => 'a', 'ả' => 'a', 'ã' => 'a', 'ạ' => 'a', 'ă' => 'a', 'ắ' => 'a', 'ằ' => 'a', 'ẳ' => 'a', 'ẵ' => 'a', 'ặ' => 'a', 'â' => 'a', 'ấ' => 'a', 'ầ' => 'a', 'ẩ' => 'a', 'ẫ' => 'a', 'ậ' => 'a', - 'É' => 'E', 'È' => 'E', 'Ẻ' => 'E', 'Ẽ' => 'E', 'Ẹ' => 'E', 'Ê' => 'E', 'Ế' => 'E', 'Ề' => 'E', 'Ể' => 'E', 'Ễ' => 'E', 'Ệ' => 'E', - 'é' => 'e', 'è' => 'e', 'ẻ' => 'e', 'ẽ' => 'e', 'ẹ' => 'e', 'ê' => 'e', 'ế' => 'e', 'ề' => 'e', 'ể' => 'e', 'ễ' => 'e', 'ệ' => 'e', - 'Í' => 'I', 'Ì' => 'I', 'Ỉ' => 'I', 'Ĩ' => 'I', 'Ị' => 'I', 'í' => 'i', 'ì' => 'i', 'ỉ' => 'i', 'ĩ' => 'i', 'ị' => 'i', - 'Ó' => 'O', 'Ò' => 'O', 'Ỏ' => 'O', 'Õ' => 'O', 'Ọ' => 'O', 'Ô' => 'O', 'Ố' => 'O', 'Ồ' => 'O', 'Ổ' => 'O', 'Ỗ' => 'O', 'Ộ' => 'O', 'Ơ' => 'O', 'Ớ' => 'O', 'Ờ' => 'O', 'Ở' => 'O', 'Ỡ' => 'O', 'Ợ' => 'O', - 'ó' => 'o', 'ò' => 'o', 'ỏ' => 'o', 'õ' => 'o', 'ọ' => 'o', 'ô' => 'o', 'ố' => 'o', 'ồ' => 'o', 'ổ' => 'o', 'ỗ' => 'o', 'ộ' => 'o', 'ơ' => 'o', 'ớ' => 'o', 'ờ' => 'o', 'ở' => 'o', 'ỡ' => 'o', 'ợ' => 'o', - 'Ú' => 'U', 'Ù' => 'U', 'Ủ' => 'U', 'Ũ' => 'U', 'Ụ' => 'U', 'Ư' => 'U', 'Ứ' => 'U', 'Ừ' => 'U', 'Ử' => 'U', 'Ữ' => 'U', 'Ự' => 'U', - 'ú' => 'u', 'ù' => 'u', 'ủ' => 'u', 'ũ' => 'u', 'ụ' => 'u', 'ư' => 'u', 'ứ' => 'u', 'ừ' => 'u', 'ử' => 'u', 'ữ' => 'u', 'ự' => 'u', - 'Ý' => 'Y', 'Ỳ' => 'Y', 'Ỷ' => 'Y', 'Ỹ' => 'Y', 'Ỵ' => 'Y', 'ý' => 'y', 'ỳ' => 'y', 'ỷ' => 'y', 'ỹ' => 'y', 'ỵ' => 'y', - 'Đ' => 'D', 'đ' => 'd' - ), - 'ar' => array(/* Arabic */ - 'أ' => 'a', 'ب' => 'b', 'ت' => 't', 'ث' => 'th', 'ج' => 'g', 'ح' => 'h', 'خ' => 'kh', 'د' => 'd', - 'ذ' => 'th', 'ر' => 'r', 'ز' => 'z', 'س' => 's', 'ش' => 'sh', 'ص' => 's', 'ض' => 'd', 'ط' => 't', - 'ظ' => 'th', 'ع' => 'aa', 'غ' => 'gh', 'ف' => 'f', 'ق' => 'k', 'ك' => 'k', 'ل' => 'l', 'م' => 'm', - 'ن' => 'n', 'ه' => 'h', 'و' => 'o', 'ي' => 'y' - ), - 'sr' => array(/* Serbian */ - 'ђ' => 'dj', 'ј' => 'j', 'љ' => 'lj', 'њ' => 'nj', 'ћ' => 'c', 'џ' => 'dz', 'đ' => 'dj', - 'Ђ' => 'Dj', 'Ј' => 'j', 'Љ' => 'Lj', 'Њ' => 'Nj', 'Ћ' => 'C', 'Џ' => 'Dz', 'Đ' => 'Dj' - ), - 'az' => array(/* Azerbaijani */ - 'ç' => 'c', 'ə' => 'e', 'ğ' => 'g', 'ı' => 'i', 'ö' => 'o', 'ş' => 's', 'ü' => 'u', - 'Ç' => 'C', 'Ə' => 'E', 'Ğ' => 'G', 'İ' => 'I', 'Ö' => 'O', 'Ş' => 'S', 'Ü' => 'U' - ), - 'fi' => array(/* Finnish */ - 'ä' => 'a', - 'ö' => 'o' - ), - ); - - /** - * The character map for the 
designated language - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $map = array(); - - /** - * The character list as a string. - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $chars = ''; - - /** - * The character list as a regular expression. - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $regex = ''; - - /** - * The current language - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static $language = ''; - - /** - * Initializes the character map. - * - * Part of the URLify.php Project - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - */ - private static function initLanguageMap($language = '') - { - if (count(self::$map) > 0 && (($language == '') || ($language == self::$language))) { - return; - } - - // Is a specific map associated with $language? - if (isset(self::$maps[$language]) && is_array(self::$maps[$language])) { - // Move this map to end. This means it will have priority over others - $m = self::$maps[$language]; - unset(self::$maps[$language]); - self::$maps[$language] = $m; - } - - // Reset static vars - self::$language = $language; - self::$map = array(); - self::$chars = ''; - - foreach (self::$maps as $map) { - foreach ($map as $orig => $conv) { - self::$map[$orig] = $conv; - self::$chars .= $orig; - } - } - - self::$regex = '/[' . self::$chars . ']/u'; - } - - /** - * Remove the duplicates from an array. - * - * This is faster version than the builtin array_unique(). - * - * Notes on time requirements: - * array_unique -> O(n log n) - * array_flip -> O(n) - * - * http://stackoverflow.com/questions/8321620/array-unique-vs-array-flip - * http://php.net/manual/en/function.array-unique.php - * - * @param $array - * @return $array - */ - public static function fast_array_unique($array) - { - $array = array_keys(array_flip($array)); - - return $array; - } - - /** - * Access an array index, retrieving the value stored there if it - * exists or a default if it does not. This function allows you to - * concisely access an index which may or may not exist without - * raising a warning. 
- * - * @param array $var Array value to access - * @param mixed $default Default value to return if the key is not - * present in the array - * @return mixed - */ - public static function array_get(&$var, $default = null) - { - if (isset($var)) { - return $var; - } - - return $default; - } - - /** - * Display a variable's contents using nice HTML formatting and will - * properly display the value of booleans as true or false - * - * @see recursiveVarDumpHelper() - * - * @param mixed $var The variable to dump - * @return string - */ - public static function var_dump($var, $return = false, $expandLevel = 1) - { - self::$hasArray = false; - $toggScript = 'var colToggle = function(toggID) {var img = document.getElementById(toggID);if (document.getElementById(toggID + "-collapsable").style.display == "none") {document.getElementById(toggID + "-collapsable").style.display = "inline";setImg(toggID, 0);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling;while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}} else {document.getElementById(toggID + "-collapsable").style.display = "none";setImg(toggID, 1);var previousSibling = document.getElementById(toggID + "-collapsable").previousSibling; while (previousSibling != null && (previousSibling.nodeType != 1 || previousSibling.tagName.toLowerCase() != "br")) {previousSibling = previousSibling.previousSibling;}}};'; - $imgScript = 'var setImg = function(objID,imgID,addStyle) {var imgStore = ["data:image/png;base64,' . self::$icon_collapse . '", "data:image/png;base64,' . self::$icon_expand . '"];if (objID) {document.getElementById(objID).setAttribute("src", imgStore[imgID]);if (addStyle){document.getElementById(objID).setAttribute("style", "position:relative;left:-5px;top:-1px;cursor:pointer;");}}};'; - $jsCode = preg_replace('/ +/', ' ', ''); - $html = '
';
-        $done  = array();
-        $html .= self::recursiveVarDumpHelper($var, intval($expandLevel), 0, $done);
-        $html .= '
'; - - if (self::$hasArray) { - $html = $jsCode . $html; - } - - if (!$return) { - echo $html; - } - - return $html; - } - - /** - * Display a variable's contents using nice HTML formatting (Without - * the
 tag) and will properly display the values of variables
-     * like booleans and resources. Supports collapsable arrays and objects
-     * as well.
-     *
-     * @param  mixed $var The variable to dump
-     * @return string
-     */
-    protected static function recursiveVarDumpHelper($var, $expLevel, $depth = 0, $done = array())
-    {
-        $html = '';
-
-        if ($expLevel > 0) {
-            $expLevel--;
-            $setImg = 0;
-            $setStyle = 'display:inline;';
-        } elseif ($expLevel == 0) {
-            $setImg = 1;
-            $setStyle='display:none;';
-        } elseif ($expLevel < 0) {
-            $setImg = 0;
-            $setStyle = 'display:inline;';
-        }
-
-        if (is_bool($var)) {
-            $html .= 'bool(' . (($var) ? 'true' : 'false') . ')';
-        } elseif (is_int($var)) {
-            $html .= 'int(' . $var . ')';
-        } elseif (is_float($var)) {
-            $html .= 'float(' . $var . ')';
-        } elseif (is_string($var)) {
-            $html .= 'string(' . strlen($var) . ') "' . self::htmlentities($var) . '"';
-        } elseif (is_null($var)) {
-            $html .= 'NULL';
-        } elseif (is_resource($var)) {
-            $html .= 'resource("' . get_resource_type($var) . '") "' . $var . '"';
-        } elseif (is_array($var)) {
-            // Check for recursion
-            if ($depth > 0) {
-                foreach ($done as $prev) {
-                    if ($prev === $var) {
-                        $html .= 'array(' . count($var) . ') *RECURSION DETECTED*';
-                        return $html;
-                    }
-                }
-
-                // Keep track of variables we have already processed to detect recursion
-                $done[] = &$var;
-            }
-
-            self::$hasArray = true;
-            $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000);
-
-            $html .= (!empty($var) ? ' ' : '') . 'array(' . count($var) . ')';
-            if (!empty($var)) {
-                $html .= ' 
[
'; - - $indent = 4; - $longest_key = 0; - - foreach ($var as $key => $value) { - if (is_string($key)) { - $longest_key = max($longest_key, strlen($key) + 2); - } else { - $longest_key = max($longest_key, strlen($key)); - } - } - - foreach ($var as $key => $value) { - if (is_numeric($key)) { - $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); - } else { - $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); - } - - $html .= ' => '; - - $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); - - foreach ($value as $line => $val) { - if ($line != 0) { - $value[$line] = str_repeat(' ', $indent * 2) . $val; - } - } - - $html .= implode('
', $value) . '
'; - } - - $html .= ']
'; - } - } elseif (is_object($var)) { - // Check for recursion - foreach ($done as $prev) { - if ($prev === $var) { - $html .= 'object(' . get_class($var) . ') *RECURSION DETECTED*'; - return $html; - } - } - - // Keep track of variables we have already processed to detect recursion - $done[] = &$var; - - self::$hasArray=true; - $uuid = 'include-php-' . uniqid() . mt_rand(1, 1000000); - - $html .= ' object(' . get_class($var) . ')
[
'; - - $varArray = (array) $var; - - $indent = 4; - $longest_key = 0; - - foreach ($varArray as $key => $value) { - if (substr($key, 0, 2) == "\0*") { - unset($varArray[$key]); - $key = 'protected:' . substr($key, 3); - $varArray[$key] = $value; - } elseif (substr($key, 0, 1) == "\0") { - unset($varArray[$key]); - $key = 'private:' . substr($key, 1, strpos(substr($key, 1), "\0")) . ':' . substr($key, strpos(substr($key, 1), "\0") + 2); - $varArray[$key] = $value; - } - - if (is_string($key)) { - $longest_key = max($longest_key, strlen($key) + 2); - } else { - $longest_key = max($longest_key, strlen($key)); - } - } - - foreach ($varArray as $key => $value) { - if (is_numeric($key)) { - $html .= str_repeat(' ', $indent) . str_pad($key, $longest_key, ' '); - } else { - $html .= str_repeat(' ', $indent) . str_pad('"' . self::htmlentities($key) . '"', $longest_key, ' '); - } - - $html .= ' => '; - - $value = explode('
', self::recursiveVarDumpHelper($value, $expLevel, $depth + 1, $done)); - - foreach ($value as $line => $val) { - if ($line != 0) { - $value[$line] = str_repeat(' ', $indent * 2) . $val; - } - } - - $html .= implode('
', $value) . '
'; - } - - $html .= ']
'; - } - - return $html; - } - - /** - * Converts any accent characters to their equivalent normal characters - * and converts any other non-alphanumeric characters to dashes, then - * converts any sequence of two or more dashes to a single dash. This - * function generates slugs safe for use as URLs, and if you pass true - * as the second parameter, it will create strings safe for use as CSS - * classes or IDs. - * - * @param string $string A string to convert to a slug - * @param string $separator The string to separate words with - * @param boolean $css_mode Whether or not to generate strings safe for - * CSS classes/IDs (Default to false) - * @return string - */ - public static function slugify($string, $separator = '-', $css_mode = false) - { - // Compatibility with 1.0.* parameter ordering for semver - if ($separator === true || $separator === false) { - $css_mode = $separator; - $separator = '-'; - - // Raise deprecation error - trigger_error( - 'util::slugify() now takes $css_mode as the third parameter, please update your code', - E_USER_DEPRECATED - ); - } - - $slug = preg_replace('/([^a-z0-9]+)/', $separator, strtolower(self::remove_accents($string))); - - if ($css_mode) { - $digits = array('zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine'); - - if (is_numeric(substr($slug, 0, 1))) { - $slug = $digits[substr($slug, 0, 1)] . substr($slug, 1); - } - } - - return $slug; - } - - /** - * Checks to see if a string is utf8 encoded. - * - * NOTE: This function checks for 5-Byte sequences, UTF8 - * has Bytes Sequences with a maximum length of 4. - * - * Written by Tony Ferrara - * - * @param string $string The string to be checked - * @return boolean - */ - public static function seems_utf8($string) - { - if (function_exists('mb_check_encoding')) { - // If mbstring is available, this is significantly faster than - // using PHP regexps. - return mb_check_encoding($string, 'UTF-8'); - } - - // @codeCoverageIgnoreStart - return self::seemsUtf8Regex($string); - // @codeCoverageIgnoreEnd - } - - /** - * A non-Mbstring UTF-8 checker. - * - * @param $string - * @return bool - */ - protected static function seemsUtf8Regex($string) - { - // Obtained from http://stackoverflow.com/a/11709412/430062 with permission. - $regex = '/( - [\xC0-\xC1] # Invalid UTF-8 Bytes - | [\xF5-\xFF] # Invalid UTF-8 Bytes - | \xE0[\x80-\x9F] # Overlong encoding of prior code point - | \xF0[\x80-\x8F] # Overlong encoding of prior code point - | [\xC2-\xDF](?![\x80-\xBF]) # Invalid UTF-8 Sequence Start - | [\xE0-\xEF](?![\x80-\xBF]{2}) # Invalid UTF-8 Sequence Start - | [\xF0-\xF4](?![\x80-\xBF]{3}) # Invalid UTF-8 Sequence Start - | (?<=[\x0-\x7F\xF5-\xFF])[\x80-\xBF] # Invalid UTF-8 Sequence Middle - | (? - * - * @param string $brokenSerializedData - * @return string - */ - public static function fix_broken_serialization($brokenSerializedData) - { - $fixdSerializedData = preg_replace_callback('!s:(\d+):"(.*?)";!', function($matches) { - $snip = $matches[2]; - return 's:' . strlen($snip) . ':"' . $snip . '";'; - }, $brokenSerializedData); - - return $fixdSerializedData; - } - - /** - * Checks to see if the page is being server over SSL or not - * - * @return boolean - */ - public static function is_https() - { - return isset($_SERVER['HTTPS']) && !empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] != 'off'; - } - - /** - * Add or remove query arguments to the URL. 
- * - * @param mixed $newKey Either newkey or an associative array - * @param mixed $newValue Either newvalue or oldquery or uri - * @param mixed $uri URI or URL to append the queru/queries to. - * @return string - */ - public static function add_query_arg($newKey, $newValue = null, $uri = null) - { - // Was an associative array of key => value pairs passed? - if (is_array($newKey)) { - $newParams = $newKey; - - // Was the URL passed as an argument? - if (!is_null($newValue)) { - $uri = $newValue; - } elseif (!is_null($uri)) { - $uri = $uri; - } else { - $uri = self::array_get($_SERVER['REQUEST_URI'], ''); - } - } else { - $newParams = array($newKey => $newValue); - - // Was the URL passed as an argument? - $uri = is_null($uri) ? self::array_get($_SERVER['REQUEST_URI'], '') : $uri; - } - - // Parse the URI into it's components - $puri = parse_url($uri); - - if (isset($puri['query'])) { - parse_str($puri['query'], $queryParams); - $queryParams = array_merge($queryParams, $newParams); - } elseif (isset($puri['path']) && strstr($puri['path'], '=') !== false) { - $puri['query'] = $puri['path']; - unset($puri['path']); - parse_str($puri['query'], $queryParams); - $queryParams = array_merge($queryParams, $newParams); - } else { - $queryParams = $newParams; - } - - // Strip out any query params that are set to false. - // Properly handle valueless parameters. - foreach ($queryParams as $param => $value) { - if ($value === false) { - unset($queryParams[$param]); - } elseif ($value === null) { - $queryParams[$param] = ''; - } - } - - // Re-construct the query string - $puri['query'] = http_build_query($queryParams); - - // Strip = from valueless parameters. - $puri['query'] = preg_replace('/=(?=&|$)/', '', $puri['query']); - - - // Re-construct the entire URL - $nuri = self::http_build_url($puri); - - // Make the URI consistent with our input - if ($nuri[0] === '/' && strstr($uri, '/') === false) { - $nuri = substr($nuri, 1); - } - - if ($nuri[0] === '?' && strstr($uri, '?') === false) { - $nuri = substr($nuri, 1); - } - - return rtrim($nuri, '?'); - } - - /** - * Removes an item or list from the query string. - * - * @param string|array $keys Query key or keys to remove. - * @param bool $uri When false uses the $_SERVER value - * @return string - */ - public static function remove_query_arg($keys, $uri = null) - { - if (is_array($keys)) { - return self::add_query_arg(array_combine($keys, array_fill(0, count($keys), false)), $uri); - } - - return self::add_query_arg(array($keys => false), $uri); - } - - /** - * Build a URL. - * - * The parts of the second URL will be merged into the first according to - * the flags argument. 
- * - * @author Jake Smith - * @see https://github.com/jakeasmith/http_build_url/ - * - * @param mixed $url (part(s) of) an URL in form of a string or - * associative array like parse_url() returns - * @param mixed $parts same as the first argument - * @param int $flags a bitmask of binary or'ed HTTP_URL constants; - * HTTP_URL_REPLACE is the default - * @param array $new_url if set, it will be filled with the parts of the - * composed url like parse_url() would return - * @return string - */ - public static function http_build_url($url, $parts = array(), $flags = self::HTTP_URL_REPLACE, &$new_url = array()) - { - is_array($url) || $url = parse_url($url); - is_array($parts) || $parts = parse_url($parts); - - isset($url['query']) && is_string($url['query']) || $url['query'] = null; - isset($parts['query']) && is_string($parts['query']) || $parts['query'] = null; - - $keys = array('user', 'pass', 'port', 'path', 'query', 'fragment'); - - // HTTP_URL_STRIP_ALL and HTTP_URL_STRIP_AUTH cover several other flags. - if ($flags & self::HTTP_URL_STRIP_ALL) { - $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS - | self::HTTP_URL_STRIP_PORT | self::HTTP_URL_STRIP_PATH - | self::HTTP_URL_STRIP_QUERY | self::HTTP_URL_STRIP_FRAGMENT; - } elseif ($flags & self::HTTP_URL_STRIP_AUTH) { - $flags |= self::HTTP_URL_STRIP_USER | self::HTTP_URL_STRIP_PASS; - } - - // Schema and host are alwasy replaced - foreach (array('scheme', 'host') as $part) { - if (isset($parts[$part])) { - $url[$part] = $parts[$part]; - } - } - - if ($flags & self::HTTP_URL_REPLACE) { - foreach ($keys as $key) { - if (isset($parts[$key])) { - $url[$key] = $parts[$key]; - } - } - } else { - if (isset($parts['path']) && ($flags & self::HTTP_URL_JOIN_PATH)) { - if (isset($url['path']) && substr($parts['path'], 0, 1) !== '/') { - $url['path'] = rtrim( - str_replace(basename($url['path']), '', $url['path']), - '/' - ) . '/' . ltrim($parts['path'], '/'); - } else { - $url['path'] = $parts['path']; - } - } - - if (isset($parts['query']) && ($flags & self::HTTP_URL_JOIN_QUERY)) { - if (isset($url['query'])) { - parse_str($url['query'], $url_query); - parse_str($parts['query'], $parts_query); - - $url['query'] = http_build_query( - array_replace_recursive( - $url_query, - $parts_query - ) - ); - } else { - $url['query'] = $parts['query']; - } - } - } - - if (isset($url['path']) && substr($url['path'], 0, 1) !== '/') { - $url['path'] = '/' . $url['path']; - } - - foreach ($keys as $key) { - $strip = 'HTTP_URL_STRIP_' . strtoupper($key); - if ($flags & constant('utilphp\\util::' . $strip)) { - unset($url[$key]); - } - } - - $parsed_string = ''; - - if (isset($url['scheme'])) { - $parsed_string .= $url['scheme'] . '://'; - } - - if (isset($url['user'])) { - $parsed_string .= $url['user']; - - if (isset($url['pass'])) { - $parsed_string .= ':' . $url['pass']; - } - - $parsed_string .= '@'; - } - - if (isset($url['host'])) { - $parsed_string .= $url['host']; - } - - if (isset($url['port'])) { - $parsed_string .= ':' . $url['port']; - } - - if (!empty($url['path'])) { - $parsed_string .= $url['path']; - } else { - $parsed_string .= '/'; - } - - if (isset($url['query'])) { - $parsed_string .= '?' . $url['query']; - } - - if (isset($url['fragment'])) { - $parsed_string .= '#' . $url['fragment']; - } - - $new_url = $url; - - return $parsed_string; - } - - /** - * Converts many english words that equate to true or false to boolean. - * - * Supports 'y', 'n', 'yes', 'no' and a few other variations. 
- * - * @param string $string The string to convert to boolean - * @param bool $default The value to return if we can't match any - * yes/no words - * @return boolean - */ - public static function str_to_bool($string, $default = false) - { - $yes_words = 'affirmative|all right|aye|indubitably|most assuredly|ok|of course|okay|sure thing|y|yes+|yea|yep|sure|yeah|true|t|on|1|oui|vrai'; - $no_words = 'no*|no way|nope|nah|na|never|absolutely not|by no means|negative|never ever|false|f|off|0|non|faux'; - - if (preg_match('/^(' . $yes_words . ')$/i', $string)) { - return true; - } elseif (preg_match('/^(' . $no_words . ')$/i', $string)) { - return false; - } - - return $default; - } - - /** - * Check if a string starts with the given string. - * - * @param string $string - * @param string $starts_with - * @return boolean - */ - public static function starts_with($string, $starts_with) - { - return strpos($string, $starts_with) === 0; - } - - /** - * Check if a string ends with the given string. - * - * @param string $string - * @param string $starts_with - * @return boolean - */ - public static function ends_with($string, $ends_with) - { - return substr($string, -strlen($ends_with)) === $ends_with; - } - - /** - * Check if a string contains another string. - * - * @param string $haystack - * @param string $needle - * @return boolean - */ - public static function str_contains($haystack, $needle) - { - return strpos($haystack, $needle) !== false; - } - - /** - * Check if a string contains another string. This version is case - * insensitive. - * - * @param string $haystack - * @param string $needle - * @return boolean - */ - public static function str_icontains($haystack, $needle) - { - return stripos($haystack, $needle) !== false; - } - - /** - * Return the file extension of the given filename. - * - * @param string $filename - * @return string - */ - public static function get_file_ext($filename) - { - return pathinfo($filename, PATHINFO_EXTENSION); - } - - /** - * Removes a directory (and its contents) recursively. - * - * Contributed by Askar (ARACOOL) - * - * @param string $dir The directory to be deleted recursively - * @param bool $traverseSymlinks Delete contents of symlinks recursively - * @return bool - * @throws \RuntimeException - */ - public static function rmdir($dir, $traverseSymlinks = false) - { - if (!file_exists($dir)) { - return true; - } elseif (!is_dir($dir)) { - throw new \RuntimeException('Given path is not a directory'); - } - - if (!is_link($dir) || $traverseSymlinks) { - foreach (scandir($dir) as $file) { - if ($file === '.' || $file === '..') { - continue; - } - - $currentPath = $dir . '/' . $file; - - if (is_dir($currentPath)) { - self::rmdir($currentPath, $traverseSymlinks); - } elseif (!unlink($currentPath)) { - // @codeCoverageIgnoreStart - throw new \RuntimeException('Unable to delete ' . $currentPath); - // @codeCoverageIgnoreEnd - } - } - } - - // Windows treats removing directory symlinks identically to removing directories. - if (is_link($dir) && !defined('PHP_WINDOWS_VERSION_MAJOR')) { - if (!unlink($dir)) { - // @codeCoverageIgnoreStart - throw new \RuntimeException('Unable to delete ' . $dir); - // @codeCoverageIgnoreEnd - } - } else { - if (!rmdir($dir)) { - // @codeCoverageIgnoreStart - throw new \RuntimeException('Unable to delete ' . $dir); - // @codeCoverageIgnoreEnd - } - } - - return true; - } - - /** - * Convert entities, while preserving already-encoded entities. 
- * - * @param string $string The text to be converted - * @return string - */ - public static function htmlentities($string, $preserve_encoded_entities = false) - { - if ($preserve_encoded_entities) { - // @codeCoverageIgnoreStart - if (defined('HHVM_VERSION')) { - $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES); - } else { - $translation_table = get_html_translation_table(HTML_ENTITIES, ENT_QUOTES, self::mbInternalEncoding()); - } - // @codeCoverageIgnoreEnd - - $translation_table[chr(38)] = '&'; - return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); - } - - return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); - } - - /** - * Convert >, <, ', " and & to html entities, but preserves entities that - * are already encoded. - * - * @param string $string The text to be converted - * @return string - */ - public static function htmlspecialchars($string, $preserve_encoded_entities = false) - { - if ($preserve_encoded_entities) { - // @codeCoverageIgnoreStart - if (defined('HHVM_VERSION')) { - $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES); - } else { - $translation_table = get_html_translation_table(HTML_SPECIALCHARS, ENT_QUOTES, self::mbInternalEncoding()); - } - // @codeCoverageIgnoreEnd - - $translation_table[chr(38)] = '&'; - - return preg_replace('/&(?![A-Za-z]{0,4}\w{2,3};|#[0-9]{2,3};)/', '&', strtr($string, $translation_table)); - } - - return htmlentities($string, ENT_QUOTES, self::mbInternalEncoding()); - } - - /** - * Transliterates characters to their ASCII equivalents. - * - * Part of the URLify.php Project - * - * @see https://github.com/jbroadway/urlify/blob/master/URLify.php - * - * @param string $text Text that might have not-ASCII characters - * @param string $language Specifies a priority for a specific language. - * @return string Filtered string with replaced "nice" characters - */ - public static function downcode($text, $language = '') - { - self::initLanguageMap($language); - - if (self::seems_utf8($text)) { - if (preg_match_all(self::$regex, $text, $matches)) { - for ($i = 0; $i < count($matches[0]); $i++) { - $char = $matches[0][$i]; - if (isset(self::$map[$char])) { - $text = str_replace($char, self::$map[$char], $text); - } - } - } - } else { - // Not a UTF-8 string so we assume its ISO-8859-1 - $search = "\x80\x83\x8a\x8e\x9a\x9e\x9f\xa2\xa5\xb5\xc0\xc1\xc2\xc3\xc4\xc5\xc7\xc8\xc9\xca\xcb\xcc\xcd"; - $search .= "\xce\xcf\xd1\xd2\xd3\xd4\xd5\xd6\xd8\xd9\xda\xdb\xdc\xdd\xe0\xe1\xe2\xe3\xe4\xe5\xe7\xe8\xe9"; - $search .= "\xea\xeb\xec\xed\xee\xef\xf1\xf2\xf3\xf4\xf5\xf6\xf8\xf9\xfa\xfb\xfc\xfd\xff"; - $text = strtr($text, $search, 'EfSZszYcYuAAAAAACEEEEIIIINOOOOOOUUUUYaaaaaaceeeeiiiinoooooouuuuyy'); - - // These latin characters should be represented by two characters so - // we can't use strtr - $complexSearch = array("\x8c", "\x9c", "\xc6", "\xd0", "\xde", "\xdf", "\xe6", "\xf0", "\xfe"); - $complexReplace = array('OE', 'oe', 'AE', 'DH', 'TH', 'ss', 'ae', 'dh', 'th'); - $text = str_replace($complexSearch, $complexReplace, $text); - } - - return $text; - } - - /** - * Converts all accent characters to ASCII characters. - * - * If there are no accent characters, then the string given is just - * returned. - * - * @param string $string Text that might have accent characters - * @param string $language Specifies a priority for a specific language. 
- * @return string Filtered string with replaced "nice" characters - */ - public static function remove_accents($string, $language = '') - { - if (!preg_match('/[\x80-\xff]/', $string)) { - return $string; - } - - return self::downcode($string, $language); - } - - /** - * Strip all witespaces from the given string. - * - * @param string $string The string to strip - * @return string - */ - public static function strip_space($string) - { - return preg_replace('/\s+/', '', $string); - } - - /** - * Sanitize a string by performing the following operation : - * - Remove accents - * - Lower the string - * - Remove punctuation characters - * - Strip whitespaces - * - * @param string $string the string to sanitize - * @return string - */ - public static function sanitize_string($string) - { - $string = self::remove_accents($string); - $string = strtolower($string); - $string = preg_replace('/[^a-zA-Z 0-9]+/', '', $string); - $string = self::strip_space($string); - - return $string; - } - - /** - * Pads a given string with zeroes on the left. - * - * @param int $number The number to pad - * @param int $length The total length of the desired string - * @return string - */ - public static function zero_pad($number, $length) - { - return str_pad($number, $length, '0', STR_PAD_LEFT); - } - - /** - * Converts a unix timestamp to a relative time string, such as "3 days ago" - * or "2 weeks ago". - * - * @param int $from The date to use as a starting point - * @param int $to The date to compare to, defaults to now - * @param string $suffix The string to add to the end, defaults to " ago" - * @return string - */ - public static function human_time_diff($from, $to = '', $as_text = false, $suffix = ' ago') - { - if ($to == '') { - $to = time(); - } - - $from = new \DateTime(date('Y-m-d H:i:s', $from)); - $to = new \DateTime(date('Y-m-d H:i:s', $to)); - $diff = $from->diff($to); - - if ($diff->y > 1) { - $text = $diff->y . ' years'; - } elseif ($diff->y == 1) { - $text = '1 year'; - } elseif ($diff->m > 1) { - $text = $diff->m . ' months'; - } elseif ($diff->m == 1) { - $text = '1 month'; - } elseif ($diff->d > 7) { - $text = ceil($diff->d / 7) . ' weeks'; - } elseif ($diff->d == 7) { - $text = '1 week'; - } elseif ($diff->d > 1) { - $text = $diff->d . ' days'; - } elseif ($diff->d == 1) { - $text = '1 day'; - } elseif ($diff->h > 1) { - $text = $diff->h . ' hours'; - } elseif ($diff->h == 1) { - $text = ' 1 hour'; - } elseif ($diff->i > 1) { - $text = $diff->i . ' minutes'; - } elseif ($diff->i == 1) { - $text = '1 minute'; - } elseif ($diff->s > 1) { - $text = $diff->s . ' seconds'; - } else { - $text = '1 second'; - } - - if ($as_text) { - $text = explode(' ', $text, 2); - $text = self::number_to_word($text[0]) . ' ' . $text[1]; - } - - return trim($text) . $suffix; - } - - /** - * Converts a number into the text equivalent. For example, 456 becomes four - * hundred and fifty-six. - * - * Part of the IntToWords Project. 
- * - * @param int|float $number The number to convert into text - * @return string - */ - public static function number_to_word($number) - { - $number = (string) $number; - - if (strpos($number, '.') !== false) { - list($number, $decimal) = explode('.', $number); - } else { - $decimal = false; - } - - $output = ''; - - if ($number[0] == '-') { - $output = 'negative '; - $number = ltrim($number, '-'); - } elseif ($number[0] == '+') { - $output = 'positive '; - $number = ltrim($number, '+'); - } - - if ($number[0] == '0') { - $output .= 'zero'; - } else { - $length = 19; - $number = str_pad($number, 60, '0', STR_PAD_LEFT); - $group = rtrim(chunk_split($number, 3, ' '), ' '); - $groups = explode(' ', $group); - $groups2 = array(); - - foreach ($groups as $group) { - $group[1] = isset($group[1]) ? $group[1] : null; - $group[2] = isset($group[2]) ? $group[2] : null; - $groups2[] = self::numberToWordThreeDigits($group[0], $group[1], $group[2]); - } - - for ($z = 0; $z < count($groups2); $z++) { - if ($groups2[$z] != '') { - $output .= $groups2[$z] . self::numberToWordConvertGroup($length - $z); - $output .= ($z < $length && ! array_search('', array_slice($groups2, $z + 1, -1)) && $groups2[$length] != '' && $groups[$length][0] == '0' ? ' and ' : ', '); - } - } - - $output = rtrim($output, ', '); - } - - if ($decimal > 0) { - $output .= ' point'; - - for ($i = 0; $i < strlen($decimal); $i++) { - $output .= ' ' . self::numberToWordConvertDigit($decimal[$i]); - } - } - - return $output; - } - - protected static function numberToWordConvertGroup($index) - { - switch($index) { - case 11: - return ' decillion'; - case 10: - return ' nonillion'; - case 9: - return ' octillion'; - case 8: - return ' septillion'; - case 7: - return ' sextillion'; - case 6: - return ' quintrillion'; - case 5: - return ' quadrillion'; - case 4: - return ' trillion'; - case 3: - return ' billion'; - case 2: - return ' million'; - case 1: - return ' thousand'; - case 0: - return ''; - } - - return ''; - } - - protected static function numberToWordThreeDigits($digit1, $digit2, $digit3) - { - $output = ''; - - if ($digit1 == '0' && $digit2 == '0' && $digit3 == '0') { - return ''; - } - - if ($digit1 != '0') { - $output .= self::numberToWordConvertDigit($digit1) . 
' hundred'; - - if ($digit2 != '0' || $digit3 != '0') { - $output .= ' and '; - } - } - if ($digit2 != '0') { - $output .= self::numberToWordTwoDigits($digit2, $digit3); - } elseif ($digit3 != '0') { - $output .= self::numberToWordConvertDigit($digit3); - } - - return $output; - } - - protected static function numberToWordTwoDigits($digit1, $digit2) - { - if ($digit2 == '0') { - switch ($digit1) { - case '1': - return 'ten'; - case '2': - return 'twenty'; - case '3': - return 'thirty'; - case '4': - return 'forty'; - case '5': - return 'fifty'; - case '6': - return 'sixty'; - case '7': - return 'seventy'; - case '8': - return 'eighty'; - case '9': - return 'ninety'; - } - } elseif ($digit1 == '1') { - switch ($digit2) { - case '1': - return 'eleven'; - case '2': - return 'twelve'; - case '3': - return 'thirteen'; - case '4': - return 'fourteen'; - case '5': - return 'fifteen'; - case '6': - return 'sixteen'; - case '7': - return 'seventeen'; - case '8': - return 'eighteen'; - case '9': - return 'nineteen'; - } - } else { - $second_digit = self::numberToWordConvertDigit($digit2); - - switch ($digit1) { - case '2': - return "twenty-{$second_digit}"; - case '3': - return "thirty-{$second_digit}"; - case '4': - return "forty-{$second_digit}"; - case '5': - return "fifty-{$second_digit}"; - case '6': - return "sixty-{$second_digit}"; - case '7': - return "seventy-{$second_digit}"; - case '8': - return "eighty-{$second_digit}"; - case '9': - return "ninety-{$second_digit}"; - } - } - } - - /** - * @param $digit - * @return string - * @throws \LogicException - */ - protected static function numberToWordConvertDigit($digit) - { - switch ($digit) { - case '0': - return 'zero'; - case '1': - return 'one'; - case '2': - return 'two'; - case '3': - return 'three'; - case '4': - return 'four'; - case '5': - return 'five'; - case '6': - return 'six'; - case '7': - return 'seven'; - case '8': - return 'eight'; - case '9': - return 'nine'; - default: - throw new \LogicException('Not a number'); - } - } - - /** - * Calculates percentage of numerator and denominator. - * - * @param int|float $numerator - * @param int|float $denominator - * @param int $decimals - * @param string $dec_point - * @param string $thousands_sep - * @return int|float - */ - public static function calculate_percentage($numerator, $denominator, $decimals = 2, $dec_point = '.', $thousands_sep = ',') - { - return number_format(($numerator / $denominator) * 100, $decimals, $dec_point, $thousands_sep); - } - - /** - * Transmit UTF-8 content headers if the headers haven't already been sent. - * - * @param string $content_type The content type to send out - * @return boolean - */ - public static function utf8_headers($content_type = 'text/html') - { - // @codeCoverageIgnoreStart - if (!headers_sent()) { - header('Content-type: ' . $content_type . '; charset=utf-8'); - - return true; - } - - return false; - // @codeCoverageIgnoreEnd - } - - /** - * Transmit headers that force a browser to display the download file - * dialog. Cross browser compatible. Only fires if headers have not - * already been sent. - * - * @param string $filename The name of the filename to display to - * browsers - * @param string $content The content to output for the download. 
- * If you don't specify this, just the - * headers will be sent - * @return boolean - */ - public static function force_download($filename, $content = false) - { - // @codeCoverageIgnoreStart - if (!headers_sent()) { - // Required for some browsers - if (ini_get('zlib.output_compression')) { - @ini_set('zlib.output_compression', 'Off'); - } - - header('Pragma: public'); - header('Expires: 0'); - header('Cache-Control: must-revalidate, post-check=0, pre-check=0'); - - // Required for certain browsers - header('Cache-Control: private', false); - - header('Content-Disposition: attachment; filename="' . basename(str_replace('"', '', $filename)) . '";'); - header('Content-Type: application/force-download'); - header('Content-Transfer-Encoding: binary'); - - if ($content) { - header('Content-Length: ' . strlen($content)); - } - - ob_clean(); - flush(); - - if ($content) { - echo $content; - } - - return true; - } - - return false; - // @codeCoverageIgnoreEnd - } - - /** - * Sets the headers to prevent caching for the different browsers. - * - * Different browsers support different nocache headers, so several - * headers must be sent so that all of them get the point that no - * caching should occur - * - * @return boolean - */ - public static function nocache_headers() - { - // @codeCoverageIgnoreStart - if (!headers_sent()) { - header('Expires: Wed, 11 Jan 1984 05:00:00 GMT'); - header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); - header('Cache-Control: no-cache, must-revalidate, max-age=0'); - header('Pragma: no-cache'); - - return true; - } - - return false; - // @codeCoverageIgnoreEnd - } - - /** - * Generates a string of random characters. - * - * @throws LengthException If $length is bigger than the available - * character pool and $no_duplicate_chars is - * enabled - * - * @param integer $length The length of the string to - * generate - * @param boolean $human_friendly Whether or not to make the - * string human friendly by - * removing characters that can be - * confused with other characters ( - * O and 0, l and 1, etc) - * @param boolean $include_symbols Whether or not to include - * symbols in the string. Can not - * be enabled if $human_friendly is - * true - * @param boolean $no_duplicate_chars Whether or not to only use - * characters once in the string. 
- * @return string - */ - public static function random_string($length = 16, $human_friendly = true, $include_symbols = false, $no_duplicate_chars = false) - { - $nice_chars = 'ABCDEFGHJKLMNPQRSTUVWXYZabcdefhjkmnprstuvwxyz23456789'; - $all_an = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890'; - $symbols = '!@#$%^&*()~_-=+{}[]|:;<>,.?/"\'\\`'; - $string = ''; - - // Determine the pool of available characters based on the given parameters - if ($human_friendly) { - $pool = $nice_chars; - } else { - $pool = $all_an; - - if ($include_symbols) { - $pool .= $symbols; - } - } - - if (!$no_duplicate_chars) { - return substr(str_shuffle(str_repeat($pool, $length)), 0, $length); - } - - // Don't allow duplicate letters to be disabled if the length is - // longer than the available characters - if ($no_duplicate_chars && strlen($pool) < $length) { - throw new \LengthException('$length exceeds the size of the pool and $no_duplicate_chars is enabled'); - } - - // Convert the pool of characters into an array of characters and - // shuffle the array - $pool = str_split($pool); - $poolLength = count($pool); - $rand = mt_rand(0, $poolLength - 1); - - // Generate our string - for ($i = 0; $i < $length; $i++) { - $string .= $pool[$rand]; - - // Remove the character from the array to avoid duplicates - array_splice($pool, $rand, 1); - - // Generate a new number - if (($poolLength - 2 - $i) > 0) { - $rand = mt_rand(0, $poolLength - 2 - $i); - } else { - $rand = 0; - } - } - - return $string; - } - - /** - * Generate secure random string of given length - * If 'openssl_random_pseudo_bytes' is not available - * then generate random string using default function - * - * Part of the Laravel Project - * - * @param int $length length of string - * @return bool - */ - public static function secure_random_string($length = 16) - { - if (function_exists('openssl_random_pseudo_bytes')) { - $bytes = openssl_random_pseudo_bytes($length * 2); - - if ($bytes === false) { - throw new \LengthException('$length is not accurate, unable to generate random string'); - } - - return substr(str_replace(array('/', '+', '='), '', base64_encode($bytes)), 0, $length); - } - - // @codeCoverageIgnoreStart - return static::random_string($length); - // @codeCoverageIgnoreEnd - } - - /** - * Check if a given string matches a given pattern. - * - * Contributed by Abhimanyu Sharma - * - * @param string $pattern Parttern of string exptected - * @param string $string String that need to be matched - * @return bool - */ - public static function match_string($pattern, $string, $caseSensitive = true) - { - if ($pattern == $string) { - return true; - } - - // Preg flags - $flags = $caseSensitive ? '' : 'i'; - - // Escape any regex special characters - $pattern = preg_quote($pattern, '#'); - - // Unescape * which is our wildcard character and change it to .* - $pattern = str_replace('\*', '.*', $pattern); - - return (bool) preg_match('#^' . $pattern . '$#' . $flags, $string); - } - - /** - * Validate an email address. - * - * @param string $possible_email An email address to validate - * @return bool - */ - public static function validate_email($possible_email) - { - return (bool) filter_var($possible_email, FILTER_VALIDATE_EMAIL); - } - - /** - * Return the URL to a user's gravatar. 
- * - * @param string $email The email of the user - * @param integer $size The size of the gravatar - * @return string - */ - public static function get_gravatar($email, $size = 32) - { - if (self::is_https()) { - $url = 'https://secure.gravatar.com/'; - } else { - $url = 'http://www.gravatar.com/'; - } - - $url .= 'avatar/' . md5($email) . '?s=' . (int) abs($size); - - return $url; - } - - /** - * Turns all of the links in a string into HTML links. - * - * Part of the LinkifyURL Project - * - * @param string $text The string to parse - * @return string - */ - public static function linkify($text) - { - $text = preg_replace('/'/', ''', $text); // IE does not handle ' entity! - $section_html_pattern = '%# Rev:20100913_0900 github.com/jmrware/LinkifyURL - # Section text into HTML tags and everything else. - ( # $1: Everything not HTML tag. - [^<]+(?:(?!... tag. - ]*> # opening tag. - [^<]*(?:(?! # closing tag. - ) # End $2: - %ix'; - - return preg_replace_callback($section_html_pattern, array(__CLASS__, 'linkifyCallback'), $text); - } - - /** - * Callback for the preg_replace in the linkify() method. - * - * Part of the LinkifyURL Project - * - * @param array $matches Matches from the preg_ function - * @return string - */ - protected static function linkifyRegex($text) - { - $url_pattern = '/# Rev:20100913_0900 github.com\/jmrware\/LinkifyURL - # Match http & ftp URL that is not already linkified. - # Alternative 1: URL delimited by (parentheses). - (\() # $1 "(" start delimiter. - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $2: URL. - (\)) # $3: ")" end delimiter. - | # Alternative 2: URL delimited by [square brackets]. - (\[) # $4: "[" start delimiter. - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $5: URL. - (\]) # $6: "]" end delimiter. - | # Alternative 3: URL delimited by {curly braces}. - (\{) # $7: "{" start delimiter. - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $8: URL. - (\}) # $9: "}" end delimiter. - | # Alternative 4: URL delimited by . - (<|&(?:lt|\#60|\#x3c);) # $10: "<" start delimiter (or HTML entity). - ((?:ht|f)tps?:\/\/[a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]+) # $11: URL. - (>|&(?:gt|\#62|\#x3e);) # $12: ">" end delimiter (or HTML entity). - | # Alternative 5: URL not delimited by (), [], {} or <>. - (# $13: Prefix proving URL not already linked. - (?: ^ # Can be a beginning of line or string, or - | [^=\s\'"\]] # a non-"=", non-quote, non-"]", followed by - ) \s*[\'"]? # optional whitespace and optional quote; - | [^=\s]\s+ # or... a non-equals sign followed by whitespace. - ) # End $13. Non-prelinkified-proof prefix. - (\b # $14: Other non-delimited URL. - (?:ht|f)tps?:\/\/ # Required literal http, https, ftp or ftps prefix. - [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]+ # All URI chars except "&" (normal*). - (?: # Either on a "&" or at the end of URI. - (?! # Allow a "&" char only if not start of an... - &(?:gt|\#0*62|\#x0*3e); # HTML ">" entity, or - | &(?:amp|apos|quot|\#0*3[49]|\#x0*2[27]); # a [&\'"] entity if - [.!&\',:?;]? # followed by optional punctuation then - (?:[^a-z0-9\-._~!$&\'()*+,;=:\/?#[\]@%]|$) # a non-URI char or EOS. - ) & # If neg-assertion true, match "&" (special). - [a-z0-9\-._~!$\'()*+,;=:\/?#[\]@%]* # More non-& URI chars (normal*). - )* # Unroll-the-loop (special normal*)*. - [a-z0-9\-_~$()*+=\/#[\]@%] # Last char can\'t be [.!&\',;:?] - ) # End $14. Other non-delimited URL. 
- /imx'; - - $url_replace = '$1$4$7$10$13$2$5$8$11$14$3$6$9$12'; - - return preg_replace($url_pattern, $url_replace, $text); - } - - /** - * Callback for the preg_replace in the linkify() method. - * - * Part of the LinkifyURL Project - * - * @param array $matches Matches from the preg_ function - * @return string - */ - protected static function linkifyCallback($matches) - { - if (isset($matches[2])) { - return $matches[2]; - } - - return self::linkifyRegex($matches[1]); - } - - /** - * Return the current URL. - * - * @return string - */ - public static function get_current_url() - { - $url = ''; - - // Check to see if it's over https - $is_https = self::is_https(); - if ($is_https) { - $url .= 'https://'; - } else { - $url .= 'http://'; - } - - // Was a username or password passed? - if (isset($_SERVER['PHP_AUTH_USER'])) { - $url .= $_SERVER['PHP_AUTH_USER']; - - if (isset($_SERVER['PHP_AUTH_PW'])) { - $url .= ':' . $_SERVER['PHP_AUTH_PW']; - } - - $url .= '@'; - } - - - // We want the user to stay on the same host they are currently on, - // but beware of security issues - // see http://shiflett.org/blog/2006/mar/server-name-versus-http-host - $url .= $_SERVER['HTTP_HOST']; - - $port = $_SERVER['SERVER_PORT']; - - // Is it on a non standard port? - if ($is_https && ($port != 443)) { - $url .= ':' . $_SERVER['SERVER_PORT']; - } elseif (!$is_https && ($port != 80)) { - $url .= ':' . $_SERVER['SERVER_PORT']; - } - - // Get the rest of the URL - if (!isset($_SERVER['REQUEST_URI'])) { - // Microsoft IIS doesn't set REQUEST_URI by default - $url .= $_SERVER['PHP_SELF']; - - if (isset($_SERVER['QUERY_STRING'])) { - $url .= '?' . $_SERVER['QUERY_STRING']; - } - } else { - $url .= $_SERVER['REQUEST_URI']; - } - - return $url; - } - - /** - * Returns the IP address of the client. - * - * @param boolean $trust_proxy_headers Whether or not to trust the - * proxy headers HTTP_CLIENT_IP - * and HTTP_X_FORWARDED_FOR. ONLY - * use if your server is behind a - * proxy that sets these values - * @return string - */ - public static function get_client_ip($trust_proxy_headers = false) - { - if (!$trust_proxy_headers) { - return $_SERVER['REMOTE_ADDR']; - } - - if (!empty($_SERVER['HTTP_CLIENT_IP'])) { - $ip = $_SERVER['HTTP_CLIENT_IP']; - } elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) { - $ip = $_SERVER['HTTP_X_FORWARDED_FOR']; - } else { - $ip = $_SERVER['REMOTE_ADDR']; - } - - return $ip; - } - - /** - * Truncate a string to a specified length without cutting a word off. - * - * @param string $string The string to truncate - * @param integer $length The length to truncate the string to - * @param string $append Text to append to the string IF it gets - * truncated, defaults to '...' - * @return string - */ - public static function safe_truncate($string, $length, $append = '...') - { - $ret = substr($string, 0, $length); - $last_space = strrpos($ret, ' '); - - if ($last_space !== false && $string != $ret) { - $ret = substr($ret, 0, $last_space); - } - - if ($ret != $string) { - $ret .= $append; - } - - return $ret; - } - - - /** - * Truncate the string to given length of characters. - * - * @param string $string The variable to truncate - * @param integer $limit The length to truncate the string to - * @param string $append Text to append to the string IF it gets - * truncated, defaults to '...' 
- * @return string - */ - public static function limit_characters($string, $limit = 100, $append = '...') - { - if (mb_strlen($string) <= $limit) { - return $string; - } - - return rtrim(mb_substr($string, 0, $limit, 'UTF-8')) . $append; - } - - /** - * Truncate the string to given length of words. - * - * @param $string - * @param $limit - * @param string $append - * @return string - */ - public static function limit_words($string, $limit = 100, $append = '...') - { - preg_match('/^\s*+(?:\S++\s*+){1,' . $limit . '}/u', $string, $matches); - - if (!isset($matches[0]) || strlen($string) === strlen($matches[0])) { - return $string; - } - - return rtrim($matches[0]).$append; - } - - /** - * Returns the ordinal version of a number (appends th, st, nd, rd). - * - * @param string $number The number to append an ordinal suffix to - * @return string - */ - public static function ordinal($number) - { - $test_c = abs($number) % 10; - $ext = ((abs($number) % 100 < 21 && abs($number) % 100 > 4) ? 'th' : (($test_c < 4) ? ($test_c < 3) ? ($test_c < 2) ? ($test_c < 1) ? 'th' : 'st' : 'nd' : 'rd' : 'th')); - - return $number . $ext; - } - - /** - * Returns the file permissions as a nice string, like -rw-r--r-- or false - * if the file is not found. - * - * @param string $file The name of the file to get permissions form - * @param int $perms Numerical value of permissions to display as text. - * @return string - */ - public static function full_permissions($file, $perms = null) - { - if (is_null($perms)) { - if (!file_exists($file)) { - return false; - } - $perms = fileperms($file); - } - - if (($perms & 0xC000) == 0xC000) { - // Socket - $info = 's'; - } elseif (($perms & 0xA000) == 0xA000) { - // Symbolic Link - $info = 'l'; - } elseif (($perms & 0x8000) == 0x8000) { - // Regular - $info = '-'; - } elseif (($perms & 0x6000) == 0x6000) { - // Block special - $info = 'b'; - } elseif (($perms & 0x4000) == 0x4000) { - // Directory - $info = 'd'; - } elseif (($perms & 0x2000) == 0x2000) { - // Character special - $info = 'c'; - } elseif (($perms & 0x1000) == 0x1000) { - // FIFO pipe - $info = 'p'; - } else { - // Unknown - $info = 'u'; - } - - // Owner - $info .= (($perms & 0x0100) ? 'r' : '-'); - $info .= (($perms & 0x0080) ? 'w' : '-'); - $info .= (($perms & 0x0040) ? - (($perms & 0x0800) ? 's' : 'x') : - (($perms & 0x0800) ? 'S' : '-')); - - // Group - $info .= (($perms & 0x0020) ? 'r' : '-'); - $info .= (($perms & 0x0010) ? 'w' : '-'); - $info .= (($perms & 0x0008) ? - (($perms & 0x0400) ? 's' : 'x') : - (($perms & 0x0400) ? 'S' : '-')); - - // World - $info .= (($perms & 0x0004) ? 'r' : '-'); - $info .= (($perms & 0x0002) ? 'w' : '-'); - $info .= (($perms & 0x0001) ? - (($perms & 0x0200) ? 't' : 'x') : - (($perms & 0x0200) ? 'T' : '-')); - - return $info; - } - - /** - * Returns the first element in an array. - * - * @param array $array - * @return mixed - */ - public static function array_first(array $array) - { - return reset($array); - } - - /** - * Returns the last element in an array. - * - * @param array $array - * @return mixed - */ - public static function array_last(array $array) - { - return end($array); - } - - /** - * Returns the first key in an array. - * - * @param array $array - * @return int|string - */ - public static function array_first_key(array $array) - { - reset($array); - - return key($array); - } - - /** - * Returns the last key in an array. 
- * - * @param array $array - * @return int|string - */ - public static function array_last_key(array $array) - { - end($array); - - return key($array); - } - - /** - * Flatten a multi-dimensional array into a one dimensional array. - * - * Contributed by Theodore R. Smith of PHP Experts, Inc. - * - * @param array $array The array to flatten - * @param boolean $preserve_keys Whether or not to preserve array keys. - * Keys from deeply nested arrays will - * overwrite keys from shallowy nested arrays - * @return array - */ - public static function array_flatten(array $array, $preserve_keys = true) - { - $flattened = array(); - - array_walk_recursive($array, function($value, $key) use (&$flattened, $preserve_keys) { - if ($preserve_keys && !is_int($key)) { - $flattened[$key] = $value; - } else { - $flattened[] = $value; - } - }); - - return $flattened; - } - - /** - * Accepts an array, and returns an array of values from that array as - * specified by $field. For example, if the array is full of objects - * and you call util::array_pluck($array, 'name'), the function will - * return an array of values from $array[]->name. - * - * @param array $array An array - * @param string $field The field to get values from - * @param boolean $preserve_keys Whether or not to preserve the - * array keys - * @param boolean $remove_nomatches If the field doesn't appear to be set, - * remove it from the array - * @return array - */ - public static function array_pluck(array $array, $field, $preserve_keys = true, $remove_nomatches = true) - { - $new_list = array(); - - foreach ($array as $key => $value) { - if (is_object($value)) { - if (isset($value->{$field})) { - if ($preserve_keys) { - $new_list[$key] = $value->{$field}; - } else { - $new_list[] = $value->{$field}; - } - } elseif (!$remove_nomatches) { - $new_list[$key] = $value; - } - } else { - if (isset($value[$field])) { - if ($preserve_keys) { - $new_list[$key] = $value[$field]; - } else { - $new_list[] = $value[$field]; - } - } elseif (!$remove_nomatches) { - $new_list[$key] = $value; - } - } - } - - return $new_list; - } - - /** - * Searches for a given value in an array of arrays, objects and scalar - * values. You can optionally specify a field of the nested arrays and - * objects to search in. - * - * @param array $array The array to search - * @param scalar $search The value to search for - * @param string $field The field to search in, if not specified - * all fields will be searched - * @return boolean|scalar False on failure or the array key on success - */ - public static function array_search_deep(array $array, $search, $field = false) - { - // *grumbles* stupid PHP type system - $search = (string) $search; - - foreach ($array as $key => $elem) { - // *grumbles* stupid PHP type system - $key = (string) $key; - - if ($field) { - if (is_object($elem) && $elem->{$field} === $search) { - return $key; - } elseif (is_array($elem) && $elem[$field] === $search) { - return $key; - } elseif (is_scalar($elem) && $elem === $search) { - return $key; - } - } else { - if (is_object($elem)) { - $elem = (array) $elem; - - if (in_array($search, $elem)) { - return $key; - } - } elseif (is_array($elem) && in_array($search, $elem)) { - return $key; - } elseif (is_scalar($elem) && $elem === $search) { - return $key; - } - } - } - - return false; - } - - /** - * Returns an array containing all the elements of arr1 after applying - * the callback function to each one. 
- * - * @param string $callback Callback function to run for each - * element in each array - * @param array $array An array to run through the callback - * function - * @param boolean $on_nonscalar Whether or not to call the callback - * function on nonscalar values - * (Objects, resources, etc) - * @return array - */ - public static function array_map_deep(array $array, $callback, $on_nonscalar = false) - { - foreach ($array as $key => $value) { - if (is_array($value)) { - $args = array($value, $callback, $on_nonscalar); - $array[$key] = call_user_func_array(array(__CLASS__, __FUNCTION__), $args); - } elseif (is_scalar($value) || $on_nonscalar) { - $array[$key] = call_user_func($callback, $value); - } - } - - return $array; - } - - public static function array_clean(array $array) - { - return array_filter($array); - } - - /** - * Wrapper to prevent errors if the user doesn't have the mbstring - * extension installed. - * - * @param string $encoding - * @return string - */ - protected static function mbInternalEncoding($encoding = null) - { - if (function_exists('mb_internal_encoding')) { - return $encoding ? mb_internal_encoding($encoding) : mb_internal_encoding(); - } - - // @codeCoverageIgnoreStart - return 'UTF-8'; - // @codeCoverageIgnoreEnd - } - - /** - * Set the writable bit on a file to the minimum value that allows the user - * running PHP to write to it. - * - * @param string $filename The filename to set the writable bit on - * @param boolean $writable Whether to make the file writable or not - * @return boolean - */ - public static function set_writable($filename, $writable = true) - { - $stat = @stat($filename); - - if ($stat === false) { - return false; - } - - // We're on Windows - if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { - return true; - } - - list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); - - if ($writable) { - // Set only the user writable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, fileperms($filename) | 0200); - } - - // Set only the group writable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, fileperms($filename) | 0220); - } - - // Set the world writable bit (file isn't owned or grouped by us) - return chmod($filename, fileperms($filename) | 0222); - } else { - // Set only the user writable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, (fileperms($filename) | 0222) ^ 0222); - } - - // Set only the group writable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, (fileperms($filename) | 0222) ^ 0022); - } - - // Set the world writable bit (file isn't owned or grouped by us) - return chmod($filename, (fileperms($filename) | 0222) ^ 0002); - } - } - - /** - * Set the readable bit on a file to the minimum value that allows the user - * running PHP to read to it. 
- * - * @param string $filename The filename to set the readable bit on - * @param boolean $readable Whether to make the file readable or not - * @return boolean - */ - public static function set_readable($filename, $readable = true) - { - $stat = @stat($filename); - - if ($stat === false) { - return false; - } - - // We're on Windows - if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { - return true; - } - - list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); - - if ($readable) { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, fileperms($filename) | 0400); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, fileperms($filename) | 0440); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, fileperms($filename) | 0444); - } else { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, (fileperms($filename) | 0444) ^ 0444); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, (fileperms($filename) | 0444) ^ 0044); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, (fileperms($filename) | 0444) ^ 0004); - } - } - - /** - * Set the executable bit on a file to the minimum value that allows the - * user running PHP to read to it. - * - * @param string $filename The filename to set the executable bit on - * @param boolean $executable Whether to make the file executable or not - * @return boolean - */ - public static function set_executable($filename, $executable = true) - { - $stat = @stat($filename); - - if ($stat === false) { - return false; - } - - // We're on Windows - if (strncasecmp(PHP_OS, 'WIN', 3) === 0) { - return true; - } - - list($myuid, $mygid) = array(posix_geteuid(), posix_getgid()); - - if ($executable) { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, fileperms($filename) | 0100); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, fileperms($filename) | 0110); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, fileperms($filename) | 0111); - } else { - // Set only the user readable bit (file is owned by us) - if ($stat['uid'] == $myuid) { - return chmod($filename, (fileperms($filename) | 0111) ^ 0111); - } - - // Set only the group readable bit (file group is the same as us) - if ($stat['gid'] == $mygid) { - return chmod($filename, (fileperms($filename) | 0111) ^ 0011); - } - - // Set the world readable bit (file isn't owned or grouped by us) - return chmod($filename, (fileperms($filename) | 0111) ^ 0001); - } - } - - /** - * Returns size of a given directory in bytes. - * - * @param string $dir - * @return integer - */ - public static function directory_size($dir) - { - $size = 0; - foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $file => $key) { - if ($key->isFile()) { - $size += $key->getSize(); - } - } - return $size; - } - - /** - * Returns a home directory of current user. 
- * - * @return string - */ - public static function get_user_directory() - { - if (isset($_SERVER['HOMEDRIVE'])) return $_SERVER['HOMEDRIVE'] . $_SERVER['HOMEPATH']; - else return $_SERVER['HOME']; - } - - /** - * Returns all paths inside a directory. - * - * @param string $dir - * @return array - */ - public static function directory_contents($dir) - { - $contents = array(); - foreach(new \RecursiveIteratorIterator(new \RecursiveDirectoryIterator($dir, \FilesystemIterator::KEY_AS_PATHNAME | \FilesystemIterator::CURRENT_AS_FILEINFO | \FilesystemIterator::SKIP_DOTS)) as $pathname => $fi) { - $contents[] = $pathname; - } - natsort($contents); - return $contents; - } -} diff --git a/node_modules/npm-run-path/index.d.ts b/node_modules/npm-run-path/index.d.ts deleted file mode 100644 index af10d41..0000000 --- a/node_modules/npm-run-path/index.d.ts +++ /dev/null @@ -1,89 +0,0 @@ -declare namespace npmRunPath { - interface RunPathOptions { - /** - Working directory. - - @default process.cwd() - */ - readonly cwd?: string; - - /** - PATH to be appended. Default: [`PATH`](https://github.com/sindresorhus/path-key). - - Set it to an empty string to exclude the default PATH. - */ - readonly path?: string; - - /** - Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. - - This can be either an absolute path or a path relative to the `cwd` option. - - @default process.execPath - */ - readonly execPath?: string; - } - - interface ProcessEnv { - [key: string]: string | undefined; - } - - interface EnvOptions { - /** - Working directory. - - @default process.cwd() - */ - readonly cwd?: string; - - /** - Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. - */ - readonly env?: ProcessEnv; - - /** - Path to the current Node.js executable. Its directory is pushed to the front of PATH. - - This can be either an absolute path or a path relative to the `cwd` option. - - @default process.execPath - */ - readonly execPath?: string; - } -} - -declare const npmRunPath: { - /** - Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries. - - @returns The augmented path string. - - @example - ``` - import * as childProcess from 'child_process'; - import npmRunPath = require('npm-run-path'); - - console.log(process.env.PATH); - //=> '/usr/local/bin' - - console.log(npmRunPath()); - //=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' - - // `foo` is a locally installed binary - childProcess.execFileSync('foo', { - env: npmRunPath.env() - }); - ``` - */ - (options?: npmRunPath.RunPathOptions): string; - - /** - @returns The augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. 
- */ - env(options?: npmRunPath.EnvOptions): npmRunPath.ProcessEnv; - - // TODO: Remove this for the next major release - default: typeof npmRunPath; -}; - -export = npmRunPath; diff --git a/node_modules/npm-run-path/index.js b/node_modules/npm-run-path/index.js deleted file mode 100644 index 8c94abc..0000000 --- a/node_modules/npm-run-path/index.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; -const path = require('path'); -const pathKey = require('path-key'); - -const npmRunPath = options => { - options = { - cwd: process.cwd(), - path: process.env[pathKey()], - execPath: process.execPath, - ...options - }; - - let previous; - let cwdPath = path.resolve(options.cwd); - const result = []; - - while (previous !== cwdPath) { - result.push(path.join(cwdPath, 'node_modules/.bin')); - previous = cwdPath; - cwdPath = path.resolve(cwdPath, '..'); - } - - // Ensure the running `node` binary is used - const execPathDir = path.resolve(options.cwd, options.execPath, '..'); - result.push(execPathDir); - - return result.concat(options.path).join(path.delimiter); -}; - -module.exports = npmRunPath; -// TODO: Remove this for the next major release -module.exports.default = npmRunPath; - -module.exports.env = options => { - options = { - env: process.env, - ...options - }; - - const env = {...options.env}; - const path = pathKey({env}); - - options.path = env[path]; - env[path] = module.exports(options); - - return env; -}; diff --git a/node_modules/npm-run-path/license b/node_modules/npm-run-path/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/npm-run-path/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
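For reviewers skimming the deleted `npm-run-path/index.js` above: the core of the module is the upward directory walk that collects every `node_modules/.bin` directory before prepending it to `PATH`. The sketch below is a minimal standalone re-implementation of that walk for illustration only; it is not part of the package, and the sample path in the comment is hypothetical.

```js
'use strict';
const path = require('path');

// Illustrative re-implementation of the loop in the deleted index.js:
// push `<dir>/node_modules/.bin` for the starting directory and every
// ancestor until path.resolve() stops changing (the filesystem root).
function localBinDirs(startDir) {
	const result = [];
	let previous;
	let current = path.resolve(startDir);

	while (previous !== current) {
		result.push(path.join(current, 'node_modules/.bin'));
		previous = current;
		current = path.resolve(current, '..');
	}

	return result;
}

// Hypothetical example:
// localBinDirs('/Users/sindresorhus/dev/foo')
//=> ['/Users/sindresorhus/dev/foo/node_modules/.bin',
//    '/Users/sindresorhus/dev/node_modules/.bin',
//    ...,
//    '/node_modules/.bin']
```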
diff --git a/node_modules/npm-run-path/package.json b/node_modules/npm-run-path/package.json deleted file mode 100644 index feb8c00..0000000 --- a/node_modules/npm-run-path/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "npm-run-path", - "version": "4.0.1", - "description": "Get your PATH prepended with locally installed binaries", - "license": "MIT", - "repository": "sindresorhus/npm-run-path", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "npm", - "run", - "path", - "package", - "bin", - "binary", - "binaries", - "script", - "cli", - "command-line", - "execute", - "executable" - ], - "dependencies": { - "path-key": "^3.0.0" - }, - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/npm-run-path/readme.md b/node_modules/npm-run-path/readme.md deleted file mode 100644 index 557fbeb..0000000 --- a/node_modules/npm-run-path/readme.md +++ /dev/null @@ -1,115 +0,0 @@ -# npm-run-path [![Build Status](https://travis-ci.org/sindresorhus/npm-run-path.svg?branch=master)](https://travis-ci.org/sindresorhus/npm-run-path) - -> Get your [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) prepended with locally installed binaries - -In [npm run scripts](https://docs.npmjs.com/cli/run-script) you can execute locally installed binaries by name. This enables the same outside npm. - - -## Install - -``` -$ npm install npm-run-path -``` - - -## Usage - -```js -const childProcess = require('child_process'); -const npmRunPath = require('npm-run-path'); - -console.log(process.env.PATH); -//=> '/usr/local/bin' - -console.log(npmRunPath()); -//=> '/Users/sindresorhus/dev/foo/node_modules/.bin:/Users/sindresorhus/dev/node_modules/.bin:/Users/sindresorhus/node_modules/.bin:/Users/node_modules/.bin:/node_modules/.bin:/usr/local/bin' - -// `foo` is a locally installed binary -childProcess.execFileSync('foo', { - env: npmRunPath.env() -}); -``` - - -## API - -### npmRunPath(options?) - -Returns the augmented path string. - -#### options - -Type: `object` - -##### cwd - -Type: `string`
-Default: `process.cwd()` - -Working directory. - -##### path - -Type: `string`<br>
-Default: [`PATH`](https://github.com/sindresorhus/path-key) - -PATH to be appended.<br>
-Set it to an empty string to exclude the default PATH. - -##### execPath - -Type: `string`<br>
-Default: `process.execPath` - -Path to the current Node.js executable. Its directory is pushed to the front of PATH. - -This can be either an absolute path or a path relative to the [`cwd` option](#cwd). - -### npmRunPath.env(options?) - -Returns the augmented [`process.env`](https://nodejs.org/api/process.html#process_process_env) object. - -#### options - -Type: `object` - -##### cwd - -Type: `string`<br>
-Default: `process.cwd()` - -Working directory. - -##### env - -Type: `Object` - -Accepts an object of environment variables, like `process.env`, and modifies the PATH using the correct [PATH key](https://github.com/sindresorhus/path-key). Use this if you're modifying the PATH for use in the `child_process` options. - -##### execPath - -Type: `string`<br>
-Default: `process.execPath` - -Path to the Node.js executable to use in child processes if that is different from the current one. Its directory is pushed to the front of PATH. - -This can be either an absolute path or a path relative to the [`cwd` option](#cwd). - - -## Related - -- [npm-run-path-cli](https://github.com/sindresorhus/npm-run-path-cli) - CLI for this module -- [execa](https://github.com/sindresorhus/execa) - Execute a locally installed binary - - ---- -
-Get professional support for this package with a Tidelift subscription
-
-Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
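As a quick sanity check against the readme above, here is a hedged usage sketch of the two entry points it documents; `foo` and the project path are placeholders mirroring the readme's own example rather than anything introduced by this patch.

```js
const childProcess = require('child_process');
const npmRunPath = require('npm-run-path');

// PATH string augmented for a specific project directory (placeholder path).
console.log(npmRunPath({cwd: '/path/to/project'}));

// Spawn a locally installed binary (`foo` is a placeholder) with an env copy
// whose PATH entry was rewritten through the platform's correct path key.
childProcess.execFileSync('foo', {
	cwd: '/path/to/project',
	env: npmRunPath.env({cwd: '/path/to/project'})
});
```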
diff --git a/node_modules/onetime/index.d.ts b/node_modules/onetime/index.d.ts deleted file mode 100644 index ea84cab..0000000 --- a/node_modules/onetime/index.d.ts +++ /dev/null @@ -1,64 +0,0 @@ -declare namespace onetime { - interface Options { - /** - Throw an error when called more than once. - - @default false - */ - throw?: boolean; - } -} - -declare const onetime: { - /** - Ensure a function is only called once. When called multiple times it will return the return value from the first call. - - @param fn - Function that should only be called once. - @returns A function that only calls `fn` once. - - @example - ``` - import onetime = require('onetime'); - - let i = 0; - - const foo = onetime(() => ++i); - - foo(); //=> 1 - foo(); //=> 1 - foo(); //=> 1 - - onetime.callCount(foo); //=> 3 - ``` - */ - ( - fn: (...arguments: ArgumentsType) => ReturnType, - options?: onetime.Options - ): (...arguments: ArgumentsType) => ReturnType; - - /** - Get the number of times `fn` has been called. - - @param fn - Function to get call count from. - @returns A number representing how many times `fn` has been called. - - @example - ``` - import onetime = require('onetime'); - - const foo = onetime(() => {}); - foo(); - foo(); - foo(); - - console.log(onetime.callCount(foo)); - //=> 3 - ``` - */ - callCount(fn: (...arguments: any[]) => unknown): number; - - // TODO: Remove this for the next major release - default: typeof onetime; -}; - -export = onetime; diff --git a/node_modules/onetime/index.js b/node_modules/onetime/index.js deleted file mode 100644 index 99c5fc1..0000000 --- a/node_modules/onetime/index.js +++ /dev/null @@ -1,44 +0,0 @@ -'use strict'; -const mimicFn = require('mimic-fn'); - -const calledFunctions = new WeakMap(); - -const onetime = (function_, options = {}) => { - if (typeof function_ !== 'function') { - throw new TypeError('Expected a function'); - } - - let returnValue; - let callCount = 0; - const functionName = function_.displayName || function_.name || ''; - - const onetime = function (...arguments_) { - calledFunctions.set(onetime, ++callCount); - - if (callCount === 1) { - returnValue = function_.apply(this, arguments_); - function_ = null; - } else if (options.throw === true) { - throw new Error(`Function \`${functionName}\` can only be called once`); - } - - return returnValue; - }; - - mimicFn(onetime, function_); - calledFunctions.set(onetime, callCount); - - return onetime; -}; - -module.exports = onetime; -// TODO: Remove this for the next major release -module.exports.default = onetime; - -module.exports.callCount = function_ => { - if (!calledFunctions.has(function_)) { - throw new Error(`The given function \`${function_.name}\` is not wrapped by the \`onetime\` package`); - } - - return calledFunctions.get(function_); -}; diff --git a/node_modules/onetime/license b/node_modules/onetime/license deleted file mode 100644 index fa7ceba..0000000 --- a/node_modules/onetime/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall 
be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/onetime/package.json b/node_modules/onetime/package.json deleted file mode 100644 index 54caea5..0000000 --- a/node_modules/onetime/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "onetime", - "version": "5.1.2", - "description": "Ensure a function is only called once", - "license": "MIT", - "repository": "sindresorhus/onetime", - "funding": "https://github.com/sponsors/sindresorhus", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "https://sindresorhus.com" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "once", - "function", - "one", - "onetime", - "func", - "fn", - "single", - "call", - "called", - "prevent" - ], - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.1", - "xo": "^0.24.0" - } -} diff --git a/node_modules/onetime/readme.md b/node_modules/onetime/readme.md deleted file mode 100644 index 2d133d3..0000000 --- a/node_modules/onetime/readme.md +++ /dev/null @@ -1,94 +0,0 @@ -# onetime [![Build Status](https://travis-ci.com/sindresorhus/onetime.svg?branch=master)](https://travis-ci.com/github/sindresorhus/onetime) - -> Ensure a function is only called once - -When called multiple times it will return the return value from the first call. - -*Unlike the module [once](https://github.com/isaacs/once), this one isn't naughty and extending `Function.prototype`.* - -## Install - -``` -$ npm install onetime -``` - -## Usage - -```js -const onetime = require('onetime'); - -let i = 0; - -const foo = onetime(() => ++i); - -foo(); //=> 1 -foo(); //=> 1 -foo(); //=> 1 - -onetime.callCount(foo); //=> 3 -``` - -```js -const onetime = require('onetime'); - -const foo = onetime(() => {}, {throw: true}); - -foo(); - -foo(); -//=> Error: Function `foo` can only be called once -``` - -## API - -### onetime(fn, options?) - -Returns a function that only calls `fn` once. - -#### fn - -Type: `Function` - -Function that should only be called once. - -#### options - -Type: `object` - -##### throw - -Type: `boolean`\ -Default: `false` - -Throw an error when called more than once. - -### onetime.callCount(fn) - -Returns a number representing how many times `fn` has been called. - -Note: It throws an error if you pass in a function that is not wrapped by `onetime`. - -```js -const onetime = require('onetime'); - -const foo = onetime(() => {}); - -foo(); -foo(); -foo(); - -console.log(onetime.callCount(foo)); -//=> 3 -``` - -#### fn - -Type: `Function` - -Function to get call count from. - -## onetime for enterprise - -Available as part of the Tidelift Subscription. - -The maintainers of onetime and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. 
Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-onetime?utm_source=npm-onetime&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/path-key/index.d.ts b/node_modules/path-key/index.d.ts deleted file mode 100644 index 7c575d1..0000000 --- a/node_modules/path-key/index.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -/// - -declare namespace pathKey { - interface Options { - /** - Use a custom environment variables object. Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env). - */ - readonly env?: {[key: string]: string | undefined}; - - /** - Get the PATH key for a specific platform. Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform). - */ - readonly platform?: NodeJS.Platform; - } -} - -declare const pathKey: { - /** - Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform. - - @example - ``` - import pathKey = require('path-key'); - - const key = pathKey(); - //=> 'PATH' - - const PATH = process.env[key]; - //=> '/usr/local/bin:/usr/bin:/bin' - ``` - */ - (options?: pathKey.Options): string; - - // TODO: Remove this for the next major release, refactor the whole definition to: - // declare function pathKey(options?: pathKey.Options): string; - // export = pathKey; - default: typeof pathKey; -}; - -export = pathKey; diff --git a/node_modules/path-key/index.js b/node_modules/path-key/index.js deleted file mode 100644 index 0cf6415..0000000 --- a/node_modules/path-key/index.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -const pathKey = (options = {}) => { - const environment = options.env || process.env; - const platform = options.platform || process.platform; - - if (platform !== 'win32') { - return 'PATH'; - } - - return Object.keys(environment).reverse().find(key => key.toUpperCase() === 'PATH') || 'Path'; -}; - -module.exports = pathKey; -// TODO: Remove this for the next major release -module.exports.default = pathKey; diff --git a/node_modules/path-key/license b/node_modules/path-key/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/path-key/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/path-key/package.json b/node_modules/path-key/package.json deleted file mode 100644 index c8cbd38..0000000 --- a/node_modules/path-key/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "path-key", - "version": "3.1.1", - "description": "Get the PATH environment variable key cross-platform", - "license": "MIT", - "repository": "sindresorhus/path-key", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "path", - "key", - "environment", - "env", - "variable", - "var", - "get", - "cross-platform", - "windows" - ], - "devDependencies": { - "@types/node": "^11.13.0", - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/path-key/readme.md b/node_modules/path-key/readme.md deleted file mode 100644 index a9052d7..0000000 --- a/node_modules/path-key/readme.md +++ /dev/null @@ -1,61 +0,0 @@ -# path-key [![Build Status](https://travis-ci.org/sindresorhus/path-key.svg?branch=master)](https://travis-ci.org/sindresorhus/path-key) - -> Get the [PATH](https://en.wikipedia.org/wiki/PATH_(variable)) environment variable key cross-platform - -It's usually `PATH`, but on Windows it can be any casing like `Path`... - - -## Install - -``` -$ npm install path-key -``` - - -## Usage - -```js -const pathKey = require('path-key'); - -const key = pathKey(); -//=> 'PATH' - -const PATH = process.env[key]; -//=> '/usr/local/bin:/usr/bin:/bin' -``` - - -## API - -### pathKey(options?) - -#### options - -Type: `object` - -##### env - -Type: `object`
-Default: [`process.env`](https://nodejs.org/api/process.html#process_process_env) - -Use a custom environment variables object. - -##### platform - -Type: `string`
-Default: [`process.platform`](https://nodejs.org/api/process.html#process_process_platform) - -Get the PATH key for a specific platform. - - ---- - -
-Get professional support for this package with a Tidelift subscription.
-
-Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
-
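The deleted `path-key` module (its `index.js` appears earlier in this hunk) resolves the platform-specific name of the `PATH` environment variable. A short sketch of that behavior, with illustrative values:

```js
// Sketch of the deleted path-key@3.1.1 behavior (see the index.js above).
const pathKey = require('path-key');

// On POSIX platforms the key is always 'PATH'.
console.log(pathKey({ platform: 'linux' })); //=> 'PATH'

// On Windows the key is looked up case-insensitively in the given env,
// falling back to 'Path' when nothing matches.
console.log(pathKey({ platform: 'win32', env: { PaTh: 'C:\\bin' } })); //=> 'PaTh'
console.log(pathKey({ platform: 'win32', env: {} }));                  //=> 'Path'
```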
diff --git a/node_modules/picomatch/CHANGELOG.md b/node_modules/picomatch/CHANGELOG.md deleted file mode 100644 index 8ccc6c1..0000000 --- a/node_modules/picomatch/CHANGELOG.md +++ /dev/null @@ -1,136 +0,0 @@ -# Release history - -**All notable changes to this project will be documented in this file.** - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -
- Guiding Principles - -- Changelogs are for humans, not machines. -- There should be an entry for every single version. -- The same types of changes should be grouped. -- Versions and sections should be linkable. -- The latest version comes first. -- The release date of each version is displayed. -- Mention whether you follow Semantic Versioning. -
- -
- Types of changes - -Changelog entries are classified using the following labels _(from [keep-a-changelog](http://keepachangelog.com/)_): - -- `Added` for new features. -- `Changed` for changes in existing functionality. -- `Deprecated` for soon-to-be removed features. -- `Removed` for now removed features. -- `Fixed` for any bug fixes. -- `Security` in case of vulnerabilities. - -
- -## 2.3.1 (2022-01-02) - -### Fixed - -* Fixes bug when a pattern containing an expression after the closing parenthesis (`/!(*.d).{ts,tsx}`) was incorrectly converted to regexp ([9f241ef](https://github.com/micromatch/picomatch/commit/9f241ef)). - -### Changed - -* Some documentation improvements ([f81d236](https://github.com/micromatch/picomatch/commit/f81d236), [421e0e7](https://github.com/micromatch/picomatch/commit/421e0e7)). - -## 2.3.0 (2021-05-21) - -### Fixed - -* Fixes bug where file names with two dots were not being matched consistently with negation extglobs containing a star ([56083ef](https://github.com/micromatch/picomatch/commit/56083ef)) - -## 2.2.3 (2021-04-10) - -### Fixed - -* Do not skip pattern seperator for square brackets ([fb08a30](https://github.com/micromatch/picomatch/commit/fb08a30)). -* Set negatedExtGlob also if it does not span the whole pattern ([032e3f5](https://github.com/micromatch/picomatch/commit/032e3f5)). - -## 2.2.2 (2020-03-21) - -### Fixed - -* Correctly handle parts of the pattern after parentheses in the `scan` method ([e15b920](https://github.com/micromatch/picomatch/commit/e15b920)). - -## 2.2.1 (2020-01-04) - -* Fixes [#49](https://github.com/micromatch/picomatch/issues/49), so that braces with no sets or ranges are now propertly treated as literals. - -## 2.2.0 (2020-01-04) - -* Disable fastpaths mode for the parse method ([5b8d33f](https://github.com/micromatch/picomatch/commit/5b8d33f)) -* Add `tokens`, `slashes`, and `parts` to the object returned by `picomatch.scan()`. - -## 2.1.0 (2019-10-31) - -* add benchmarks for scan ([4793b92](https://github.com/micromatch/picomatch/commit/4793b92)) -* Add eslint object-curly-spacing rule ([707c650](https://github.com/micromatch/picomatch/commit/707c650)) -* Add prefer-const eslint rule ([5c7501c](https://github.com/micromatch/picomatch/commit/5c7501c)) -* Add support for nonegate in scan API ([275c9b9](https://github.com/micromatch/picomatch/commit/275c9b9)) -* Change lets to consts. Move root import up. ([4840625](https://github.com/micromatch/picomatch/commit/4840625)) -* closes https://github.com/micromatch/picomatch/issues/21 ([766bcb0](https://github.com/micromatch/picomatch/commit/766bcb0)) -* Fix "Extglobs" table in readme ([eb19da8](https://github.com/micromatch/picomatch/commit/eb19da8)) -* fixes https://github.com/micromatch/picomatch/issues/20 ([9caca07](https://github.com/micromatch/picomatch/commit/9caca07)) -* fixes https://github.com/micromatch/picomatch/issues/26 ([fa58f45](https://github.com/micromatch/picomatch/commit/fa58f45)) -* Lint test ([d433a34](https://github.com/micromatch/picomatch/commit/d433a34)) -* lint unit tests ([0159b55](https://github.com/micromatch/picomatch/commit/0159b55)) -* Make scan work with noext ([6c02e03](https://github.com/micromatch/picomatch/commit/6c02e03)) -* minor linting ([c2a2b87](https://github.com/micromatch/picomatch/commit/c2a2b87)) -* minor parser improvements ([197671d](https://github.com/micromatch/picomatch/commit/197671d)) -* remove eslint since it... 
([07876fa](https://github.com/micromatch/picomatch/commit/07876fa)) -* remove funding file ([8ebe96d](https://github.com/micromatch/picomatch/commit/8ebe96d)) -* Remove unused funks ([cbc6d54](https://github.com/micromatch/picomatch/commit/cbc6d54)) -* Run eslint during pretest, fix existing eslint findings ([0682367](https://github.com/micromatch/picomatch/commit/0682367)) -* support `noparen` in scan ([3d37569](https://github.com/micromatch/picomatch/commit/3d37569)) -* update changelog ([7b34e77](https://github.com/micromatch/picomatch/commit/7b34e77)) -* update travis ([777f038](https://github.com/micromatch/picomatch/commit/777f038)) -* Use eslint-disable-next-line instead of eslint-disable ([4e7c1fd](https://github.com/micromatch/picomatch/commit/4e7c1fd)) - -## 2.0.7 (2019-05-14) - -* 2.0.7 ([9eb9a71](https://github.com/micromatch/picomatch/commit/9eb9a71)) -* supports lookbehinds ([1f63f7e](https://github.com/micromatch/picomatch/commit/1f63f7e)) -* update .verb.md file with typo change ([2741279](https://github.com/micromatch/picomatch/commit/2741279)) -* fix: typo in README ([0753e44](https://github.com/micromatch/picomatch/commit/0753e44)) - -## 2.0.4 (2019-04-10) - -### Fixed - -- Readme link [fixed](https://github.com/micromatch/picomatch/pull/13/commits/a96ab3aa2b11b6861c23289964613d85563b05df) by @danez. -- `options.capture` now works as expected when fastpaths are enabled. See https://github.com/micromatch/picomatch/pull/12/commits/26aefd71f1cfaf95c37f1c1fcab68a693b037304. Thanks to @DrPizza. - -## 2.0.0 (2019-04-10) - -### Added - -- Adds support for `options.onIgnore`. See the readme for details -- Adds support for `options.onResult`. See the readme for details - -### Breaking changes - -- The unixify option was renamed to `windows` -- caching and all related options and methods have been removed - -## 1.0.0 (2018-11-05) - -- adds `.onMatch` option -- improvements to `.scan` method -- numerous improvements and optimizations for matching and parsing -- better windows path handling - -## 0.1.0 - 2017-04-13 - -First release. - - -[keep-a-changelog]: https://github.com/olivierlacan/keep-a-changelog diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE deleted file mode 100644 index 3608dca..0000000 --- a/node_modules/picomatch/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2017-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/picomatch/README.md b/node_modules/picomatch/README.md deleted file mode 100644 index b0526e2..0000000 --- a/node_modules/picomatch/README.md +++ /dev/null @@ -1,708 +0,0 @@ -

-# Picomatch
-
-(badges: version, test status, coverage status, downloads)
-
-> Blazing fast and accurate glob matcher written in JavaScript.
-> No dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.
-
- -## Why picomatch? - -* **Lightweight** - No dependencies -* **Minimal** - Tiny API surface. Main export is a function that takes a glob pattern and returns a matcher function. -* **Fast** - Loads in about 2ms (that's several times faster than a [single frame of a HD movie](http://www.endmemo.com/sconvert/framespersecondframespermillisecond.php) at 60fps) -* **Performant** - Use the returned matcher function to speed up repeat matching (like when watching files) -* **Accurate matching** - Using wildcards (`*` and `?`), globstars (`**`) for nested directories, [advanced globbing](#advanced-globbing) with extglobs, braces, and POSIX brackets, and support for escaping special characters with `\` or quotes. -* **Well tested** - Thousands of unit tests - -See the [library comparison](#library-comparisons) to other libraries. - -
-
- -## Table of Contents - -
Click to expand - -- [Install](#install) -- [Usage](#usage) -- [API](#api) - * [picomatch](#picomatch) - * [.test](#test) - * [.matchBase](#matchbase) - * [.isMatch](#ismatch) - * [.parse](#parse) - * [.scan](#scan) - * [.compileRe](#compilere) - * [.makeRe](#makere) - * [.toRegex](#toregex) -- [Options](#options) - * [Picomatch options](#picomatch-options) - * [Scan Options](#scan-options) - * [Options Examples](#options-examples) -- [Globbing features](#globbing-features) - * [Basic globbing](#basic-globbing) - * [Advanced globbing](#advanced-globbing) - * [Braces](#braces) - * [Matching special characters as literals](#matching-special-characters-as-literals) -- [Library Comparisons](#library-comparisons) -- [Benchmarks](#benchmarks) -- [Philosophies](#philosophies) -- [About](#about) - * [Author](#author) - * [License](#license) - -_(TOC generated by [verb](https://github.com/verbose/verb) using [markdown-toc](https://github.com/jonschlinkert/markdown-toc))_ - -
- -
-
- -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -npm install --save picomatch -``` - -
- -## Usage - -The main export is a function that takes a glob pattern and an options object and returns a function for matching strings. - -```js -const pm = require('picomatch'); -const isMatch = pm('*.js'); - -console.log(isMatch('abcd')); //=> false -console.log(isMatch('a.js')); //=> true -console.log(isMatch('a.md')); //=> false -console.log(isMatch('a/b.js')); //=> false -``` - -
- -## API - -### [picomatch](lib/picomatch.js#L32) - -Creates a matcher function from one or more glob patterns. The returned function takes a string to match as its first argument, and returns true if the string is a match. The returned matcher function also takes a boolean as the second argument that, when true, returns an object with additional information. - -**Params** - -* `globs` **{String|Array}**: One or more glob patterns. -* `options` **{Object=}** -* `returns` **{Function=}**: Returns a matcher function. - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch(glob[, options]); - -const isMatch = picomatch('*.!(*a)'); -console.log(isMatch('a.a')); //=> false -console.log(isMatch('a.b')); //=> true -``` - -### [.test](lib/picomatch.js#L117) - -Test `input` with the given `regex`. This is used by the main `picomatch()` function to test the input string. - -**Params** - -* `input` **{String}**: String to test. -* `regex` **{RegExp}** -* `returns` **{Object}**: Returns an object with matching info. - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.test(input, regex[, options]); - -console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); -// { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } -``` - -### [.matchBase](lib/picomatch.js#L161) - -Match the basename of a filepath. - -**Params** - -* `input` **{String}**: String to test. -* `glob` **{RegExp|String}**: Glob pattern or regex created by [.makeRe](#makeRe). -* `returns` **{Boolean}** - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.matchBase(input, glob[, options]); -console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true -``` - -### [.isMatch](lib/picomatch.js#L183) - -Returns true if **any** of the given glob `patterns` match the specified `string`. - -**Params** - -* **{String|Array}**: str The string to test. -* **{String|Array}**: patterns One or more glob patterns to use for matching. -* **{Object}**: See available [options](#options). -* `returns` **{Boolean}**: Returns true if any patterns match `str` - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.isMatch(string, patterns[, options]); - -console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true -console.log(picomatch.isMatch('a.a', 'b.*')); //=> false -``` - -### [.parse](lib/picomatch.js#L199) - -Parse a glob pattern to create the source string for a regular expression. - -**Params** - -* `pattern` **{String}** -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with useful properties and output to be used as a regex source string. - -**Example** - -```js -const picomatch = require('picomatch'); -const result = picomatch.parse(pattern[, options]); -``` - -### [.scan](lib/picomatch.js#L231) - -Scan a glob pattern to separate the pattern into segments. - -**Params** - -* `input` **{String}**: Glob pattern to scan. -* `options` **{Object}** -* `returns` **{Object}**: Returns an object with - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.scan(input[, options]); - -const result = picomatch.scan('!./foo/*.js'); -console.log(result); -{ prefix: '!./', - input: '!./foo/*.js', - start: 3, - base: 'foo', - glob: '*.js', - isBrace: false, - isBracket: false, - isGlob: true, - isExtglob: false, - isGlobstar: false, - negated: true } -``` - -### [.compileRe](lib/picomatch.js#L245) - -Compile a regular expression from the `state` object returned by the -[parse()](#parse) method. 
- -**Params** - -* `state` **{Object}** -* `options` **{Object}** -* `returnOutput` **{Boolean}**: Intended for implementors, this argument allows you to return the raw output from the parser. -* `returnState` **{Boolean}**: Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. -* `returns` **{RegExp}** - -### [.makeRe](lib/picomatch.js#L286) - -Create a regular expression from a parsed glob pattern. - -**Params** - -* `state` **{String}**: The object returned from the `.parse` method. -* `options` **{Object}** -* `returnOutput` **{Boolean}**: Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. -* `returnState` **{Boolean}**: Implementors may use this argument to return the state from the parsed glob with the returned regular expression. -* `returns` **{RegExp}**: Returns a regex created from the given pattern. - -**Example** - -```js -const picomatch = require('picomatch'); -const state = picomatch.parse('*.js'); -// picomatch.compileRe(state[, options]); - -console.log(picomatch.compileRe(state)); -//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ -``` - -### [.toRegex](lib/picomatch.js#L321) - -Create a regular expression from the given regex source string. - -**Params** - -* `source` **{String}**: Regular expression source string. -* `options` **{Object}** -* `returns` **{RegExp}** - -**Example** - -```js -const picomatch = require('picomatch'); -// picomatch.toRegex(source[, options]); - -const { output } = picomatch.parse('*.js'); -console.log(picomatch.toRegex(output)); -//=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ -``` - -
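The API section above documents `parse`, `compileRe`, `test`, `isMatch`, and `scan` separately; a small sketch chaining them together may help when reviewing what this removal drops. The file paths are hypothetical.

```js
// Sketch tying together the picomatch API methods documented above.
const picomatch = require('picomatch');

// parse -> compileRe mirrors what the main picomatch() export does internally.
const state = picomatch.parse('src/**/*.js');
const re = picomatch.compileRe(state);
console.log(picomatch.test('src/lib/index.js', re).isMatch); //=> true

// isMatch accepts one or more patterns directly.
console.log(picomatch.isMatch('src/app.css', ['**/*.js', '**/*.ts'])); //=> false

// scan splits a pattern into its literal base and glob parts.
console.log(picomatch.scan('src/**/*.js').base); //=> 'src'
```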
- -## Options - -### Picomatch options - -The following options may be used with the main `picomatch()` function or any of the methods on the picomatch API. - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `basename` | `boolean` | `false` | If set, then patterns without slashes will be matched against the basename of the path if it contains slashes. For example, `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. | -| `bash` | `boolean` | `false` | Follow bash matching rules more strictly - disallows backslashes as escape characters, and treats single stars as globstars (`**`). | -| `capture` | `boolean` | `undefined` | Return regex matches in supporting methods. | -| `contains` | `boolean` | `undefined` | Allows glob to match any part of the given string(s). | -| `cwd` | `string` | `process.cwd()` | Current working directory. Used by `picomatch.split()` | -| `debug` | `boolean` | `undefined` | Debug regular expressions when an error is thrown. | -| `dot` | `boolean` | `false` | Enable dotfile matching. By default, dotfiles are ignored unless a `.` is explicitly defined in the pattern, or `options.dot` is true | -| `expandRange` | `function` | `undefined` | Custom function for expanding ranges in brace patterns, such as `{a..z}`. The function receives the range values as two arguments, and it must return a string to be used in the generated regex. It's recommended that returned strings be wrapped in parentheses. | -| `failglob` | `boolean` | `false` | Throws an error if no matches are found. Based on the bash option of the same name. | -| `fastpaths` | `boolean` | `true` | To speed up processing, full parsing is skipped for a handful common glob patterns. Disable this behavior by setting this option to `false`. | -| `flags` | `string` | `undefined` | Regex flags to use in the generated regex. If defined, the `nocase` option will be overridden. | -| [format](#optionsformat) | `function` | `undefined` | Custom function for formatting the returned string. This is useful for removing leading slashes, converting Windows paths to Posix paths, etc. | -| `ignore` | `array\|string` | `undefined` | One or more glob patterns for excluding strings that should not be matched from the result. | -| `keepQuotes` | `boolean` | `false` | Retain quotes in the generated regex, since quotes may also be used as an alternative to backslashes. | -| `literalBrackets` | `boolean` | `undefined` | When `true`, brackets in the glob pattern will be escaped so that only literal brackets will be matched. | -| `matchBase` | `boolean` | `false` | Alias for `basename` | -| `maxLength` | `boolean` | `65536` | Limit the max length of the input string. An error is thrown if the input string is longer than this value. | -| `nobrace` | `boolean` | `false` | Disable brace matching, so that `{a,b}` and `{1..3}` would be treated as literal characters. | -| `nobracket` | `boolean` | `undefined` | Disable matching with regex brackets. | -| `nocase` | `boolean` | `false` | Make matching case-insensitive. Equivalent to the regex `i` flag. Note that this option is overridden by the `flags` option. | -| `nodupes` | `boolean` | `true` | Deprecated, use `nounique` instead. This option will be removed in a future major release. By default duplicates are removed. Disable uniquification by setting this option to false. 
| -| `noext` | `boolean` | `false` | Alias for `noextglob` | -| `noextglob` | `boolean` | `false` | Disable support for matching with extglobs (like `+(a\|b)`) | -| `noglobstar` | `boolean` | `false` | Disable support for matching nested directories with globstars (`**`) | -| `nonegate` | `boolean` | `false` | Disable support for negating with leading `!` | -| `noquantifiers` | `boolean` | `false` | Disable support for regex quantifiers (like `a{1,2}`) and treat them as brace patterns to be expanded. | -| [onIgnore](#optionsonIgnore) | `function` | `undefined` | Function to be called on ignored items. | -| [onMatch](#optionsonMatch) | `function` | `undefined` | Function to be called on matched items. | -| [onResult](#optionsonResult) | `function` | `undefined` | Function to be called on all items, regardless of whether or not they are matched or ignored. | -| `posix` | `boolean` | `false` | Support POSIX character classes ("posix brackets"). | -| `posixSlashes` | `boolean` | `undefined` | Convert all slashes in file paths to forward slashes. This does not convert slashes in the glob pattern itself | -| `prepend` | `boolean` | `undefined` | String to prepend to the generated regex used for matching. | -| `regex` | `boolean` | `false` | Use regular expression rules for `+` (instead of matching literal `+`), and for stars that follow closing parentheses or brackets (as in `)*` and `]*`). | -| `strictBrackets` | `boolean` | `undefined` | Throw an error if brackets, braces, or parens are imbalanced. | -| `strictSlashes` | `boolean` | `undefined` | When true, picomatch won't match trailing slashes with single stars. | -| `unescape` | `boolean` | `undefined` | Remove backslashes preceding escaped characters in the glob pattern. By default, backslashes are retained. | -| `unixify` | `boolean` | `undefined` | Alias for `posixSlashes`, for backwards compatibility. | - -picomatch has automatic detection for regex positive and negative lookbehinds. If the pattern contains a negative lookbehind, you must be using Node.js >= 8.10 or else picomatch will throw an error. - -### Scan Options - -In addition to the main [picomatch options](#picomatch-options), the following options may also be used with the [.scan](#scan) method. - -| **Option** | **Type** | **Default value** | **Description** | -| --- | --- | --- | --- | -| `tokens` | `boolean` | `false` | When `true`, the returned object will include an array of tokens (objects), representing each path "segment" in the scanned glob pattern | -| `parts` | `boolean` | `false` | When `true`, the returned object will include an array of strings representing each path "segment" in the scanned glob pattern. This is automatically enabled when `options.tokens` is true | - -**Example** - -```js -const picomatch = require('picomatch'); -const result = picomatch.scan('!./foo/*.js', { tokens: true }); -console.log(result); -// { -// prefix: '!./', -// input: '!./foo/*.js', -// start: 3, -// base: 'foo', -// glob: '*.js', -// isBrace: false, -// isBracket: false, -// isGlob: true, -// isExtglob: false, -// isGlobstar: false, -// negated: true, -// maxDepth: 2, -// tokens: [ -// { value: '!./', depth: 0, isGlob: false, negated: true, isPrefix: true }, -// { value: 'foo', depth: 1, isGlob: false }, -// { value: '*.js', depth: 1, isGlob: true } -// ], -// slashes: [ 2, 6 ], -// parts: [ 'foo', '*.js' ] -// } -``` - -
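As a reading aid for the options table above, here is a brief sketch combining a few of the documented options (`dot`, `nocase`, `ignore`); the paths are made up and the expected results follow the table's descriptions rather than a recorded test run.

```js
// Sketch of a few documented picomatch options working together.
const picomatch = require('picomatch');

const isMatch = picomatch('**/*.js', {
  dot: true,             // also match dotfiles such as '.config/...'
  nocase: true,          // case-insensitive matching
  ignore: '**/*.min.js'  // exclude minified files from the result
});

console.log(isMatch('.config/setup.JS')); //=> true  (dot + nocase)
console.log(isMatch('dist/app.min.js'));  //=> false (excluded by `ignore`)
```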
- -### Options Examples - -#### options.expandRange - -**Type**: `function` - -**Default**: `undefined` - -Custom function for expanding ranges in brace patterns. The [fill-range](https://github.com/jonschlinkert/fill-range) library is ideal for this purpose, or you can use custom code to do whatever you need. - -**Example** - -The following example shows how to create a glob that matches a folder - -```js -const fill = require('fill-range'); -const regex = pm.makeRe('foo/{01..25}/bar', { - expandRange(a, b) { - return `(${fill(a, b, { toRegex: true })})`; - } -}); - -console.log(regex); -//=> /^(?:foo\/((?:0[1-9]|1[0-9]|2[0-5]))\/bar)$/ - -console.log(regex.test('foo/00/bar')) // false -console.log(regex.test('foo/01/bar')) // true -console.log(regex.test('foo/10/bar')) // true -console.log(regex.test('foo/22/bar')) // true -console.log(regex.test('foo/25/bar')) // true -console.log(regex.test('foo/26/bar')) // false -``` - -#### options.format - -**Type**: `function` - -**Default**: `undefined` - -Custom function for formatting strings before they're matched. - -**Example** - -```js -// strip leading './' from strings -const format = str => str.replace(/^\.\//, ''); -const isMatch = picomatch('foo/*.js', { format }); -console.log(isMatch('./foo/bar.js')); //=> true -``` - -#### options.onMatch - -```js -const onMatch = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onMatch }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -#### options.onIgnore - -```js -const onIgnore = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onIgnore, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -#### options.onResult - -```js -const onResult = ({ glob, regex, input, output }) => { - console.log({ glob, regex, input, output }); -}; - -const isMatch = picomatch('*', { onResult, ignore: 'f*' }); -isMatch('foo'); -isMatch('bar'); -isMatch('baz'); -``` - -
-
- -## Globbing features - -* [Basic globbing](#basic-globbing) (Wildcard matching) -* [Advanced globbing](#advanced-globbing) (extglobs, posix brackets, brace matching) - -### Basic globbing - -| **Character** | **Description** | -| --- | --- | -| `*` | Matches any character zero or more times, excluding path separators. Does _not match_ path separators or hidden files or directories ("dotfiles"), unless explicitly enabled by setting the `dot` option to `true`. | -| `**` | Matches any character zero or more times, including path separators. Note that `**` will only match path separators (`/`, and `\\` on Windows) when they are the only characters in a path segment. Thus, `foo**/bar` is equivalent to `foo*/bar`, and `foo/a**b/bar` is equivalent to `foo/a*b/bar`, and _more than two_ consecutive stars in a glob path segment are regarded as _a single star_. Thus, `foo/***/bar` is equivalent to `foo/*/bar`. | -| `?` | Matches any character excluding path separators one time. Does _not match_ path separators or leading dots. | -| `[abc]` | Matches any characters inside the brackets. For example, `[abc]` would match the characters `a`, `b` or `c`, and nothing else. | - -#### Matching behavior vs. Bash - -Picomatch's matching features and expected results in unit tests are based on Bash's unit tests and the Bash 4.3 specification, with the following exceptions: - -* Bash will match `foo/bar/baz` with `*`. Picomatch only matches nested directories with `**`. -* Bash greedily matches with negated extglobs. For example, Bash 4.3 says that `!(foo)*` should match `foo` and `foobar`, since the trailing `*` bracktracks to match the preceding pattern. This is very memory-inefficient, and IMHO, also incorrect. Picomatch would return `false` for both `foo` and `foobar`. - -
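The two Bash differences called out above translate directly into code; a quick sketch:

```js
// Sketch of the documented differences from Bash matching behavior.
const pm = require('picomatch');

// A single star does not cross path separators; only a globstar does.
console.log(pm.isMatch('foo/bar/baz', '*'));  //=> false
console.log(pm.isMatch('foo/bar/baz', '**')); //=> true

// Negated extglobs are not matched greedily.
console.log(pm.isMatch('foo', '!(foo)*'));    //=> false
console.log(pm.isMatch('foobar', '!(foo)*')); //=> false
```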
- -### Advanced globbing - -* [extglobs](#extglobs) -* [POSIX brackets](#posix-brackets) -* [Braces](#brace-expansion) - -#### Extglobs - -| **Pattern** | **Description** | -| --- | --- | -| `@(pattern)` | Match _only one_ consecutive occurrence of `pattern` | -| `*(pattern)` | Match _zero or more_ consecutive occurrences of `pattern` | -| `+(pattern)` | Match _one or more_ consecutive occurrences of `pattern` | -| `?(pattern)` | Match _zero or **one**_ consecutive occurrences of `pattern` | -| `!(pattern)` | Match _anything but_ `pattern` | - -**Examples** - -```js -const pm = require('picomatch'); - -// *(pattern) matches ZERO or more of "pattern" -console.log(pm.isMatch('a', 'a*(z)')); // true -console.log(pm.isMatch('az', 'a*(z)')); // true -console.log(pm.isMatch('azzz', 'a*(z)')); // true - -// +(pattern) matches ONE or more of "pattern" -console.log(pm.isMatch('a', 'a*(z)')); // true -console.log(pm.isMatch('az', 'a*(z)')); // true -console.log(pm.isMatch('azzz', 'a*(z)')); // true - -// supports multiple extglobs -console.log(pm.isMatch('foo.bar', '!(foo).!(bar)')); // false - -// supports nested extglobs -console.log(pm.isMatch('foo.bar', '!(!(foo)).!(!(bar))')); // true -``` - -#### POSIX brackets - -POSIX classes are disabled by default. Enable this feature by setting the `posix` option to true. - -**Enable POSIX bracket support** - -```js -console.log(pm.makeRe('[[:word:]]+', { posix: true })); -//=> /^(?:(?=.)[A-Za-z0-9_]+\/?)$/ -``` - -**Supported POSIX classes** - -The following named POSIX bracket expressions are supported: - -* `[:alnum:]` - Alphanumeric characters, equ `[a-zA-Z0-9]` -* `[:alpha:]` - Alphabetical characters, equivalent to `[a-zA-Z]`. -* `[:ascii:]` - ASCII characters, equivalent to `[\\x00-\\x7F]`. -* `[:blank:]` - Space and tab characters, equivalent to `[ \\t]`. -* `[:cntrl:]` - Control characters, equivalent to `[\\x00-\\x1F\\x7F]`. -* `[:digit:]` - Numerical digits, equivalent to `[0-9]`. -* `[:graph:]` - Graph characters, equivalent to `[\\x21-\\x7E]`. -* `[:lower:]` - Lowercase letters, equivalent to `[a-z]`. -* `[:print:]` - Print characters, equivalent to `[\\x20-\\x7E ]`. -* `[:punct:]` - Punctuation and symbols, equivalent to `[\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~]`. -* `[:space:]` - Extended space characters, equivalent to `[ \\t\\r\\n\\v\\f]`. -* `[:upper:]` - Uppercase letters, equivalent to `[A-Z]`. -* `[:word:]` - Word characters (letters, numbers and underscores), equivalent to `[A-Za-z0-9_]`. -* `[:xdigit:]` - Hexadecimal digits, equivalent to `[A-Fa-f0-9]`. - -See the [Bash Reference Manual](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html) for more information. - -### Braces - -Picomatch does not do brace expansion. For [brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html) and advanced matching with braces, use [micromatch](https://github.com/micromatch/micromatch) instead. Picomatch has very basic support for braces. - -### Matching special characters as literals - -If you wish to match the following special characters in a filepath, and you want to use these characters in your glob pattern, they must be escaped with backslashes or quotes: - -**Special Characters** - -Some characters that are used for matching in regular expressions are also regarded as valid file path characters on some platforms. 
- -To match any of the following characters as literals: `$^*+?()[] - -Examples: - -```js -console.log(pm.makeRe('foo/bar \\(1\\)')); -console.log(pm.makeRe('foo/bar \\(1\\)')); -``` - -
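A short sketch of the advanced features documented above (extglobs, opt-in POSIX classes, and escaping special characters); the exact patterns here are illustrative and not taken from the deleted tests.

```js
// Sketch of advanced globbing features described in the section above.
const pm = require('picomatch');

// POSIX character classes must be enabled explicitly.
console.log(pm.isMatch('a1', '[[:alpha:]][[:digit:]]', { posix: true })); //=> true

// Extglobs: !(pattern) matches anything but the pattern.
console.log(pm.isMatch('cat.js', '!(index).js'));   //=> true
console.log(pm.isMatch('index.js', '!(index).js')); //=> false

// Escaped special characters are matched literally.
console.log(pm.isMatch('foo/bar (1)', 'foo/bar \\(1\\)')); //=> true
```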
-
- -## Library Comparisons - -The following table shows which features are supported by [minimatch](https://github.com/isaacs/minimatch), [micromatch](https://github.com/micromatch/micromatch), [picomatch](https://github.com/micromatch/picomatch), [nanomatch](https://github.com/micromatch/nanomatch), [extglob](https://github.com/micromatch/extglob), [braces](https://github.com/micromatch/braces), and [expand-brackets](https://github.com/micromatch/expand-brackets). - -| **Feature** | `minimatch` | `micromatch` | `picomatch` | `nanomatch` | `extglob` | `braces` | `expand-brackets` | -| --- | --- | --- | --- | --- | --- | --- | --- | -| Wildcard matching (`*?+`) | ✔ | ✔ | ✔ | ✔ | - | - | - | -| Advancing globbing | ✔ | ✔ | ✔ | - | - | - | - | -| Brace _matching_ | ✔ | ✔ | ✔ | - | - | ✔ | - | -| Brace _expansion_ | ✔ | ✔ | - | - | - | ✔ | - | -| Extglobs | partial | ✔ | ✔ | - | ✔ | - | - | -| Posix brackets | - | ✔ | ✔ | - | - | - | ✔ | -| Regular expression syntax | - | ✔ | ✔ | ✔ | ✔ | - | ✔ | -| File system operations | - | - | - | - | - | - | - | - -
-
- -## Benchmarks - -Performance comparison of picomatch and minimatch. - -``` -# .makeRe star - picomatch x 1,993,050 ops/sec ±0.51% (91 runs sampled) - minimatch x 627,206 ops/sec ±1.96% (87 runs sampled)) - -# .makeRe star; dot=true - picomatch x 1,436,640 ops/sec ±0.62% (91 runs sampled) - minimatch x 525,876 ops/sec ±0.60% (88 runs sampled) - -# .makeRe globstar - picomatch x 1,592,742 ops/sec ±0.42% (90 runs sampled) - minimatch x 962,043 ops/sec ±1.76% (91 runs sampled)d) - -# .makeRe globstars - picomatch x 1,615,199 ops/sec ±0.35% (94 runs sampled) - minimatch x 477,179 ops/sec ±1.33% (91 runs sampled) - -# .makeRe with leading star - picomatch x 1,220,856 ops/sec ±0.40% (92 runs sampled) - minimatch x 453,564 ops/sec ±1.43% (94 runs sampled) - -# .makeRe - basic braces - picomatch x 392,067 ops/sec ±0.70% (90 runs sampled) - minimatch x 99,532 ops/sec ±2.03% (87 runs sampled)) -``` - -
-
- -## Philosophies - -The goal of this library is to be blazing fast, without compromising on accuracy. - -**Accuracy** - -The number one of goal of this library is accuracy. However, it's not unusual for different glob implementations to have different rules for matching behavior, even with simple wildcard matching. It gets increasingly more complicated when combinations of different features are combined, like when extglobs are combined with globstars, braces, slashes, and so on: `!(**/{a,b,*/c})`. - -Thus, given that there is no canonical glob specification to use as a single source of truth when differences of opinion arise regarding behavior, sometimes we have to implement our best judgement and rely on feedback from users to make improvements. - -**Performance** - -Although this library performs well in benchmarks, and in most cases it's faster than other popular libraries we benchmarked against, we will always choose accuracy over performance. It's not helpful to anyone if our library is faster at returning the wrong answer. - -
-
- -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -Please read the [contributing guide](.github/contributing.md) for advice on opening issues, pull requests, and coding standards. - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -### License - -Copyright © 2017-present, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js deleted file mode 100644 index d2f2bc5..0000000 --- a/node_modules/picomatch/index.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict'; - -module.exports = require('./lib/picomatch'); diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js deleted file mode 100644 index a62ef38..0000000 --- a/node_modules/picomatch/lib/constants.js +++ /dev/null @@ -1,179 +0,0 @@ -'use strict'; - -const path = require('path'); -const WIN_SLASH = '\\\\/'; -const WIN_NO_SLASH = `[^${WIN_SLASH}]`; - -/** - * Posix glob regex - */ - -const DOT_LITERAL = '\\.'; -const PLUS_LITERAL = '\\+'; -const QMARK_LITERAL = '\\?'; -const SLASH_LITERAL = '\\/'; -const ONE_CHAR = '(?=.)'; -const QMARK = '[^/]'; -const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`; -const START_ANCHOR = `(?:^|${SLASH_LITERAL})`; -const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`; -const NO_DOT = `(?!${DOT_LITERAL})`; -const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`; -const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`; -const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`; -const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`; -const STAR = `${QMARK}*?`; - -const POSIX_CHARS = { - DOT_LITERAL, - PLUS_LITERAL, - QMARK_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - QMARK, - END_ANCHOR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK_NO_DOT, - STAR, - START_ANCHOR -}; - -/** - * Windows glob regex - */ - -const WINDOWS_CHARS = { - ...POSIX_CHARS, - - SLASH_LITERAL: `[${WIN_SLASH}]`, - QMARK: WIN_NO_SLASH, - STAR: `${WIN_NO_SLASH}*?`, - DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`, - NO_DOT: `(?!${DOT_LITERAL})`, - NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`, - NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`, - QMARK_NO_DOT: `[^.${WIN_SLASH}]`, - START_ANCHOR: `(?:^|[${WIN_SLASH}])`, - END_ANCHOR: `(?:[${WIN_SLASH}]|$)` -}; - -/** - * POSIX Bracket Regex - */ - -const POSIX_REGEX_SOURCE = { - alnum: 'a-zA-Z0-9', - alpha: 'a-zA-Z', - ascii: '\\x00-\\x7F', - blank: ' \\t', - cntrl: '\\x00-\\x1F\\x7F', - digit: '0-9', - graph: '\\x21-\\x7E', - lower: 'a-z', - print: '\\x20-\\x7E ', - punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~', - space: ' \\t\\r\\n\\v\\f', - upper: 'A-Z', - word: 'A-Za-z0-9_', - xdigit: 'A-Fa-f0-9' -}; - -module.exports = { - MAX_LENGTH: 1024 * 64, - POSIX_REGEX_SOURCE, - - // regular expressions - REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g, - REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/, - REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/, - REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g, - REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g, - REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g, - - // Replace globs with equivalent patterns to reduce parsing time. - REPLACEMENTS: { - '***': '*', - '**/**': '**', - '**/**/**': '**' - }, - - // Digits - CHAR_0: 48, /* 0 */ - CHAR_9: 57, /* 9 */ - - // Alphabet chars. 
- CHAR_UPPERCASE_A: 65, /* A */ - CHAR_LOWERCASE_A: 97, /* a */ - CHAR_UPPERCASE_Z: 90, /* Z */ - CHAR_LOWERCASE_Z: 122, /* z */ - - CHAR_LEFT_PARENTHESES: 40, /* ( */ - CHAR_RIGHT_PARENTHESES: 41, /* ) */ - - CHAR_ASTERISK: 42, /* * */ - - // Non-alphabetic chars. - CHAR_AMPERSAND: 38, /* & */ - CHAR_AT: 64, /* @ */ - CHAR_BACKWARD_SLASH: 92, /* \ */ - CHAR_CARRIAGE_RETURN: 13, /* \r */ - CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */ - CHAR_COLON: 58, /* : */ - CHAR_COMMA: 44, /* , */ - CHAR_DOT: 46, /* . */ - CHAR_DOUBLE_QUOTE: 34, /* " */ - CHAR_EQUAL: 61, /* = */ - CHAR_EXCLAMATION_MARK: 33, /* ! */ - CHAR_FORM_FEED: 12, /* \f */ - CHAR_FORWARD_SLASH: 47, /* / */ - CHAR_GRAVE_ACCENT: 96, /* ` */ - CHAR_HASH: 35, /* # */ - CHAR_HYPHEN_MINUS: 45, /* - */ - CHAR_LEFT_ANGLE_BRACKET: 60, /* < */ - CHAR_LEFT_CURLY_BRACE: 123, /* { */ - CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */ - CHAR_LINE_FEED: 10, /* \n */ - CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */ - CHAR_PERCENT: 37, /* % */ - CHAR_PLUS: 43, /* + */ - CHAR_QUESTION_MARK: 63, /* ? */ - CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */ - CHAR_RIGHT_CURLY_BRACE: 125, /* } */ - CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */ - CHAR_SEMICOLON: 59, /* ; */ - CHAR_SINGLE_QUOTE: 39, /* ' */ - CHAR_SPACE: 32, /* */ - CHAR_TAB: 9, /* \t */ - CHAR_UNDERSCORE: 95, /* _ */ - CHAR_VERTICAL_LINE: 124, /* | */ - CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ - - SEP: path.sep, - - /** - * Create EXTGLOB_CHARS - */ - - extglobChars(chars) { - return { - '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` }, - '?': { type: 'qmark', open: '(?:', close: ')?' }, - '+': { type: 'plus', open: '(?:', close: ')+' }, - '*': { type: 'star', open: '(?:', close: ')*' }, - '@': { type: 'at', open: '(?:', close: ')' } - }; - }, - - /** - * Create GLOB_CHARS - */ - - globChars(win32) { - return win32 === true ? WINDOWS_CHARS : POSIX_CHARS; - } -}; diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js deleted file mode 100644 index 58269d0..0000000 --- a/node_modules/picomatch/lib/parse.js +++ /dev/null @@ -1,1091 +0,0 @@ -'use strict'; - -const constants = require('./constants'); -const utils = require('./utils'); - -/** - * Constants - */ - -const { - MAX_LENGTH, - POSIX_REGEX_SOURCE, - REGEX_NON_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_BACKREF, - REPLACEMENTS -} = constants; - -/** - * Helpers - */ - -const expandRange = (args, options) => { - if (typeof options.expandRange === 'function') { - return options.expandRange(...args, options); - } - - args.sort(); - const value = `[${args.join('-')}]`; - - try { - /* eslint-disable-next-line no-new */ - new RegExp(value); - } catch (ex) { - return args.map(v => utils.escapeRegex(v)).join('..'); - } - - return value; -}; - -/** - * Create the message for a syntax error - */ - -const syntaxError = (type, char) => { - return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`; -}; - -/** - * Parse the given input string. - * @param {String} input - * @param {Object} options - * @return {Object} - */ - -const parse = (input, options) => { - if (typeof input !== 'string') { - throw new TypeError('Expected a string'); - } - - input = REPLACEMENTS[input] || input; - - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - - let len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); - } - - const bos = { type: 'bos', value: '', output: opts.prepend || '' }; - const tokens = [bos]; - - const capture = opts.capture ? '' : '?:'; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const PLATFORM_CHARS = constants.globChars(win32); - const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS); - - const { - DOT_LITERAL, - PLUS_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOT_SLASH, - NO_DOTS_SLASH, - QMARK, - QMARK_NO_DOT, - STAR, - START_ANCHOR - } = PLATFORM_CHARS; - - const globstar = opts => { - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; - - const nodot = opts.dot ? '' : NO_DOT; - const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT; - let star = opts.bash === true ? globstar(opts) : STAR; - - if (opts.capture) { - star = `(${star})`; - } - - // minimatch options support - if (typeof opts.noext === 'boolean') { - opts.noextglob = opts.noext; - } - - const state = { - input, - index: -1, - start: 0, - dot: opts.dot === true, - consumed: '', - output: '', - prefix: '', - backtrack: false, - negated: false, - brackets: 0, - braces: 0, - parens: 0, - quotes: 0, - globstar: false, - tokens - }; - - input = utils.removePrefix(input, state); - len = input.length; - - const extglobs = []; - const braces = []; - const stack = []; - let prev = bos; - let value; - - /** - * Tokenizing helpers - */ - - const eos = () => state.index === len - 1; - const peek = state.peek = (n = 1) => input[state.index + n]; - const advance = state.advance = () => input[++state.index] || ''; - const remaining = () => input.slice(state.index + 1); - const consume = (value = '', num = 0) => { - state.consumed += value; - state.index += num; - }; - - const append = token => { - state.output += token.output != null ? token.output : token.value; - consume(token.value); - }; - - const negate = () => { - let count = 1; - - while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) { - advance(); - state.start++; - count++; - } - - if (count % 2 === 0) { - return false; - } - - state.negated = true; - state.start++; - return true; - }; - - const increment = type => { - state[type]++; - stack.push(type); - }; - - const decrement = type => { - state[type]--; - stack.pop(); - }; - - /** - * Push tokens onto the tokens array. This helper speeds up - * tokenizing by 1) helping us avoid backtracking as much as possible, - * and 2) helping us avoid creating extra tokens when consecutive - * characters are plain text. This improves performance and simplifies - * lookbehinds. 
- */ - - const push = tok => { - if (prev.type === 'globstar') { - const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace'); - const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren')); - - if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) { - state.output = state.output.slice(0, -prev.output.length); - prev.type = 'star'; - prev.value = '*'; - prev.output = star; - state.output += prev.output; - } - } - - if (extglobs.length && tok.type !== 'paren') { - extglobs[extglobs.length - 1].inner += tok.value; - } - - if (tok.value || tok.output) append(tok); - if (prev && prev.type === 'text' && tok.type === 'text') { - prev.value += tok.value; - prev.output = (prev.output || '') + tok.value; - return; - } - - tok.prev = prev; - tokens.push(tok); - prev = tok; - }; - - const extglobOpen = (type, value) => { - const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' }; - - token.prev = prev; - token.parens = state.parens; - token.output = state.output; - const output = (opts.capture ? '(' : '') + token.open; - - increment('parens'); - push({ type, value, output: state.output ? '' : ONE_CHAR }); - push({ type: 'paren', extglob: true, value: advance(), output }); - extglobs.push(token); - }; - - const extglobClose = token => { - let output = token.close + (opts.capture ? ')' : ''); - let rest; - - if (token.type === 'negate') { - let extglobStar = star; - - if (token.inner && token.inner.length > 1 && token.inner.includes('/')) { - extglobStar = globstar(opts); - } - - if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) { - output = token.close = `)$))${extglobStar}`; - } - - if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) { - // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis. - // In this case, we need to parse the string and use it in the output of the original pattern. - // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`. - // - // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`. - const expression = parse(rest, { ...options, fastpaths: false }).output; - - output = token.close = `)${expression})${extglobStar})`; - } - - if (token.prev.type === 'bos') { - state.negatedExtglob = true; - } - } - - push({ type: 'paren', extglob: true, value, output }); - decrement('parens'); - }; - - /** - * Fast paths - */ - - if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) { - let backslashes = false; - - let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => { - if (first === '\\') { - backslashes = true; - return m; - } - - if (first === '?') { - if (esc) { - return esc + first + (rest ? QMARK.repeat(rest.length) : ''); - } - if (index === 0) { - return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : ''); - } - return QMARK.repeat(chars.length); - } - - if (first === '.') { - return DOT_LITERAL.repeat(chars.length); - } - - if (first === '*') { - if (esc) { - return esc + first + (rest ? star : ''); - } - return star; - } - return esc ? m : `\\${m}`; - }); - - if (backslashes === true) { - if (opts.unescape === true) { - output = output.replace(/\\/g, ''); - } else { - output = output.replace(/\\+/g, m => { - return m.length % 2 === 0 ? '\\\\' : (m ? 
'\\' : ''); - }); - } - } - - if (output === input && opts.contains === true) { - state.output = input; - return state; - } - - state.output = utils.wrapOutput(output, state, options); - return state; - } - - /** - * Tokenize input until we reach end-of-string - */ - - while (!eos()) { - value = advance(); - - if (value === '\u0000') { - continue; - } - - /** - * Escaped characters - */ - - if (value === '\\') { - const next = peek(); - - if (next === '/' && opts.bash !== true) { - continue; - } - - if (next === '.' || next === ';') { - continue; - } - - if (!next) { - value += '\\'; - push({ type: 'text', value }); - continue; - } - - // collapse slashes to reduce potential for exploits - const match = /^\\+/.exec(remaining()); - let slashes = 0; - - if (match && match[0].length > 2) { - slashes = match[0].length; - state.index += slashes; - if (slashes % 2 !== 0) { - value += '\\'; - } - } - - if (opts.unescape === true) { - value = advance(); - } else { - value += advance(); - } - - if (state.brackets === 0) { - push({ type: 'text', value }); - continue; - } - } - - /** - * If we're inside a regex character class, continue - * until we reach the closing bracket. - */ - - if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) { - if (opts.posix !== false && value === ':') { - const inner = prev.value.slice(1); - if (inner.includes('[')) { - prev.posix = true; - - if (inner.includes(':')) { - const idx = prev.value.lastIndexOf('['); - const pre = prev.value.slice(0, idx); - const rest = prev.value.slice(idx + 2); - const posix = POSIX_REGEX_SOURCE[rest]; - if (posix) { - prev.value = pre + posix; - state.backtrack = true; - advance(); - - if (!bos.output && tokens.indexOf(prev) === 1) { - bos.output = ONE_CHAR; - } - continue; - } - } - } - } - - if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) { - value = `\\${value}`; - } - - if (value === ']' && (prev.value === '[' || prev.value === '[^')) { - value = `\\${value}`; - } - - if (opts.posix === true && value === '!' && prev.value === '[') { - value = '^'; - } - - prev.value += value; - append({ value }); - continue; - } - - /** - * If we're inside a quoted string, continue - * until we reach the closing double quote. - */ - - if (state.quotes === 1 && value !== '"') { - value = utils.escapeRegex(value); - prev.value += value; - append({ value }); - continue; - } - - /** - * Double quotes - */ - - if (value === '"') { - state.quotes = state.quotes === 1 ? 0 : 1; - if (opts.keepQuotes === true) { - push({ type: 'text', value }); - } - continue; - } - - /** - * Parentheses - */ - - if (value === '(') { - increment('parens'); - push({ type: 'paren', value }); - continue; - } - - if (value === ')') { - if (state.parens === 0 && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '(')); - } - - const extglob = extglobs[extglobs.length - 1]; - if (extglob && state.parens === extglob.parens + 1) { - extglobClose(extglobs.pop()); - continue; - } - - push({ type: 'paren', value, output: state.parens ? 
')' : '\\)' }); - decrement('parens'); - continue; - } - - /** - * Square brackets - */ - - if (value === '[') { - if (opts.nobracket === true || !remaining().includes(']')) { - if (opts.nobracket !== true && opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('closing', ']')); - } - - value = `\\${value}`; - } else { - increment('brackets'); - } - - push({ type: 'bracket', value }); - continue; - } - - if (value === ']') { - if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) { - push({ type: 'text', value, output: `\\${value}` }); - continue; - } - - if (state.brackets === 0) { - if (opts.strictBrackets === true) { - throw new SyntaxError(syntaxError('opening', '[')); - } - - push({ type: 'text', value, output: `\\${value}` }); - continue; - } - - decrement('brackets'); - - const prevValue = prev.value.slice(1); - if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) { - value = `/${value}`; - } - - prev.value += value; - append({ value }); - - // when literal brackets are explicitly disabled - // assume we should match with a regex character class - if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) { - continue; - } - - const escaped = utils.escapeRegex(prev.value); - state.output = state.output.slice(0, -prev.value.length); - - // when literal brackets are explicitly enabled - // assume we should escape the brackets to match literal characters - if (opts.literalBrackets === true) { - state.output += escaped; - prev.value = escaped; - continue; - } - - // when the user specifies nothing, try to match both - prev.value = `(${capture}${escaped}|${prev.value})`; - state.output += prev.value; - continue; - } - - /** - * Braces - */ - - if (value === '{' && opts.nobrace !== true) { - increment('braces'); - - const open = { - type: 'brace', - value, - output: '(', - outputIndex: state.output.length, - tokensIndex: state.tokens.length - }; - - braces.push(open); - push(open); - continue; - } - - if (value === '}') { - const brace = braces[braces.length - 1]; - - if (opts.nobrace === true || !brace) { - push({ type: 'text', value, output: value }); - continue; - } - - let output = ')'; - - if (brace.dots === true) { - const arr = tokens.slice(); - const range = []; - - for (let i = arr.length - 1; i >= 0; i--) { - tokens.pop(); - if (arr[i].type === 'brace') { - break; - } - if (arr[i].type !== 'dots') { - range.unshift(arr[i].value); - } - } - - output = expandRange(range, opts); - state.backtrack = true; - } - - if (brace.comma !== true && brace.dots !== true) { - const out = state.output.slice(0, brace.outputIndex); - const toks = state.tokens.slice(brace.tokensIndex); - brace.value = brace.output = '\\{'; - value = output = '\\}'; - state.output = out; - for (const t of toks) { - state.output += (t.output || t.value); - } - } - - push({ type: 'brace', value, output }); - decrement('braces'); - braces.pop(); - continue; - } - - /** - * Pipes - */ - - if (value === '|') { - if (extglobs.length > 0) { - extglobs[extglobs.length - 1].conditions++; - } - push({ type: 'text', value }); - continue; - } - - /** - * Commas - */ - - if (value === ',') { - let output = value; - - const brace = braces[braces.length - 1]; - if (brace && stack[stack.length - 1] === 'braces') { - brace.comma = true; - output = '|'; - } - - push({ type: 'comma', value, output }); - continue; - } - - /** - * Slashes - */ - - if (value === '/') { - // if the beginning of the glob is "./", advance the start - // to the current 
index, and don't add the "./" characters - // to the state. This greatly simplifies lookbehinds when - // checking for BOS characters like "!" and "." (not "./") - if (prev.type === 'dot' && state.index === state.start + 1) { - state.start = state.index + 1; - state.consumed = ''; - state.output = ''; - tokens.pop(); - prev = bos; // reset "prev" to the first token - continue; - } - - push({ type: 'slash', value, output: SLASH_LITERAL }); - continue; - } - - /** - * Dots - */ - - if (value === '.') { - if (state.braces > 0 && prev.type === 'dot') { - if (prev.value === '.') prev.output = DOT_LITERAL; - const brace = braces[braces.length - 1]; - prev.type = 'dots'; - prev.output += value; - prev.value += value; - brace.dots = true; - continue; - } - - if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') { - push({ type: 'text', value, output: DOT_LITERAL }); - continue; - } - - push({ type: 'dot', value, output: DOT_LITERAL }); - continue; - } - - /** - * Question marks - */ - - if (value === '?') { - const isGroup = prev && prev.value === '('; - if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('qmark', value); - continue; - } - - if (prev && prev.type === 'paren') { - const next = peek(); - let output = value; - - if (next === '<' && !utils.supportsLookbehinds()) { - throw new Error('Node.js v10 or higher is required for regex lookbehinds'); - } - - if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) { - output = `\\${value}`; - } - - push({ type: 'text', value, output }); - continue; - } - - if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) { - push({ type: 'qmark', value, output: QMARK_NO_DOT }); - continue; - } - - push({ type: 'qmark', value, output: QMARK }); - continue; - } - - /** - * Exclamation - */ - - if (value === '!') { - if (opts.noextglob !== true && peek() === '(') { - if (peek(2) !== '?' 
|| !/[!=<:]/.test(peek(3))) { - extglobOpen('negate', value); - continue; - } - } - - if (opts.nonegate !== true && state.index === 0) { - negate(); - continue; - } - } - - /** - * Plus - */ - - if (value === '+') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - extglobOpen('plus', value); - continue; - } - - if ((prev && prev.value === '(') || opts.regex === false) { - push({ type: 'plus', value, output: PLUS_LITERAL }); - continue; - } - - if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) { - push({ type: 'plus', value }); - continue; - } - - push({ type: 'plus', value: PLUS_LITERAL }); - continue; - } - - /** - * Plain text - */ - - if (value === '@') { - if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') { - push({ type: 'at', extglob: true, value, output: '' }); - continue; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Plain text - */ - - if (value !== '*') { - if (value === '$' || value === '^') { - value = `\\${value}`; - } - - const match = REGEX_NON_SPECIAL_CHARS.exec(remaining()); - if (match) { - value += match[0]; - state.index += match[0].length; - } - - push({ type: 'text', value }); - continue; - } - - /** - * Stars - */ - - if (prev && (prev.type === 'globstar' || prev.star === true)) { - prev.type = 'star'; - prev.star = true; - prev.value += value; - prev.output = star; - state.backtrack = true; - state.globstar = true; - consume(value); - continue; - } - - let rest = remaining(); - if (opts.noextglob !== true && /^\([^?]/.test(rest)) { - extglobOpen('star', value); - continue; - } - - if (prev.type === 'star') { - if (opts.noglobstar === true) { - consume(value); - continue; - } - - const prior = prev.prev; - const before = prior.prev; - const isStart = prior.type === 'slash' || prior.type === 'bos'; - const afterStar = before && (before.type === 'star' || before.type === 'globstar'); - - if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) { - push({ type: 'star', value, output: '' }); - continue; - } - - const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace'); - const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren'); - if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) { - push({ type: 'star', value, output: '' }); - continue; - } - - // strip consecutive `/**/` - while (rest.slice(0, 3) === '/**') { - const after = input[state.index + 4]; - if (after && after !== '/') { - break; - } - rest = rest.slice(3); - consume('/**', 3); - } - - if (prior.type === 'bos' && eos()) { - prev.type = 'globstar'; - prev.value += value; - prev.output = globstar(opts); - state.output = prev.output; - state.globstar = true; - consume(value); - continue; - } - - if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) { - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; - - prev.type = 'globstar'; - prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)'); - prev.value += value; - state.globstar = true; - state.output += prior.output + prev.output; - consume(value); - continue; - } - - if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') { - const end = rest[1] !== void 0 ? 
'|$' : ''; - - state.output = state.output.slice(0, -(prior.output + prev.output).length); - prior.output = `(?:${prior.output}`; - - prev.type = 'globstar'; - prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`; - prev.value += value; - - state.output += prior.output + prev.output; - state.globstar = true; - - consume(value + advance()); - - push({ type: 'slash', value: '/', output: '' }); - continue; - } - - if (prior.type === 'bos' && rest[0] === '/') { - prev.type = 'globstar'; - prev.value += value; - prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`; - state.output = prev.output; - state.globstar = true; - consume(value + advance()); - push({ type: 'slash', value: '/', output: '' }); - continue; - } - - // remove single star from output - state.output = state.output.slice(0, -prev.output.length); - - // reset previous token to globstar - prev.type = 'globstar'; - prev.output = globstar(opts); - prev.value += value; - - // reset output with globstar - state.output += prev.output; - state.globstar = true; - consume(value); - continue; - } - - const token = { type: 'star', value, output: star }; - - if (opts.bash === true) { - token.output = '.*?'; - if (prev.type === 'bos' || prev.type === 'slash') { - token.output = nodot + token.output; - } - push(token); - continue; - } - - if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) { - token.output = value; - push(token); - continue; - } - - if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') { - if (prev.type === 'dot') { - state.output += NO_DOT_SLASH; - prev.output += NO_DOT_SLASH; - - } else if (opts.dot === true) { - state.output += NO_DOTS_SLASH; - prev.output += NO_DOTS_SLASH; - - } else { - state.output += nodot; - prev.output += nodot; - } - - if (peek() !== '*') { - state.output += ONE_CHAR; - prev.output += ONE_CHAR; - } - } - - push(token); - } - - while (state.brackets > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']')); - state.output = utils.escapeLast(state.output, '['); - decrement('brackets'); - } - - while (state.parens > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')')); - state.output = utils.escapeLast(state.output, '('); - decrement('parens'); - } - - while (state.braces > 0) { - if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}')); - state.output = utils.escapeLast(state.output, '{'); - decrement('braces'); - } - - if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) { - push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` }); - } - - // rebuild the output if we had to backtrack at any point - if (state.backtrack === true) { - state.output = ''; - - for (const token of state.tokens) { - state.output += token.output != null ? token.output : token.value; - - if (token.suffix) { - state.output += token.suffix; - } - } - } - - return state; -}; - -/** - * Fast paths for creating regular expressions for common glob patterns. - * This can significantly speed up processing and has very little downside - * impact when none of the fast paths match. - */ - -parse.fastpaths = (input, options) => { - const opts = { ...options }; - const max = typeof opts.maxLength === 'number' ? 
Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH; - const len = input.length; - if (len > max) { - throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`); - } - - input = REPLACEMENTS[input] || input; - const win32 = utils.isWindows(options); - - // create constants based on platform, for windows or posix - const { - DOT_LITERAL, - SLASH_LITERAL, - ONE_CHAR, - DOTS_SLASH, - NO_DOT, - NO_DOTS, - NO_DOTS_SLASH, - STAR, - START_ANCHOR - } = constants.globChars(win32); - - const nodot = opts.dot ? NO_DOTS : NO_DOT; - const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT; - const capture = opts.capture ? '' : '?:'; - const state = { negated: false, prefix: '' }; - let star = opts.bash === true ? '.*?' : STAR; - - if (opts.capture) { - star = `(${star})`; - } - - const globstar = opts => { - if (opts.noglobstar === true) return star; - return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`; - }; - - const create = str => { - switch (str) { - case '*': - return `${nodot}${ONE_CHAR}${star}`; - - case '.*': - return `${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '*.*': - return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '*/*': - return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`; - - case '**': - return nodot + globstar(opts); - - case '**/*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`; - - case '**/*.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`; - - case '**/.*': - return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`; - - default: { - const match = /^(.*?)\.(\w+)$/.exec(str); - if (!match) return; - - const source = create(match[1]); - if (!source) return; - - return source + DOT_LITERAL + match[2]; - } - } - }; - - const output = utils.removePrefix(input, state); - let source = create(output); - - if (source && opts.strictSlashes !== true) { - source += `${SLASH_LITERAL}?`; - } - - return source; -}; - -module.exports = parse; diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js deleted file mode 100644 index 782d809..0000000 --- a/node_modules/picomatch/lib/picomatch.js +++ /dev/null @@ -1,342 +0,0 @@ -'use strict'; - -const path = require('path'); -const scan = require('./scan'); -const parse = require('./parse'); -const utils = require('./utils'); -const constants = require('./constants'); -const isObject = val => val && typeof val === 'object' && !Array.isArray(val); - -/** - * Creates a matcher function from one or more glob patterns. The - * returned function takes a string to match as its first argument, - * and returns true if the string is a match. The returned matcher - * function also takes a boolean as the second argument that, when true, - * returns an object with additional information. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch(glob[, options]); - * - * const isMatch = picomatch('*.!(*a)'); - * console.log(isMatch('a.a')); //=> false - * console.log(isMatch('a.b')); //=> true - * ``` - * @name picomatch - * @param {String|Array} `globs` One or more glob patterns. - * @param {Object=} `options` - * @return {Function=} Returns a matcher function. 
- * @api public - */ - -const picomatch = (glob, options, returnState = false) => { - if (Array.isArray(glob)) { - const fns = glob.map(input => picomatch(input, options, returnState)); - const arrayMatcher = str => { - for (const isMatch of fns) { - const state = isMatch(str); - if (state) return state; - } - return false; - }; - return arrayMatcher; - } - - const isState = isObject(glob) && glob.tokens && glob.input; - - if (glob === '' || (typeof glob !== 'string' && !isState)) { - throw new TypeError('Expected pattern to be a non-empty string'); - } - - const opts = options || {}; - const posix = utils.isWindows(options); - const regex = isState - ? picomatch.compileRe(glob, options) - : picomatch.makeRe(glob, options, false, true); - - const state = regex.state; - delete regex.state; - - let isIgnored = () => false; - if (opts.ignore) { - const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null }; - isIgnored = picomatch(opts.ignore, ignoreOpts, returnState); - } - - const matcher = (input, returnObject = false) => { - const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix }); - const result = { glob, state, regex, posix, input, output, match, isMatch }; - - if (typeof opts.onResult === 'function') { - opts.onResult(result); - } - - if (isMatch === false) { - result.isMatch = false; - return returnObject ? result : false; - } - - if (isIgnored(input)) { - if (typeof opts.onIgnore === 'function') { - opts.onIgnore(result); - } - result.isMatch = false; - return returnObject ? result : false; - } - - if (typeof opts.onMatch === 'function') { - opts.onMatch(result); - } - return returnObject ? result : true; - }; - - if (returnState) { - matcher.state = state; - } - - return matcher; -}; - -/** - * Test `input` with the given `regex`. This is used by the main - * `picomatch()` function to test the input string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.test(input, regex[, options]); - * - * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/)); - * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' } - * ``` - * @param {String} `input` String to test. - * @param {RegExp} `regex` - * @return {Object} Returns an object with matching info. - * @api public - */ - -picomatch.test = (input, regex, options, { glob, posix } = {}) => { - if (typeof input !== 'string') { - throw new TypeError('Expected input to be a string'); - } - - if (input === '') { - return { isMatch: false, output: '' }; - } - - const opts = options || {}; - const format = opts.format || (posix ? utils.toPosixSlashes : null); - let match = input === glob; - let output = (match && format) ? format(input) : input; - - if (match === false) { - output = format ? format(input) : input; - match = output === glob; - } - - if (match === false || opts.capture === true) { - if (opts.matchBase === true || opts.basename === true) { - match = picomatch.matchBase(input, regex, options, posix); - } else { - match = regex.exec(output); - } - } - - return { isMatch: Boolean(match), match, output }; -}; - -/** - * Match the basename of a filepath. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.matchBase(input, glob[, options]); - * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true - * ``` - * @param {String} `input` String to test. - * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe). 
- * @return {Boolean} - * @api public - */ - -picomatch.matchBase = (input, glob, options, posix = utils.isWindows(options)) => { - const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options); - return regex.test(path.basename(input)); -}; - -/** - * Returns true if **any** of the given glob `patterns` match the specified `string`. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.isMatch(string, patterns[, options]); - * - * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true - * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false - * ``` - * @param {String|Array} str The string to test. - * @param {String|Array} patterns One or more glob patterns to use for matching. - * @param {Object} [options] See available [options](#options). - * @return {Boolean} Returns true if any patterns match `str` - * @api public - */ - -picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str); - -/** - * Parse a glob pattern to create the source string for a regular - * expression. - * - * ```js - * const picomatch = require('picomatch'); - * const result = picomatch.parse(pattern[, options]); - * ``` - * @param {String} `pattern` - * @param {Object} `options` - * @return {Object} Returns an object with useful properties and output to be used as a regex source string. - * @api public - */ - -picomatch.parse = (pattern, options) => { - if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options)); - return parse(pattern, { ...options, fastpaths: false }); -}; - -/** - * Scan a glob pattern to separate the pattern into segments. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.scan(input[, options]); - * - * const result = picomatch.scan('!./foo/*.js'); - * console.log(result); - * { prefix: '!./', - * input: '!./foo/*.js', - * start: 3, - * base: 'foo', - * glob: '*.js', - * isBrace: false, - * isBracket: false, - * isGlob: true, - * isExtglob: false, - * isGlobstar: false, - * negated: true } - * ``` - * @param {String} `input` Glob pattern to scan. - * @param {Object} `options` - * @return {Object} Returns an object with - * @api public - */ - -picomatch.scan = (input, options) => scan(input, options); - -/** - * Compile a regular expression from the `state` object returned by the - * [parse()](#parse) method. - * - * @param {Object} `state` - * @param {Object} `options` - * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser. - * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging. - * @return {RegExp} - * @api public - */ - -picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => { - if (returnOutput === true) { - return state.output; - } - - const opts = options || {}; - const prepend = opts.contains ? '' : '^'; - const append = opts.contains ? '' : '$'; - - let source = `${prepend}(?:${state.output})${append}`; - if (state && state.negated === true) { - source = `^(?!${source}).*$`; - } - - const regex = picomatch.toRegex(source, options); - if (returnState === true) { - regex.state = state; - } - - return regex; -}; - -/** - * Create a regular expression from a parsed glob pattern. 
- * - * ```js - * const picomatch = require('picomatch'); - * const state = picomatch.parse('*.js'); - * // picomatch.compileRe(state[, options]); - * - * console.log(picomatch.compileRe(state)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `state` The object returned from the `.parse` method. - * @param {Object} `options` - * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result. - * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression. - * @return {RegExp} Returns a regex created from the given pattern. - * @api public - */ - -picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => { - if (!input || typeof input !== 'string') { - throw new TypeError('Expected a non-empty string'); - } - - let parsed = { negated: false, fastpaths: true }; - - if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) { - parsed.output = parse.fastpaths(input, options); - } - - if (!parsed.output) { - parsed = parse(input, options); - } - - return picomatch.compileRe(parsed, options, returnOutput, returnState); -}; - -/** - * Create a regular expression from the given regex source string. - * - * ```js - * const picomatch = require('picomatch'); - * // picomatch.toRegex(source[, options]); - * - * const { output } = picomatch.parse('*.js'); - * console.log(picomatch.toRegex(output)); - * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/ - * ``` - * @param {String} `source` Regular expression source string. - * @param {Object} `options` - * @return {RegExp} - * @api public - */ - -picomatch.toRegex = (source, options) => { - try { - const opts = options || {}; - return new RegExp(source, opts.flags || (opts.nocase ? 'i' : '')); - } catch (err) { - if (options && options.debug === true) throw err; - return /$^/; - } -}; - -/** - * Picomatch constants. - * @return {Object} - */ - -picomatch.constants = constants; - -/** - * Expose "picomatch" - */ - -module.exports = picomatch; diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js deleted file mode 100644 index e59cd7a..0000000 --- a/node_modules/picomatch/lib/scan.js +++ /dev/null @@ -1,391 +0,0 @@ -'use strict'; - -const utils = require('./utils'); -const { - CHAR_ASTERISK, /* * */ - CHAR_AT, /* @ */ - CHAR_BACKWARD_SLASH, /* \ */ - CHAR_COMMA, /* , */ - CHAR_DOT, /* . */ - CHAR_EXCLAMATION_MARK, /* ! */ - CHAR_FORWARD_SLASH, /* / */ - CHAR_LEFT_CURLY_BRACE, /* { */ - CHAR_LEFT_PARENTHESES, /* ( */ - CHAR_LEFT_SQUARE_BRACKET, /* [ */ - CHAR_PLUS, /* + */ - CHAR_QUESTION_MARK, /* ? */ - CHAR_RIGHT_CURLY_BRACE, /* } */ - CHAR_RIGHT_PARENTHESES, /* ) */ - CHAR_RIGHT_SQUARE_BRACKET /* ] */ -} = require('./constants'); - -const isPathSeparator = code => { - return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; -}; - -const depth = token => { - if (token.isPrefix !== true) { - token.depth = token.isGlobstar ? Infinity : 1; - } -}; - -/** - * Quickly scans a glob pattern and returns an object with a handful of - * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists), - * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not - * with `!(`) and `negatedExtglob` (true if the path starts with `!(`). 
- * - * ```js - * const pm = require('picomatch'); - * console.log(pm.scan('foo/bar/*.js')); - * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' } - * ``` - * @param {String} `str` - * @param {Object} `options` - * @return {Object} Returns an object with tokens and regex source string. - * @api public - */ - -const scan = (input, options) => { - const opts = options || {}; - - const length = input.length - 1; - const scanToEnd = opts.parts === true || opts.scanToEnd === true; - const slashes = []; - const tokens = []; - const parts = []; - - let str = input; - let index = -1; - let start = 0; - let lastIndex = 0; - let isBrace = false; - let isBracket = false; - let isGlob = false; - let isExtglob = false; - let isGlobstar = false; - let braceEscaped = false; - let backslashes = false; - let negated = false; - let negatedExtglob = false; - let finished = false; - let braces = 0; - let prev; - let code; - let token = { value: '', depth: 0, isGlob: false }; - - const eos = () => index >= length; - const peek = () => str.charCodeAt(index + 1); - const advance = () => { - prev = code; - return str.charCodeAt(++index); - }; - - while (index < length) { - code = advance(); - let next; - - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - - if (code === CHAR_LEFT_CURLY_BRACE) { - braceEscaped = true; - } - continue; - } - - if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) { - braces++; - - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } - - if (code === CHAR_LEFT_CURLY_BRACE) { - braces++; - continue; - } - - if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (braceEscaped !== true && code === CHAR_COMMA) { - isBrace = token.isBrace = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (code === CHAR_RIGHT_CURLY_BRACE) { - braces--; - - if (braces === 0) { - braceEscaped = false; - isBrace = token.isBrace = true; - finished = true; - break; - } - } - } - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (code === CHAR_FORWARD_SLASH) { - slashes.push(index); - tokens.push(token); - token = { value: '', depth: 0, isGlob: false }; - - if (finished === true) continue; - if (prev === CHAR_DOT && index === (start + 1)) { - start += 2; - continue; - } - - lastIndex = index + 1; - continue; - } - - if (opts.noext !== true) { - const isExtglobChar = code === CHAR_PLUS - || code === CHAR_AT - || code === CHAR_ASTERISK - || code === CHAR_QUESTION_MARK - || code === CHAR_EXCLAMATION_MARK; - - if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - isExtglob = token.isExtglob = true; - finished = true; - if (code === CHAR_EXCLAMATION_MARK && index === start) { - negatedExtglob = true; - } - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } - - if (code === CHAR_RIGHT_PARENTHESES) { - isGlob = token.isGlob = true; - finished = true; - break; - } - } - continue; - } - break; - } - } - - if (code === CHAR_ASTERISK) { - if (prev === CHAR_ASTERISK) isGlobstar = 
token.isGlobstar = true; - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - break; - } - - if (code === CHAR_QUESTION_MARK) { - isGlob = token.isGlob = true; - finished = true; - - if (scanToEnd === true) { - continue; - } - break; - } - - if (code === CHAR_LEFT_SQUARE_BRACKET) { - while (eos() !== true && (next = advance())) { - if (next === CHAR_BACKWARD_SLASH) { - backslashes = token.backslashes = true; - advance(); - continue; - } - - if (next === CHAR_RIGHT_SQUARE_BRACKET) { - isBracket = token.isBracket = true; - isGlob = token.isGlob = true; - finished = true; - break; - } - } - - if (scanToEnd === true) { - continue; - } - - break; - } - - if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) { - negated = token.negated = true; - start++; - continue; - } - - if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) { - isGlob = token.isGlob = true; - - if (scanToEnd === true) { - while (eos() !== true && (code = advance())) { - if (code === CHAR_LEFT_PARENTHESES) { - backslashes = token.backslashes = true; - code = advance(); - continue; - } - - if (code === CHAR_RIGHT_PARENTHESES) { - finished = true; - break; - } - } - continue; - } - break; - } - - if (isGlob === true) { - finished = true; - - if (scanToEnd === true) { - continue; - } - - break; - } - } - - if (opts.noext === true) { - isExtglob = false; - isGlob = false; - } - - let base = str; - let prefix = ''; - let glob = ''; - - if (start > 0) { - prefix = str.slice(0, start); - str = str.slice(start); - lastIndex -= start; - } - - if (base && isGlob === true && lastIndex > 0) { - base = str.slice(0, lastIndex); - glob = str.slice(lastIndex); - } else if (isGlob === true) { - base = ''; - glob = str; - } else { - base = str; - } - - if (base && base !== '' && base !== '/' && base !== str) { - if (isPathSeparator(base.charCodeAt(base.length - 1))) { - base = base.slice(0, -1); - } - } - - if (opts.unescape === true) { - if (glob) glob = utils.removeBackslashes(glob); - - if (base && backslashes === true) { - base = utils.removeBackslashes(base); - } - } - - const state = { - prefix, - input, - start, - base, - glob, - isBrace, - isBracket, - isGlob, - isExtglob, - isGlobstar, - negated, - negatedExtglob - }; - - if (opts.tokens === true) { - state.maxDepth = 0; - if (!isPathSeparator(code)) { - tokens.push(token); - } - state.tokens = tokens; - } - - if (opts.parts === true || opts.tokens === true) { - let prevIndex; - - for (let idx = 0; idx < slashes.length; idx++) { - const n = prevIndex ? 
prevIndex + 1 : start; - const i = slashes[idx]; - const value = input.slice(n, i); - if (opts.tokens) { - if (idx === 0 && start !== 0) { - tokens[idx].isPrefix = true; - tokens[idx].value = prefix; - } else { - tokens[idx].value = value; - } - depth(tokens[idx]); - state.maxDepth += tokens[idx].depth; - } - if (idx !== 0 || value !== '') { - parts.push(value); - } - prevIndex = i; - } - - if (prevIndex && prevIndex + 1 < input.length) { - const value = input.slice(prevIndex + 1); - parts.push(value); - - if (opts.tokens) { - tokens[tokens.length - 1].value = value; - depth(tokens[tokens.length - 1]); - state.maxDepth += tokens[tokens.length - 1].depth; - } - } - - state.slashes = slashes; - state.parts = parts; - } - - return state; -}; - -module.exports = scan; diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js deleted file mode 100644 index c3ca766..0000000 --- a/node_modules/picomatch/lib/utils.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -const path = require('path'); -const win32 = process.platform === 'win32'; -const { - REGEX_BACKSLASH, - REGEX_REMOVE_BACKSLASH, - REGEX_SPECIAL_CHARS, - REGEX_SPECIAL_CHARS_GLOBAL -} = require('./constants'); - -exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val); -exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str); -exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str); -exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1'); -exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/'); - -exports.removeBackslashes = str => { - return str.replace(REGEX_REMOVE_BACKSLASH, match => { - return match === '\\' ? '' : match; - }); -}; - -exports.supportsLookbehinds = () => { - const segs = process.version.slice(1).split('.').map(Number); - if (segs.length === 3 && segs[0] >= 9 || (segs[0] === 8 && segs[1] >= 10)) { - return true; - } - return false; -}; - -exports.isWindows = options => { - if (options && typeof options.windows === 'boolean') { - return options.windows; - } - return win32 === true || path.sep === '\\'; -}; - -exports.escapeLast = (input, char, lastIdx) => { - const idx = input.lastIndexOf(char, lastIdx); - if (idx === -1) return input; - if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1); - return `${input.slice(0, idx)}\\${input.slice(idx)}`; -}; - -exports.removePrefix = (input, state = {}) => { - let output = input; - if (output.startsWith('./')) { - output = output.slice(2); - state.prefix = './'; - } - return output; -}; - -exports.wrapOutput = (input, state = {}, options = {}) => { - const prepend = options.contains ? '' : '^'; - const append = options.contains ? 
'' : '$'; - - let output = `${prepend}(?:${input})${append}`; - if (state.negated === true) { - output = `(?:^(?!${output}).*$)`; - } - return output; -}; diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json deleted file mode 100644 index 3db22d4..0000000 --- a/node_modules/picomatch/package.json +++ /dev/null @@ -1,81 +0,0 @@ -{ - "name": "picomatch", - "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.", - "version": "2.3.1", - "homepage": "https://github.com/micromatch/picomatch", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "funding": "https://github.com/sponsors/jonschlinkert", - "repository": "micromatch/picomatch", - "bugs": { - "url": "https://github.com/micromatch/picomatch/issues" - }, - "license": "MIT", - "files": [ - "index.js", - "lib" - ], - "main": "index.js", - "engines": { - "node": ">=8.6" - }, - "scripts": { - "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .", - "mocha": "mocha --reporter dot", - "test": "npm run lint && npm run mocha", - "test:ci": "npm run test:cover", - "test:cover": "nyc npm run mocha" - }, - "devDependencies": { - "eslint": "^6.8.0", - "fill-range": "^7.0.1", - "gulp-format-md": "^2.0.0", - "mocha": "^6.2.2", - "nyc": "^15.0.0", - "time-require": "github:jonschlinkert/time-require" - }, - "keywords": [ - "glob", - "match", - "picomatch" - ], - "nyc": { - "reporter": [ - "html", - "lcov", - "text-summary" - ] - }, - "verb": { - "toc": { - "render": true, - "method": "preWrite", - "maxdepth": 3 - }, - "layout": "empty", - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "related": { - "list": [ - "braces", - "micromatch" - ] - }, - "reflinks": [ - "braces", - "expand-brackets", - "extglob", - "fill-range", - "micromatch", - "minimatch", - "nanomatch", - "picomatch" - ] - } -} diff --git a/node_modules/queue-microtask/LICENSE b/node_modules/queue-microtask/LICENSE deleted file mode 100755 index c7e6852..0000000 --- a/node_modules/queue-microtask/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/queue-microtask/README.md b/node_modules/queue-microtask/README.md deleted file mode 100644 index 0be05a6..0000000 --- a/node_modules/queue-microtask/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# queue-microtask [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[ci-image]: https://img.shields.io/github/workflow/status/feross/queue-microtask/ci/master -[ci-url]: https://github.com/feross/queue-microtask/actions -[npm-image]: https://img.shields.io/npm/v/queue-microtask.svg -[npm-url]: https://npmjs.org/package/queue-microtask -[downloads-image]: https://img.shields.io/npm/dm/queue-microtask.svg -[downloads-url]: https://npmjs.org/package/queue-microtask -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -### fast, tiny [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) shim for modern engines - -- Use [`queueMicrotask`](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/queueMicrotask) in all modern JS engines. -- No dependencies. Less than 10 lines. No shims or complicated fallbacks. -- Optimal performance in all modern environments - - Uses `queueMicrotask` in modern environments - - Fallback to `Promise.resolve().then(fn)` in Node.js 10 and earlier, and old browsers (same performance as `queueMicrotask`) - -## install - -``` -npm install queue-microtask -``` - -## usage - -```js -const queueMicrotask = require('queue-microtask') - -queueMicrotask(() => { /* this will run soon */ }) -``` - -## What is `queueMicrotask` and why would one use it? - -The `queueMicrotask` function is a WHATWG standard. It queues a microtask to be executed prior to control returning to the event loop. - -A microtask is a short function which will run after the current task has completed its work and when there is no other code waiting to be run before control of the execution context is returned to the event loop. - -The code `queueMicrotask(fn)` is equivalent to the code `Promise.resolve().then(fn)`. It is also very similar to [`process.nextTick(fn)`](https://nodejs.org/api/process.html#process_process_nexttick_callback_args) in Node. - -Using microtasks lets code run without interfering with any other, potentially higher priority, code that is pending, but before the JS engine regains control over the execution context. - -See the [spec](https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#microtask-queuing) or [Node documentation](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback) for more information. - -## Who is this package for? - -This package allows you to use `queueMicrotask` safely in all modern JS engines. Use it if you prioritize small JS bundle size over support for old browsers. - -If you just need to support Node 12 and later, use `queueMicrotask` directly. If you need to support all versions of Node, use this package. - -## Why not use `process.nextTick`? - -In Node, `queueMicrotask` and `process.nextTick` are [essentially equivalent](https://nodejs.org/api/globals.html#globals_queuemicrotask_callback), though there are [subtle differences](https://github.com/YuzuJS/setImmediate#macrotasks-and-microtasks) that don't matter in most situations. - -You can think of `queueMicrotask` as a standardized version of `process.nextTick` that works in the browser. 
No need to rely on your browser bundler to shim `process` for the browser environment. - -## Why not use `setTimeout(fn, 0)`? - -This approach is the most compatible, but it has problems. Modern browsers throttle timers severely, so `setTimeout(…, 0)` usually takes at least 4ms to run. Furthermore, the throttling gets even worse if the page is backgrounded. If you have many `setTimeout` calls, then this can severely limit the performance of your program. - -## Why not use a microtask library like [`immediate`](https://www.npmjs.com/package/immediate) or [`asap`](https://www.npmjs.com/package/asap)? - -These packages are great! However, if you prioritize small JS bundle size over optimal performance in old browsers then you may want to consider this package. - -This package (`queue-microtask`) is four times smaller than `immediate`, twice as small as `asap`, and twice as small as using `process.nextTick` and letting the browser bundler shim it automatically. - -Note: This package throws an exception in JS environments which lack `Promise` support -- which are usually very old browsers and Node.js versions. - -Since the `queueMicrotask` API is supported in Node.js, Chrome, Firefox, Safari, Opera, and Edge, **the vast majority of users will get optimal performance**. Any JS environment with `Promise`, which is almost all of them, also get optimal performance. If you need support for JS environments which lack `Promise` support, use one of the alternative packages. - -## What is a shim? - -> In computer programming, a shim is a library that transparently intercepts API calls and changes the arguments passed, handles the operation itself or redirects the operation elsewhere. – [Wikipedia](https://en.wikipedia.org/wiki/Shim_(computing)) - -This package could also be described as a "ponyfill". - -> A ponyfill is almost the same as a polyfill, but not quite. Instead of patching functionality for older browsers, a ponyfill provides that functionality as a standalone module you can use. – [PonyFoo](https://ponyfoo.com/articles/polyfills-or-ponyfills) - -## API - -### `queueMicrotask(fn)` - -The `queueMicrotask()` method queues a microtask. - -The `fn` argument is a function to be executed after all pending tasks have completed but before yielding control to the browser's event loop. - -## license - -MIT. Copyright (c) [Feross Aboukhadijeh](https://feross.org). diff --git a/node_modules/queue-microtask/index.d.ts b/node_modules/queue-microtask/index.d.ts deleted file mode 100644 index b6a8646..0000000 --- a/node_modules/queue-microtask/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -declare const queueMicrotask: (cb: () => void) => void -export = queueMicrotask diff --git a/node_modules/queue-microtask/index.js b/node_modules/queue-microtask/index.js deleted file mode 100644 index 5560534..0000000 --- a/node_modules/queue-microtask/index.js +++ /dev/null @@ -1,9 +0,0 @@ -/*! queue-microtask. MIT License. Feross Aboukhadijeh */ -let promise - -module.exports = typeof queueMicrotask === 'function' - ? queueMicrotask.bind(typeof window !== 'undefined' ? 
window : global) - // reuse resolved promise, and allocate it lazily - : cb => (promise || (promise = Promise.resolve())) - .then(cb) - .catch(err => setTimeout(() => { throw err }, 0)) diff --git a/node_modules/queue-microtask/package.json b/node_modules/queue-microtask/package.json deleted file mode 100644 index d29a401..0000000 --- a/node_modules/queue-microtask/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "name": "queue-microtask", - "description": "fast, tiny `queueMicrotask` shim for modern engines", - "version": "1.2.3", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/queue-microtask/issues" - }, - "devDependencies": { - "standard": "*", - "tape": "^5.2.2" - }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "homepage": "https://github.com/feross/queue-microtask", - "keywords": [ - "asap", - "immediate", - "micro task", - "microtask", - "nextTick", - "process.nextTick", - "queue micro task", - "queue microtask", - "queue-microtask", - "queueMicrotask", - "setImmediate", - "task" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/feross/queue-microtask.git" - }, - "scripts": { - "test": "standard && tape test/*.js" - } -} diff --git a/node_modules/reusify/.github/dependabot.yml b/node_modules/reusify/.github/dependabot.yml deleted file mode 100644 index 4872c5a..0000000 --- a/node_modules/reusify/.github/dependabot.yml +++ /dev/null @@ -1,7 +0,0 @@ -version: 2 -updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - open-pull-requests-limit: 10 diff --git a/node_modules/reusify/.github/workflows/ci.yml b/node_modules/reusify/.github/workflows/ci.yml deleted file mode 100644 index 1e30ad8..0000000 --- a/node_modules/reusify/.github/workflows/ci.yml +++ /dev/null @@ -1,96 +0,0 @@ -name: ci - -on: [push, pull_request] - -jobs: - legacy: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: ['0.10', '0.12', 4.x, 6.x, 8.x, 10.x, 12.x, 13.x, 14.x, 15.x, 16.x] - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install --production && npm install tape - - - name: Run tests - run: | - npm run test - - test: - runs-on: ubuntu-latest - - strategy: - matrix: - node-version: [18.x, 20.x, 22.x] - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install - - - name: Run tests - run: | - npm run test:coverage - - types: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 22 - - - name: Install - run: | - npm install - - - name: Run types tests - run: | - npm run test:typescript - - lint: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Use Node.js - uses: actions/setup-node@v4 - with: - node-version: 22 - - - name: Install - run: | - npm install - - - name: Lint - run: | - npm run 
lint diff --git a/node_modules/reusify/LICENSE b/node_modules/reusify/LICENSE deleted file mode 100644 index 56d1590..0000000 --- a/node_modules/reusify/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-2024 Matteo Collina - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/node_modules/reusify/README.md b/node_modules/reusify/README.md deleted file mode 100644 index 1aaee5d..0000000 --- a/node_modules/reusify/README.md +++ /dev/null @@ -1,139 +0,0 @@ -# reusify - -[![npm version][npm-badge]][npm-url] - -Reuse your objects and functions for maximum speed. This technique will -make any function run ~10% faster. You call your functions a -lot, and it adds up quickly in hot code paths. - -``` -$ node benchmarks/createNoCodeFunction.js -Total time 53133 -Total iterations 100000000 -Iteration/s 1882069.5236482036 - -$ node benchmarks/reuseNoCodeFunction.js -Total time 50617 -Total iterations 100000000 -Iteration/s 1975620.838848608 -``` - -The above benchmark uses fibonacci to simulate a real high-cpu load. -The actual numbers might differ for your use case, but the difference -should not. - -The benchmark was taken using Node v6.10.0. - -This library was extracted from -[fastparallel](http://npm.im/fastparallel). - -## Example - -```js -var reusify = require('reusify') -var fib = require('reusify/benchmarks/fib') -var instance = reusify(MyObject) - -// get an object from the cache, -// or creates a new one when cache is empty -var obj = instance.get() - -// set the state -obj.num = 100 -obj.func() - -// reset the state. 
-// if the state contains any external object -// do not use delete operator (it is slow) -// prefer set them to null -obj.num = 0 - -// store an object in the cache -instance.release(obj) - -function MyObject () { - // you need to define this property - // so V8 can compile MyObject into an - // hidden class - this.next = null - this.num = 0 - - var that = this - - // this function is never reallocated, - // so it can be optimized by V8 - this.func = function () { - if (null) { - // do nothing - } else { - // calculates fibonacci - fib(that.num) - } - } -} -``` - -The above example was intended for synchronous code, let's see async: -```js -var reusify = require('reusify') -var instance = reusify(MyObject) - -for (var i = 0; i < 100; i++) { - getData(i, console.log) -} - -function getData (value, cb) { - var obj = instance.get() - - obj.value = value - obj.cb = cb - obj.run() -} - -function MyObject () { - this.next = null - this.value = null - - var that = this - - this.run = function () { - asyncOperation(that.value, that.handle) - } - - this.handle = function (err, result) { - that.cb(err, result) - that.value = null - that.cb = null - instance.release(that) - } -} -``` - -Also note how in the above examples, the code, that consumes an instance of `MyObject`, -reset the state to initial condition, just before storing it in the cache. -That's needed so that every subsequent request for an instance from the cache, -could get a clean instance. - -## Why - -It is faster because V8 doesn't have to collect all the functions you -create. On a short-lived benchmark, it is as fast as creating the -nested function, but on a longer time frame it creates less -pressure on the garbage collector. - -## Other examples -If you want to see some complex example, checkout [middie](https://github.com/fastify/middie) and [steed](https://github.com/mcollina/steed). - -## Acknowledgements - -Thanks to [Trevor Norris](https://github.com/trevnorris) for -getting me down the rabbit hole of performance, and thanks to [Mathias -Buss](http://github.com/mafintosh) for suggesting me to share this -trick. - -## License - -MIT - -[npm-badge]: https://badge.fury.io/js/reusify.svg -[npm-url]: https://badge.fury.io/js/reusify diff --git a/node_modules/reusify/SECURITY.md b/node_modules/reusify/SECURITY.md deleted file mode 100644 index dd9f1d5..0000000 --- a/node_modules/reusify/SECURITY.md +++ /dev/null @@ -1,15 +0,0 @@ -# Security Policy - -## Supported Versions - -Use this section to tell people about which versions of your project are -currently being supported with security updates. - -| Version | Supported | -| ------- | ------------------ | -| 1.x | :white_check_mark: | -| < 1.0 | :x: | - -## Reporting a Vulnerability - -Please report all vulnerabilities at [https://github.com/mcollina/fastq/security](https://github.com/mcollina/fastq/security). 
diff --git a/node_modules/reusify/benchmarks/createNoCodeFunction.js b/node_modules/reusify/benchmarks/createNoCodeFunction.js deleted file mode 100644 index ce1aac7..0000000 --- a/node_modules/reusify/benchmarks/createNoCodeFunction.js +++ /dev/null @@ -1,30 +0,0 @@ -'use strict' - -var fib = require('./fib') -var max = 100000000 -var start = Date.now() - -// create a funcion with the typical error -// pattern, that delegates the heavy load -// to something else -function createNoCodeFunction () { - /* eslint no-constant-condition: "off" */ - var num = 100 - - ;(function () { - if (null) { - // do nothing - } else { - fib(num) - } - })() -} - -for (var i = 0; i < max; i++) { - createNoCodeFunction() -} - -var time = Date.now() - start -console.log('Total time', time) -console.log('Total iterations', max) -console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/benchmarks/fib.js b/node_modules/reusify/benchmarks/fib.js deleted file mode 100644 index e22cc48..0000000 --- a/node_modules/reusify/benchmarks/fib.js +++ /dev/null @@ -1,13 +0,0 @@ -'use strict' - -function fib (num) { - var fib = [] - - fib[0] = 0 - fib[1] = 1 - for (var i = 2; i <= num; i++) { - fib[i] = fib[i - 2] + fib[i - 1] - } -} - -module.exports = fib diff --git a/node_modules/reusify/benchmarks/reuseNoCodeFunction.js b/node_modules/reusify/benchmarks/reuseNoCodeFunction.js deleted file mode 100644 index 3358d6e..0000000 --- a/node_modules/reusify/benchmarks/reuseNoCodeFunction.js +++ /dev/null @@ -1,38 +0,0 @@ -'use strict' - -var reusify = require('../') -var fib = require('./fib') -var instance = reusify(MyObject) -var max = 100000000 -var start = Date.now() - -function reuseNoCodeFunction () { - var obj = instance.get() - obj.num = 100 - obj.func() - obj.num = 0 - instance.release(obj) -} - -function MyObject () { - this.next = null - var that = this - this.num = 0 - this.func = function () { - /* eslint no-constant-condition: "off" */ - if (null) { - // do nothing - } else { - fib(that.num) - } - } -} - -for (var i = 0; i < max; i++) { - reuseNoCodeFunction() -} - -var time = Date.now() - start -console.log('Total time', time) -console.log('Total iterations', max) -console.log('Iteration/s', max / time * 1000) diff --git a/node_modules/reusify/eslint.config.js b/node_modules/reusify/eslint.config.js deleted file mode 100644 index d0a9af6..0000000 --- a/node_modules/reusify/eslint.config.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict' - -const base = require('neostandard')({}) - -module.exports = [ - ...base, - { - name: 'old-standard', - rules: { - 'no-var': 'off', - 'object-shorthand': 'off', - } - } -] diff --git a/node_modules/reusify/package.json b/node_modules/reusify/package.json deleted file mode 100644 index e47ff11..0000000 --- a/node_modules/reusify/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "reusify", - "version": "1.1.0", - "description": "Reuse objects and functions with style", - "main": "reusify.js", - "types": "reusify.d.ts", - "scripts": { - "lint": "eslint", - "test": "tape test.js", - "test:coverage": "c8 --100 tape test.js", - "test:typescript": "tsc" - }, - "pre-commit": [ - "lint", - "test", - "test:typescript" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/reusify.git" - }, - "keywords": [ - "reuse", - "object", - "performance", - "function", - "fast" - ], - "author": "Matteo Collina ", - "license": "MIT", - "bugs": { - "url": "https://github.com/mcollina/reusify/issues" - }, - "homepage": 
"https://github.com/mcollina/reusify#readme", - "engines": { - "node": ">=0.10.0", - "iojs": ">=1.0.0" - }, - "devDependencies": { - "@types/node": "^22.9.0", - "eslint": "^9.13.0", - "neostandard": "^0.12.0", - "pre-commit": "^1.2.2", - "tape": "^5.0.0", - "c8": "^10.1.2", - "typescript": "^5.2.2" - }, - "dependencies": { - } -} diff --git a/node_modules/reusify/reusify.d.ts b/node_modules/reusify/reusify.d.ts deleted file mode 100644 index 9ba277d..0000000 --- a/node_modules/reusify/reusify.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -interface Node { - next: Node | null; -} - -interface Constructor { - new(): T; -} - -declare function reusify(constructor: Constructor): { - get(): T; - release(node: T): void; -}; - -export = reusify; diff --git a/node_modules/reusify/reusify.js b/node_modules/reusify/reusify.js deleted file mode 100644 index e6f36f3..0000000 --- a/node_modules/reusify/reusify.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -function reusify (Constructor) { - var head = new Constructor() - var tail = head - - function get () { - var current = head - - if (current.next) { - head = current.next - } else { - head = new Constructor() - tail = head - } - - current.next = null - - return current - } - - function release (obj) { - tail.next = obj - tail = obj - } - - return { - get: get, - release: release - } -} - -module.exports = reusify diff --git a/node_modules/reusify/test.js b/node_modules/reusify/test.js deleted file mode 100644 index 929cfd7..0000000 --- a/node_modules/reusify/test.js +++ /dev/null @@ -1,66 +0,0 @@ -'use strict' - -var test = require('tape') -var reusify = require('./') - -test('reuse objects', function (t) { - t.plan(6) - - function MyObject () { - t.pass('constructor called') - this.next = null - } - - var instance = reusify(MyObject) - var obj = instance.get() - - t.notEqual(obj, instance.get(), 'two instance created') - t.notOk(obj.next, 'next must be null') - - instance.release(obj) - - // the internals keeps a hot copy ready for reuse - // putting this one back in the queue - instance.release(instance.get()) - - // comparing the old one with the one we got - // never do this in real code, after release you - // should never reuse that instance - t.equal(obj, instance.get(), 'instance must be reused') -}) - -test('reuse more than 2 objects', function (t) { - function MyObject () { - t.pass('constructor called') - this.next = null - } - - var instance = reusify(MyObject) - var obj = instance.get() - var obj2 = instance.get() - var obj3 = instance.get() - - t.notOk(obj.next, 'next must be null') - t.notOk(obj2.next, 'next must be null') - t.notOk(obj3.next, 'next must be null') - - t.notEqual(obj, obj2) - t.notEqual(obj, obj3) - t.notEqual(obj3, obj2) - - instance.release(obj) - instance.release(obj2) - instance.release(obj3) - - // skip one - instance.get() - - var obj4 = instance.get() - var obj5 = instance.get() - var obj6 = instance.get() - - t.equal(obj4, obj) - t.equal(obj5, obj2) - t.equal(obj6, obj3) - t.end() -}) diff --git a/node_modules/reusify/tsconfig.json b/node_modules/reusify/tsconfig.json deleted file mode 100644 index dbe862b..0000000 --- a/node_modules/reusify/tsconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "noEmit": true, - "strict": true - }, - "files": [ - "./reusify.d.ts" - ] -} diff --git a/node_modules/run-parallel/LICENSE b/node_modules/run-parallel/LICENSE deleted file mode 100644 index c7e6852..0000000 --- a/node_modules/run-parallel/LICENSE +++ /dev/null @@ 
-1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Feross Aboukhadijeh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/run-parallel/README.md b/node_modules/run-parallel/README.md deleted file mode 100644 index edc3da4..0000000 --- a/node_modules/run-parallel/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# run-parallel [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] - -[travis-image]: https://img.shields.io/travis/feross/run-parallel/master.svg -[travis-url]: https://travis-ci.org/feross/run-parallel -[npm-image]: https://img.shields.io/npm/v/run-parallel.svg -[npm-url]: https://npmjs.org/package/run-parallel -[downloads-image]: https://img.shields.io/npm/dm/run-parallel.svg -[downloads-url]: https://npmjs.org/package/run-parallel -[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg -[standard-url]: https://standardjs.com - -### Run an array of functions in parallel - -![parallel](https://raw.githubusercontent.com/feross/run-parallel/master/img.png) [![Sauce Test Status](https://saucelabs.com/browser-matrix/run-parallel.svg)](https://saucelabs.com/u/run-parallel) - -### install - -``` -npm install run-parallel -``` - -### usage - -#### parallel(tasks, [callback]) - -Run the `tasks` array of functions in parallel, without waiting until the previous -function has completed. If any of the functions pass an error to its callback, the main -`callback` is immediately called with the value of the error. Once the `tasks` have -completed, the results are passed to the final `callback` as an array. - -It is also possible to use an object instead of an array. Each property will be run as a -function and the results will be passed to the final `callback` as an object instead of -an array. This can be a more readable way of handling the results. - -##### arguments - -- `tasks` - An array or object containing functions to run. Each function is passed a -`callback(err, result)` which it must call on completion with an error `err` (which can -be `null`) and an optional `result` value. -- `callback(err, results)` - An optional callback to run once all the functions have -completed. This function gets a results array (or object) containing all the result -arguments passed to the task callbacks. 
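Before the project's own array example below, here is a hedged sketch of the object form described in the bullet above; the `user`/`posts` keys and the `setTimeout`-based tasks are hypothetical stand-ins for real asynchronous work.

```js
var parallel = require('run-parallel')

parallel({
  user: function (callback) {
    setTimeout(function () { callback(null, { id: 1, name: 'Ada' }) }, 200)
  },
  posts: function (callback) {
    setTimeout(function () { callback(null, ['first post']) }, 100)
  }
}, function (err, results) {
  // results is keyed by task name, regardless of completion order:
  // { user: { id: 1, name: 'Ada' }, posts: ['first post'] }
  console.log(err, results)
})
```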
- -##### example - -```js -var parallel = require('run-parallel') - -parallel([ - function (callback) { - setTimeout(function () { - callback(null, 'one') - }, 200) - }, - function (callback) { - setTimeout(function () { - callback(null, 'two') - }, 100) - } -], -// optional callback -function (err, results) { - // the results array will equal ['one','two'] even though - // the second function had a shorter timeout. -}) -``` - -This module is basically equavalent to -[`async.parallel`](https://github.com/caolan/async#paralleltasks-callback), but it's -handy to just have the one function you need instead of the kitchen sink. Modularity! -Especially handy if you're serving to the browser and need to reduce your javascript -bundle size. - -Works great in the browser with [browserify](http://browserify.org/)! - -### see also - -- [run-auto](https://github.com/feross/run-auto) -- [run-parallel-limit](https://github.com/feross/run-parallel-limit) -- [run-series](https://github.com/feross/run-series) -- [run-waterfall](https://github.com/feross/run-waterfall) - -### license - -MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). diff --git a/node_modules/run-parallel/index.js b/node_modules/run-parallel/index.js deleted file mode 100644 index 6307141..0000000 --- a/node_modules/run-parallel/index.js +++ /dev/null @@ -1,51 +0,0 @@ -/*! run-parallel. MIT License. Feross Aboukhadijeh */ -module.exports = runParallel - -const queueMicrotask = require('queue-microtask') - -function runParallel (tasks, cb) { - let results, pending, keys - let isSync = true - - if (Array.isArray(tasks)) { - results = [] - pending = tasks.length - } else { - keys = Object.keys(tasks) - results = {} - pending = keys.length - } - - function done (err) { - function end () { - if (cb) cb(err, results) - cb = null - } - if (isSync) queueMicrotask(end) - else end() - } - - function each (i, err, result) { - results[i] = result - if (--pending === 0 || err) { - done(err) - } - } - - if (!pending) { - // empty - done(null) - } else if (keys) { - // object - keys.forEach(function (key) { - tasks[key](function (err, result) { each(key, err, result) }) - }) - } else { - // array - tasks.forEach(function (task, i) { - task(function (err, result) { each(i, err, result) }) - }) - } - - isSync = false -} diff --git a/node_modules/run-parallel/package.json b/node_modules/run-parallel/package.json deleted file mode 100644 index 1f14757..0000000 --- a/node_modules/run-parallel/package.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "run-parallel", - "description": "Run an array of functions in parallel", - "version": "1.2.0", - "author": { - "name": "Feross Aboukhadijeh", - "email": "feross@feross.org", - "url": "https://feross.org" - }, - "bugs": { - "url": "https://github.com/feross/run-parallel/issues" - }, - "dependencies": { - "queue-microtask": "^1.2.2" - }, - "devDependencies": { - "airtap": "^3.0.0", - "standard": "*", - "tape": "^5.0.1" - }, - "homepage": "https://github.com/feross/run-parallel", - "keywords": [ - "parallel", - "async", - "function", - "callback", - "asynchronous", - "run", - "array", - "run parallel" - ], - "license": "MIT", - "main": "index.js", - "repository": { - "type": "git", - "url": "git://github.com/feross/run-parallel.git" - }, - "scripts": { - "test": "standard && npm run test-node && npm run test-browser", - "test-browser": "airtap -- test/*.js", - "test-browser-local": "airtap --local -- test/*.js", - "test-node": "tape test/*.js" - }, - "funding": [ - { - "type": "github", - "url": 
"https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] -} diff --git a/node_modules/shebang-command/index.js b/node_modules/shebang-command/index.js deleted file mode 100644 index f35db30..0000000 --- a/node_modules/shebang-command/index.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; -const shebangRegex = require('shebang-regex'); - -module.exports = (string = '') => { - const match = string.match(shebangRegex); - - if (!match) { - return null; - } - - const [path, argument] = match[0].replace(/#! ?/, '').split(' '); - const binary = path.split('/').pop(); - - if (binary === 'env') { - return argument; - } - - return argument ? `${binary} ${argument}` : binary; -}; diff --git a/node_modules/shebang-command/license b/node_modules/shebang-command/license deleted file mode 100644 index db6bc32..0000000 --- a/node_modules/shebang-command/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Kevin Mårtensson (github.com/kevva) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/shebang-command/package.json b/node_modules/shebang-command/package.json deleted file mode 100644 index 18e3c04..0000000 --- a/node_modules/shebang-command/package.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "shebang-command", - "version": "2.0.0", - "description": "Get the command from a shebang", - "license": "MIT", - "repository": "kevva/shebang-command", - "author": { - "name": "Kevin Mårtensson", - "email": "kevinmartensson@gmail.com", - "url": "github.com/kevva" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava" - }, - "files": [ - "index.js" - ], - "keywords": [ - "cmd", - "command", - "parse", - "shebang" - ], - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "devDependencies": { - "ava": "^2.3.0", - "xo": "^0.24.0" - } -} diff --git a/node_modules/shebang-command/readme.md b/node_modules/shebang-command/readme.md deleted file mode 100644 index 84feb44..0000000 --- a/node_modules/shebang-command/readme.md +++ /dev/null @@ -1,34 +0,0 @@ -# shebang-command [![Build Status](https://travis-ci.org/kevva/shebang-command.svg?branch=master)](https://travis-ci.org/kevva/shebang-command) - -> Get the command from a shebang - - -## Install - -``` -$ npm install shebang-command -``` - - -## Usage - -```js -const shebangCommand = require('shebang-command'); - -shebangCommand('#!/usr/bin/env node'); -//=> 'node' - -shebangCommand('#!/bin/bash'); -//=> 'bash' -``` - - -## API - -### shebangCommand(string) - -#### string - -Type: `string` - -String containing a shebang. diff --git a/node_modules/shebang-regex/index.d.ts b/node_modules/shebang-regex/index.d.ts deleted file mode 100644 index 61d034b..0000000 --- a/node_modules/shebang-regex/index.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** -Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line. - -@example -``` -import shebangRegex = require('shebang-regex'); - -const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; - -shebangRegex.test(string); -//=> true - -shebangRegex.exec(string)[0]; -//=> '#!/usr/bin/env node' - -shebangRegex.exec(string)[1]; -//=> '/usr/bin/env node' -``` -*/ -declare const shebangRegex: RegExp; - -export = shebangRegex; diff --git a/node_modules/shebang-regex/index.js b/node_modules/shebang-regex/index.js deleted file mode 100644 index 63fc4a0..0000000 --- a/node_modules/shebang-regex/index.js +++ /dev/null @@ -1,2 +0,0 @@ -'use strict'; -module.exports = /^#!(.*)/; diff --git a/node_modules/shebang-regex/license b/node_modules/shebang-regex/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/shebang-regex/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/shebang-regex/package.json b/node_modules/shebang-regex/package.json deleted file mode 100644 index 00ab30f..0000000 --- a/node_modules/shebang-regex/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "shebang-regex", - "version": "3.0.0", - "description": "Regular expression for matching a shebang line", - "license": "MIT", - "repository": "sindresorhus/shebang-regex", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=8" - }, - "scripts": { - "test": "xo && ava && tsd" - }, - "files": [ - "index.js", - "index.d.ts" - ], - "keywords": [ - "regex", - "regexp", - "shebang", - "match", - "test", - "line" - ], - "devDependencies": { - "ava": "^1.4.1", - "tsd": "^0.7.2", - "xo": "^0.24.0" - } -} diff --git a/node_modules/shebang-regex/readme.md b/node_modules/shebang-regex/readme.md deleted file mode 100644 index 5ecf863..0000000 --- a/node_modules/shebang-regex/readme.md +++ /dev/null @@ -1,33 +0,0 @@ -# shebang-regex [![Build Status](https://travis-ci.org/sindresorhus/shebang-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/shebang-regex) - -> Regular expression for matching a [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) line - - -## Install - -``` -$ npm install shebang-regex -``` - - -## Usage - -```js -const shebangRegex = require('shebang-regex'); - -const string = '#!/usr/bin/env node\nconsole.log("unicorns");'; - -shebangRegex.test(string); -//=> true - -shebangRegex.exec(string)[0]; -//=> '#!/usr/bin/env node' - -shebangRegex.exec(string)[1]; -//=> '/usr/bin/env node' -``` - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/shelljs/LICENSE b/node_modules/shelljs/LICENSE deleted file mode 100644 index 40a2bf6..0000000 --- a/node_modules/shelljs/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2012, Artur Adib -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/shelljs/README.md b/node_modules/shelljs/README.md deleted file mode 100644 index a0de676..0000000 --- a/node_modules/shelljs/README.md +++ /dev/null @@ -1,949 +0,0 @@ -# ShellJS - Unix shell commands for Node.js - -[![GitHub Actions](https://img.shields.io/github/actions/workflow/status/shelljs/shelljs/main.yml?style=flat-square&logo=github)](https://github.com/shelljs/shelljs/actions/workflows/main.yml) -[![Codecov](https://img.shields.io/codecov/c/github/shelljs/shelljs/main.svg?style=flat-square&label=coverage)](https://codecov.io/gh/shelljs/shelljs) -[![npm version](https://img.shields.io/npm/v/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) -[![npm downloads](https://img.shields.io/npm/dm/shelljs.svg?style=flat-square)](https://www.npmjs.com/package/shelljs) - -ShellJS is a portable **(Windows/Linux/macOS)** implementation of Unix shell -commands on top of the Node.js API. You can use it to eliminate your shell -script's dependency on Unix while still keeping its familiar and powerful -commands. You can also install it globally so you can run it from outside Node -projects - say goodbye to those gnarly Bash scripts! - -ShellJS is proudly tested on every LTS node release since `v18`! - -The project is unit-tested and battle-tested in projects like: - -+ [Firebug](http://getfirebug.com/) - Firefox's infamous debugger -+ [JSHint](http://jshint.com) & [ESLint](http://eslint.org/) - popular JavaScript linters -+ [Zepto](http://zeptojs.com) - jQuery-compatible JavaScript library for modern browsers -+ [Yeoman](http://yeoman.io/) - Web application stack and development tool -+ [Deployd.com](http://deployd.com) - Open source PaaS for quick API backend generation -+ And [many more](https://npmjs.org/browse/depended/shelljs). - -If you have feedback, suggestions, or need help, feel free to post in our [issue -tracker](https://github.com/shelljs/shelljs/issues). - -Think ShellJS is cool? Check out some related projects in our [Wiki -page](https://github.com/shelljs/shelljs/wiki)! - -Upgrading from an older version? Check out our [breaking -changes](https://github.com/shelljs/shelljs/wiki/Breaking-Changes) page to see -what changes to watch out for while upgrading. - -## Command line use - -If you just want cross platform UNIX commands, checkout our new project -[shelljs/shx](https://github.com/shelljs/shx), a utility to expose `shelljs` to -the command line. - -For example: - -``` -$ shx mkdir -p foo -$ shx touch foo/bar.txt -$ shx rm -rf foo -``` - -## Plugin API - -ShellJS now supports third-party plugins! You can learn more about using plugins -and writing your own ShellJS commands in [the -wiki](https://github.com/shelljs/shelljs/wiki/Using-ShellJS-Plugins). - -## A quick note about the docs - -For documentation on all the latest features, check out our -[README](https://github.com/shelljs/shelljs). To read docs that are consistent -with the latest release, check out [the npm -page](https://www.npmjs.com/package/shelljs). 
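The plugin API mentioned above is documented in the linked wiki; the rough, assumption-heavy sketch below only shows its shape. The registration call mirrors how the bundled commands register themselves (`common.register(name, fn, options)`); the `upcase` command and its behavior are hypothetical, and the exact way a registered command is exposed on the shelljs object is an assumption, so treat the wiki as authoritative.

```javascript
var plugin = require('shelljs/plugin');
var shell = require('shelljs');

// Hypothetical implementation: the first parameter is the parsed options
// object, the remaining parameters are the caller's arguments.
function _upcase(options, text) {
  return String(text).toUpperCase();
}

// Same registration shape the built-in commands use.
plugin.register('upcase', _upcase, {});

// Assumed usage: registered commands become callable on the shelljs object.
console.log(shell.upcase('hello').toString()); //=> 'HELLO'
```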
- -## Installing - -Via npm: - -```bash -$ npm install [-g] shelljs -``` - -## Examples - -```javascript -var shell = require('shelljs'); - -if (!shell.which('git')) { - shell.echo('Sorry, this script requires git'); - shell.exit(1); -} - -// Copy files to release dir -shell.rm('-rf', 'out/Release'); -shell.cp('-R', 'stuff/', 'out/Release'); - -// Replace macros in each .js file -shell.cd('lib'); -shell.ls('*.js').forEach(function (file) { - shell.sed('-i', 'BUILD_VERSION', 'v0.1.2', file); - shell.sed('-i', /^.*REMOVE_THIS_LINE.*$/, '', file); - shell.sed('-i', /.*REPLACE_LINE_WITH_MACRO.*\n/, shell.cat('macro.js'), file); -}); -shell.cd('..'); - -// Run external tool synchronously -if (shell.exec('git commit -am "Auto-commit"').code !== 0) { - shell.echo('Error: Git commit failed'); - shell.exit(1); -} -``` - -## Exclude options - -If you need to pass a parameter that looks like an option, you can do so like: - -```js -shell.grep('--', '-v', 'path/to/file'); // Search for "-v", no grep options - -shell.cp('-R', '-dir', 'outdir'); // If already using an option, you're done -``` - -## Global vs. Local - -We no longer recommend using a global-import for ShellJS (i.e. -`require('shelljs/global')`). While still supported for convenience, this -pollutes the global namespace, and should therefore only be used with caution. - -Instead, we recommend a local import (standard for npm packages): - -```javascript -var shell = require('shelljs'); -shell.echo('hello world'); -``` - -Alternatively, we also support importing as a module with: - -```javascript -import shell from 'shelljs'; -shell.echo('hello world'); -``` - - - - -## Command reference - - -All commands run synchronously, unless otherwise stated. -All commands accept standard bash globbing characters (`*`, `?`, etc.), -compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). - -For less-commonly used commands and features, please check out our [wiki -page](https://github.com/shelljs/shelljs/wiki). - - -### cat([options,] file [, file ...]) -### cat([options,] file_array) - -Available options: - -+ `-n`: number all output lines - -Examples: - -```javascript -var str = cat('file*.txt'); -var str = cat('file1', 'file2'); -var str = cat(['file1', 'file2']); // same as above -``` - -Returns a [ShellString](#shellstringstr) containing the given file, or a -concatenated string containing the files if more than one file is given (a -new line character is introduced between each file). - - -### cd([dir]) - -Changes to directory `dir` for the duration of the script. Changes to home -directory if no argument is supplied. Returns a -[ShellString](#shellstringstr) to indicate success or failure. - - -### chmod([options,] octal_mode || octal_string, file) -### chmod([options,] symbolic_mode, file) - -Available options: - -+ `-v`: output a diagnostic for every file processed -+ `-c`: like verbose, but report only when a change is made -+ `-R`: change files and directories recursively - -Examples: - -```javascript -chmod(755, '/Users/brandon'); -chmod('755', '/Users/brandon'); // same as above -chmod('u+x', '/Users/brandon'); -chmod('-R', 'a-w', '/Users/brandon'); -``` - -Alters the permissions of a file or directory by either specifying the -absolute permissions in octal form or expressing the changes in symbols. -This command tries to mimic the POSIX behavior as much as possible. -Notable exceptions: - -+ In symbolic modes, `a-r` and `-r` are identical. No consideration is - given to the `umask`. 
-+ There is no "quiet" option, since default behavior is to run silent. -+ Windows OS uses a very different permission model than POSIX. `chmod()` - does its best on Windows, but there are limits to how file permissions can - be set. Note that WSL (Windows subsystem for Linux) **does** follow POSIX, - so cross-platform compatibility should not be a concern there. - -Returns a [ShellString](#shellstringstr) indicating success or failure. - - -### cmd(arg1[, arg2, ...] [, options]) - -Available options: - -+ `cwd: directoryPath`: change the current working directory only for this - cmd() invocation. -+ `maxBuffer: num`: Raise or decrease the default buffer size for - stdout/stderr. -+ `timeout`: Change the default timeout. - -Examples: - -```javascript -var version = cmd('node', '--version').stdout; -cmd('git', 'commit', '-am', `Add suport for node ${version}`); -console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) -console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) -``` - -Executes the given command synchronously. This is intended as an easier -alternative for [exec()](#execcommand--options--callback), with better -security around globbing, comamnd injection, and variable expansion. This is -guaranteed to only run one external command, and won't give special -treatment for any shell characters (ex. this treats `|` as a literal -character, not as a shell pipeline). -This returns a [ShellString](#shellstringstr). - -By default, this performs globbing on all platforms, but you can disable -this with `set('-f')`. - -This **does not** support asynchronous mode. If you need asynchronous -command execution, check out [execa](https://www.npmjs.com/package/execa) or -the node builtin `child_process.execFile()` instead. - - -### cp([options,] source [, source ...], dest) -### cp([options,] source_array, dest) - -Available options: - -+ `-f`: force (default behavior) -+ `-n`: no-clobber -+ `-u`: only copy if `source` is newer than `dest` -+ `-r`, `-R`: recursive -+ `-L`: follow symlinks -+ `-P`: don't follow symlinks -+ `-p`: preserve file mode, ownership, and timestamps - -Examples: - -```javascript -cp('file1', 'dir1'); -cp('-R', 'path/to/dir/', '~/newCopy/'); -cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); -cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above -``` - -Copies files. Returns a [ShellString](#shellstringstr) indicating success -or failure. - - -### pushd([options,] [dir | '-N' | '+N']) - -Available options: - -+ `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. -+ `-q`: Suppresses output to the console. - -Arguments: - -+ `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. -+ `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. -+ `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. - -Examples: - -```javascript -// process.cwd() === '/usr' -pushd('/etc'); // Returns /etc /usr -pushd('+1'); // Returns /usr /etc -``` - -Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. Returns an array of paths in the stack. 
- - -### popd([options,] ['-N' | '+N']) - -Available options: - -+ `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. -+ `-q`: Suppresses output to the console. - -Arguments: - -+ `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. -+ `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. - -Examples: - -```javascript -echo(process.cwd()); // '/usr' -pushd('/etc'); // '/etc /usr' -echo(process.cwd()); // '/etc' -popd(); // '/usr' -echo(process.cwd()); // '/usr' -``` - -When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. - - -### dirs([options | '+N' | '-N']) - -Available options: - -+ `-c`: Clears the directory stack by deleting all of the elements. -+ `-q`: Suppresses output to the console. - -Arguments: - -+ `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. -+ `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. - -Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. - -See also: `pushd`, `popd` - - -### echo([options,] string [, string ...]) - -Available options: - -+ `-e`: interpret backslash escapes (default) -+ `-n`: remove trailing newline from output - -Examples: - -```javascript -echo('hello world'); -var str = echo('hello world'); -echo('-n', 'no newline at end'); -``` - -Prints `string` to stdout, and returns a [ShellString](#shellstringstr). - - -### exec(command [, options] [, callback]) - -Available options: - -+ `async`: Asynchronous execution. If a callback is provided, it will be set to - `true`, regardless of the passed value (default: `false`). -+ `fatal`: Exit upon error (default: `false`). -+ `silent`: Do not echo program output to console (default: `false`). -+ `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and - what is written to stdout and stderr when not in silent mode (default: `'utf8'`). -+ and any option available to Node.js's - [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) - -Examples: - -```javascript -var version = exec('node --version', {silent:true}).stdout; - -var child = exec('some_long_running_process', {async:true}); -child.stdout.on('data', function(data) { - /* ... do something with data ... */ -}); - -exec('some_long_running_process', function(code, stdout, stderr) { - console.log('Exit code:', code); - console.log('Program output:', stdout); - console.log('Program stderr:', stderr); -}); -``` - -Executes the given `command` _synchronously_, unless otherwise specified. -When in synchronous mode, this returns a [ShellString](#shellstringstr). -Otherwise, this returns the child process object, and the `callback` -receives the arguments `(code, stdout, stderr)`. - -Not seeing the behavior you want? `exec()` runs everything through `sh` -by default (or `cmd.exe` on Windows), which differs from `bash`. 
If you -need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. - -**Security note:** as `shell.exec()` executes an arbitrary string in the -system shell, it is **critical** to properly sanitize user input to avoid -**command injection**. For more context, consult the [Security -Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). - - -### find(path [, path ...]) -### find(path_array) - -Examples: - -```javascript -find('src', 'lib'); -find(['src', 'lib']); // same as above -find('.').filter(function(file) { return file.match(/\.js$/); }); -``` - -Returns a [ShellString](#shellstringstr) (with array-like properties) of all -files (however deep) in the given paths. - -The main difference from `ls('-R', path)` is that the resulting file names -include the base directories (e.g., `lib/resources/file1` instead of just `file1`). - - -### grep([options,] regex_filter, file [, file ...]) -### grep([options,] regex_filter, file_array) - -Available options: - -+ `-v`: Invert `regex_filter` (only print non-matching lines). -+ `-l`: Print only filenames of matching files. -+ `-i`: Ignore case. -+ `-n`: Print line numbers. -+ `-B `: Show `` lines before each result. -+ `-A `: Show `` lines after each result. -+ `-C `: Show `` lines before and after each result. -B and -A override this option. - -Examples: - -```javascript -grep('-v', 'GLOBAL_VARIABLE', '*.js'); -grep('GLOBAL_VARIABLE', '*.js'); -grep('-B', 3, 'GLOBAL_VARIABLE', '*.js'); -grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js'); -grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js'); -``` - -Reads input string from given files and returns a -[ShellString](#shellstringstr) containing all lines of the @ file that match -the given `regex_filter`. - - -### head([{'-n': \},] file [, file ...]) -### head([{'-n': \},] file_array) - -Available options: - -+ `-n `: Show the first `` lines of the files - -Examples: - -```javascript -var str = head({'-n': 1}, 'file*.txt'); -var str = head('file1', 'file2'); -var str = head(['file1', 'file2']); // same as above -``` - -Read the start of a `file`. Returns a [ShellString](#shellstringstr). - - -### ln([options,] source, dest) - -Available options: - -+ `-s`: symlink -+ `-f`: force - -Examples: - -```javascript -ln('file', 'newlink'); -ln('-sf', 'file', 'existing'); -``` - -Links `source` to `dest`. Use `-f` to force the link, should `dest` already -exist. Returns a [ShellString](#shellstringstr) indicating success or -failure. - - -### ls([options,] [path, ...]) -### ls([options,] path_array) - -Available options: - -+ `-R`: recursive -+ `-A`: all files (include files beginning with `.`, except for `.` and `..`) -+ `-L`: follow symlinks -+ `-d`: list directories themselves, not their contents -+ `-l`: provides more details for each file. Specifically, each file is - represented by a structured object with separate fields for file - metadata (see - [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The - return value also overrides `.toString()` to resemble `ls -l`'s - output format for human readability, but programmatic usage should - depend on the stable object format rather than the `.toString()` - representation. 
- -Examples: - -```javascript -ls('projs/*.js'); -ls('projs/**/*.js'); // Find all js files recursively in projs -ls('-R', '/users/me', '/tmp'); -ls('-R', ['/users/me', '/tmp']); // same as above -ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} -``` - -Returns a [ShellString](#shellstringstr) (with array-like properties) of all -the files in the given `path`, or files in the current directory if no -`path` is provided. - - -### mkdir([options,] dir [, dir ...]) -### mkdir([options,] dir_array) - -Available options: - -+ `-p`: full path (and create intermediate directories, if necessary) - -Examples: - -```javascript -mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); -mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above -``` - -Creates directories. Returns a [ShellString](#shellstringstr) indicating -success or failure. - - -### mv([options ,] source [, source ...], dest') -### mv([options ,] source_array, dest') - -Available options: - -+ `-f`: force (default behavior) -+ `-n`: no-clobber - -Examples: - -```javascript -mv('-n', 'file', 'dir/'); -mv('file1', 'file2', 'dir/'); -mv(['file1', 'file2'], 'dir/'); // same as above -``` - -Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) -indicating success or failure. - - -### pwd() - -Returns the current directory as a [ShellString](#shellstringstr). - - -### rm([options,] file [, file ...]) -### rm([options,] file_array) - -Available options: - -+ `-f`: force -+ `-r, -R`: recursive - -Examples: - -```javascript -rm('-rf', '/tmp/*'); -rm('some_file.txt', 'another_file.txt'); -rm(['some_file.txt', 'another_file.txt']); // same as above -``` - -Removes files. Returns a [ShellString](#shellstringstr) indicating success -or failure. - - -### sed([options,] search_regex, replacement, file [, file ...]) -### sed([options,] search_regex, replacement, file_array) - -Available options: - -+ `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ - -Examples: - -```javascript -sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); -``` - -Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on -each of the lines from the input string using the given `search_regex` and `replacement` string or -function. Returns the new [ShellString](#shellstringstr) after replacement. - -Note: - -Like unix `sed`, ShellJS `sed` supports capture groups. Capture groups are specified -using the `$n` syntax: - -```javascript -sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); -``` - -Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file -(split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') -will not match anything and nothing will be replaced. - - -### set(options) - -Available options: - -+ `+/-e`: exit upon error (`config.fatal`) -+ `+/-v`: verbose: show all commands (`config.verbose`) -+ `+/-f`: disable filename expansion (globbing) - -Examples: - -```javascript -set('-e'); // exit upon first error -set('+e'); // this undoes a "set('-e')" -``` - -Sets global configuration variables. - - -### sort([options,] file [, file ...]) -### sort([options,] file_array) - -Available options: - -+ `-r`: Reverse the results -+ `-n`: Compare according to numerical value - -Examples: - -```javascript -sort('foo.txt', 'bar.txt'); -sort('-r', 'foo.txt'); -``` - -Return the contents of the `file`s, sorted line-by-line as a -[ShellString](#shellstringstr). 
Sorting multiple files mixes their content -(just as unix `sort` does). - - -### tail([{'-n': \},] file [, file ...]) -### tail([{'-n': \},] file_array) - -Available options: - -+ `-n `: Show the last `` lines of `file`s - -Examples: - -```javascript -var str = tail({'-n': 1}, 'file*.txt'); -var str = tail('file1', 'file2'); -var str = tail(['file1', 'file2']); // same as above -``` - -Read the end of a `file`. Returns a [ShellString](#shellstringstr). - - -### tempdir() - -Examples: - -```javascript -var tmp = tempdir(); // "/tmp" for most *nix platforms -``` - -Searches and returns string containing a writeable, platform-dependent temporary directory. -Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). - - -### test(expression) - -Available expression primaries: - -+ `'-b', 'path'`: true if path is a block device -+ `'-c', 'path'`: true if path is a character device -+ `'-d', 'path'`: true if path is a directory -+ `'-e', 'path'`: true if path exists -+ `'-f', 'path'`: true if path is a regular file -+ `'-L', 'path'`: true if path is a symbolic link -+ `'-p', 'path'`: true if path is a pipe (FIFO) -+ `'-S', 'path'`: true if path is a socket - -Examples: - -```javascript -if (test('-d', path)) { /* do something with dir */ }; -if (!test('-f', path)) continue; // skip if it's not a regular file -``` - -Evaluates `expression` using the available primaries and returns -corresponding boolean value. - - -### ShellString.prototype.to(file) - -Examples: - -```javascript -cat('input.txt').to('output.txt'); -``` - -Analogous to the redirection operator `>` in Unix, but works with -`ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix -redirections, `to()` will overwrite any existing file!_ Returns the same -[ShellString](#shellstringstr) this operated on, to support chaining. - - -### ShellString.prototype.toEnd(file) - -Examples: - -```javascript -cat('input.txt').toEnd('output.txt'); -``` - -Analogous to the redirect-and-append operator `>>` in Unix, but works with -`ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the -same [ShellString](#shellstringstr) this operated on, to support chaining. - - -### touch([options,] file [, file ...]) -### touch([options,] file_array) - -Available options: - -+ `-a`: Change only the access time -+ `-c`: Do not create any files -+ `-m`: Change only the modification time -+ `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) - instead of current time -+ `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current - time - -Examples: - -```javascript -touch('source.js'); -touch('-c', 'path/to/file.js'); -touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); -touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); -touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); -``` - -Update the access and modification times of each file to the current time. -A file argument that does not exist is created empty, unless `-c` is supplied. -This is a partial implementation of -[`touch(1)`](http://linux.die.net/man/1/touch). Returns a -[ShellString](#shellstringstr) indicating success or failure. 
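A hedged sketch tying together several commands documented above: guard with `test('-f')`, read with `cat()`, transform with a piped `sed()`, and write the result out with the `to()` redirection helper. The file names and the `__ENV__` token are placeholders.

```javascript
var shell = require('shelljs');

var template = 'config.template.json';   // placeholder input file
var output = 'config.json';              // placeholder output file

if (!shell.test('-f', template)) {
  shell.echo('missing template: ' + template);
  shell.exit(1);
}

// Replace a token line by line and redirect the result, like `sed ... > file`.
shell.cat(template)
  .sed(/__ENV__/g, process.env.NODE_ENV || 'development')
  .to(output);
```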
- - -### uniq([options,] [input, [output]]) - -Available options: - -+ `-i`: Ignore case while comparing -+ `-c`: Prefix lines by the number of occurrences -+ `-d`: Only print duplicate lines, one for each group of identical lines - -Examples: - -```javascript -uniq('foo.txt'); -uniq('-i', 'foo.txt'); -uniq('-cd', 'foo.txt', 'bar.txt'); -``` - -Filter adjacent matching lines from `input`. Returns a -[ShellString](#shellstringstr). - - -### which(command) - -Examples: - -```javascript -var nodeExec = which('node'); -``` - -Searches for `command` in the system's `PATH`. On Windows, this uses the -`PATHEXT` variable to append the extension if it's not already executable. -Returns a [ShellString](#shellstringstr) containing the absolute path to -`command`. - - -### exit(code) - -Exits the current process with the given exit `code`. - -### error() - -Tests if error occurred in the last command. Returns a truthy value if an -error returned, or a falsy value otherwise. - -**Note**: do not rely on the -return value to be an error message. If you need the last error message, use -the `.stderr` attribute from the last command's return value instead. - - -### errorCode() - -Returns the error code from the last command. - - -### ShellString(str) - -Examples: - -```javascript -var foo = new ShellString('hello world'); -``` - -This is a dedicated type returned by most ShellJS methods, which wraps a -string (or array) value. This has all the string (or array) methods, but -also exposes extra methods: [`.to()`](#shellstringprototypetofile), -[`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods -(ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string -by calling `.toString()`. - -This type also exposes the corresponding command's stdout, stderr, and -return status code via the `.stdout` (string), `.stderr` (string), and -`.code` (number) properties respectively. - - -### env['VAR_NAME'] - -Object containing environment variables (both getter and setter). Shortcut -to `process.env`. - -### Pipes - -Examples: - -```javascript -grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); -echo("files with o's in the name:\n" + ls().grep('o')); -cat('test.js').exec('node'); // pipe to exec() call -``` - -Commands can send their output to another command in a pipe-like fashion. -`sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand -side of a pipe. Pipes can be chained. - -## Configuration - - -### config.silent - -Example: - -```javascript -var sh = require('shelljs'); -var silentState = sh.config.silent; // save old silent state -sh.config.silent = true; -/* ... */ -sh.config.silent = silentState; // restore old silent state -``` - -Suppresses all command output if `true`, except for `echo()` calls. -Default is `false`. - -### config.fatal - -Example: - -```javascript -require('shelljs/global'); -config.fatal = true; // or set('-e'); -cp('this_file_does_not_exist', '/dev/null'); // throws Error here -/* more commands... */ -``` - -If `true`, the script will throw a Javascript error when any shell.js -command encounters an error. Default is `false`. This is analogous to -Bash's `set -e`. 
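A hedged sketch of the error-reporting helpers described above: run a command silently, then check `error()` and `errorCode()`, and read the message from the `.stderr` attribute of the returned ShellString rather than from `error()`'s return value. The `git` command is just an example.

```javascript
var shell = require('shelljs');

var result = shell.exec('git rev-parse HEAD', { silent: true });

if (shell.error()) {
  // error() is only a truthy/falsy signal; the actual message lives on
  // the last command's return value.
  console.error('git failed with code', shell.errorCode());
  console.error(result.stderr);
  shell.exit(1);
}

console.log('current commit:', result.stdout.trim());
```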
- -### config.verbose - -Example: - -```javascript -config.verbose = true; // or set('-v'); -cd('dir/'); -rm('-rf', 'foo.txt', 'bar.txt'); -exec('echo hello'); -``` - -Will print each command as follows: - -``` -cd dir/ -rm -rf foo.txt bar.txt -exec echo hello -``` - -### config.globOptions (deprecated) - -**Deprecated**: we recommend that you do not edit `config.globOptions`. -Support for this configuration option may be changed or removed in a future -ShellJS release. - -**Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS -v0.9.x, `config.globOptions` is compatible with `fast-glob`. - -Example: - -```javascript -config.globOptions = {nodir: true}; -``` - -`config.globOptions` changes how ShellJS expands glob (wildcard) -expressions. See -[fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) -for available options. Be aware that modifying `config.globOptions` **may -break ShellJS functionality.** - -### config.reset() - -Example: - -```javascript -var shell = require('shelljs'); -// Make changes to shell.config, and do stuff... -/* ... */ -shell.config.reset(); // reset to original state -// Do more stuff, but with original settings -/* ... */ -``` - -Reset `shell.config` to the defaults: - -```javascript -{ - fatal: false, - globOptions: {}, - maxdepth: 255, - noglob: false, - silent: false, - verbose: false, -} -``` - -## Team - -| [![Nate Fischer](https://avatars.githubusercontent.com/u/5801521?s=130)](https://github.com/nfischer) | -|:---:| -| [Nate Fischer](https://github.com/nfischer) | diff --git a/node_modules/shelljs/global.js b/node_modules/shelljs/global.js deleted file mode 100644 index e061f5a..0000000 --- a/node_modules/shelljs/global.js +++ /dev/null @@ -1,15 +0,0 @@ -/* eslint no-extend-native: 0 */ -var shell = require('./shell'); -var common = require('./src/common'); - -Object.keys(shell).forEach(function (cmd) { - global[cmd] = shell[cmd]; -}); - -var _to = require('./src/to'); - -String.prototype.to = common.wrap('to', _to); - -var _toEnd = require('./src/toEnd'); - -String.prototype.toEnd = common.wrap('toEnd', _toEnd); diff --git a/node_modules/shelljs/make.js b/node_modules/shelljs/make.js deleted file mode 100644 index a8438c8..0000000 --- a/node_modules/shelljs/make.js +++ /dev/null @@ -1,57 +0,0 @@ -require('./global'); - -global.config.fatal = true; -global.target = {}; - -var args = process.argv.slice(2), - targetArgs, - dashesLoc = args.indexOf('--'); - -// split args, everything after -- if only for targets -if (dashesLoc > -1) { - targetArgs = args.slice(dashesLoc + 1, args.length); - args = args.slice(0, dashesLoc); -} - -// This ensures we only execute the script targets after the entire script has -// been evaluated -setTimeout(function() { - var t; - - if (args.length === 1 && args[0] === '--help') { - console.log('Available targets:'); - for (t in global.target) - console.log(' ' + t); - return; - } - - // Wrap targets to prevent duplicate execution - for (t in global.target) { - (function(t, oldTarget){ - - // Wrap it - global.target[t] = function() { - if (!oldTarget.done){ - oldTarget.done = true; - oldTarget.result = oldTarget.apply(oldTarget, arguments); - } - return oldTarget.result; - }; - - })(t, global.target[t]); - } - - // Execute desired targets - if (args.length > 0) { - args.forEach(function(arg) { - if (arg in global.target) - global.target[arg](targetArgs); - else { - console.log('no such target: ' + arg); - } - }); - } else if ('all' in global.target) { - global.target.all(targetArgs); - 
} - -}, 0); diff --git a/node_modules/shelljs/package.json b/node_modules/shelljs/package.json deleted file mode 100644 index a5c3299..0000000 --- a/node_modules/shelljs/package.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "name": "shelljs", - "version": "0.10.0", - "description": "Portable Unix shell commands for Node.js", - "keywords": [ - "shelljs", - "bash", - "unix", - "shell", - "makefile", - "make", - "jake", - "synchronous" - ], - "contributors": [ - "Nate Fischer (https://github.com/nfischer)", - "Brandon Freitag (https://github.com/freitagbr)" - ], - "repository": { - "type": "git", - "url": "git://github.com/shelljs/shelljs.git" - }, - "license": "BSD-3-Clause", - "homepage": "http://github.com/shelljs/shelljs", - "main": "./shell.js", - "exports": { - ".": "./shell.js", - "./global": "./global.js", - "./global.js": "./global.js", - "./make": "./make.js", - "./make.js": "./make.js", - "./package": "./package.json", - "./package.json": "./package.json", - "./plugin": "./plugin.js", - "./plugin.js": "./plugin.js" - }, - "files": [ - "global.js", - "make.js", - "plugin.js", - "shell.js", - "src" - ], - "scripts": { - "check-node-support": "node scripts/check-node-support", - "posttest": "npm run lint", - "test": "ava", - "test-with-coverage": "nyc --reporter=text --reporter=lcov ava", - "gendocs": "node scripts/generate-docs", - "lint": "eslint .", - "after-travis": "travis-check-changes", - "changelog": "shelljs-changelog", - "release:major": "shelljs-release major", - "release:minor": "shelljs-release minor", - "release:patch": "shelljs-release patch" - }, - "dependencies": { - "execa": "^5.1.1", - "fast-glob": "^3.3.2" - }, - "ava": { - "serial": true, - "workerThreads": false, - "powerAssert": false, - "files": [ - "test/*.js" - ], - "helpers": [ - "test/resources/**", - "test/utils/**" - ] - }, - "devDependencies": { - "ava": "^6.2.0", - "chalk": "^4.1.2", - "coffee-script": "^1.12.7", - "eslint": "^8.2.0", - "eslint-config-airbnb-base": "^15.0.0", - "eslint-plugin-import": "^2.31.0", - "js-yaml": "^4.1.0", - "nyc": "^17.1.0", - "shelljs-changelog": "^0.2.6", - "shelljs-release": "^0.5.3", - "shx": "^0.4.0", - "travis-check-changes": "^0.5.1" - }, - "engines": { - "node": ">=18" - } -} diff --git a/node_modules/shelljs/plugin.js b/node_modules/shelljs/plugin.js deleted file mode 100644 index 2e15850..0000000 --- a/node_modules/shelljs/plugin.js +++ /dev/null @@ -1,16 +0,0 @@ -// Various utilities exposed to plugins - -require('./shell'); // Create the ShellJS instance (mandatory) - -var common = require('./src/common'); - -var exportedAttributes = [ - 'error', // For signaling errors from within commands - 'parseOptions', // For custom option parsing - 'readFromPipe', // For commands with the .canReceivePipe attribute - 'register', // For registering plugins -]; - -exportedAttributes.forEach(function (attr) { - exports[attr] = common[attr]; -}); diff --git a/node_modules/shelljs/shell.js b/node_modules/shelljs/shell.js deleted file mode 100644 index 8a3a67d..0000000 --- a/node_modules/shelljs/shell.js +++ /dev/null @@ -1,216 +0,0 @@ -// -// ShellJS -// Unix shell commands on top of Node's API -// -// Copyright (c) 2012 Artur Adib -// http://github.com/shelljs/shelljs -// - -var common = require('./src/common'); - -module.exports = common.shell; - -//@ -//@ All commands run synchronously, unless otherwise stated. -//@ All commands accept standard bash globbing characters (`*`, `?`, etc.), -//@ compatible with [`fast-glob`](https://www.npmjs.com/package/fast-glob). 
-//@ -//@ For less-commonly used commands and features, please check out our [wiki -//@ page](https://github.com/shelljs/shelljs/wiki). -//@ - -// Include the docs for all the default commands -//@commands - -// Load all default commands. We import these for their side effect of loading -// using the plugin architecture via `common.register()`. -require('./src/cat'); -require('./src/cd'); -require('./src/chmod'); -require('./src/cmd'); -require('./src/cp'); -require('./src/dirs'); -require('./src/echo'); -require('./src/exec'); -require('./src/exec-child'); // A hint to the bundler to keep exec-child.js -require('./src/find'); -require('./src/grep'); -require('./src/head'); -require('./src/ln'); -require('./src/ls'); -require('./src/mkdir'); -require('./src/mv'); -require('./src/popd'); -require('./src/pushd'); -require('./src/pwd'); -require('./src/rm'); -require('./src/sed'); -require('./src/set'); -require('./src/sort'); -require('./src/tail'); -require('./src/tempdir'); -require('./src/test'); -require('./src/to'); -require('./src/toEnd'); -require('./src/touch'); -require('./src/uniq'); -require('./src/which'); - -//@ -//@ ### exit(code) -//@ -//@ Exits the current process with the given exit `code`. -module.exports.exit = function exit(code) { - common.state.error = null; - common.state.errorCode = 0; - if (code) { - common.error('exit', { - continue: true, - code, - prefix: '', - silent: true, - fatal: false, - }); - process.exit(code); - } else { - process.exit(); - } -}; - -//@include ./src/error.js -module.exports.error = require('./src/error'); - -//@include ./src/errorCode.js -module.exports.errorCode = require('./src/errorCode'); - -//@include ./src/common.js -module.exports.ShellString = common.ShellString; - -//@ -//@ ### env['VAR_NAME'] -//@ -//@ Object containing environment variables (both getter and setter). Shortcut -//@ to `process.env`. -module.exports.env = process.env; - -//@ -//@ ### Pipes -//@ -//@ Examples: -//@ -//@ ```javascript -//@ grep('foo', 'file1.txt', 'file2.txt').sed(/o/g, 'a').to('output.txt'); -//@ echo("files with o's in the name:\n" + ls().grep('o')); -//@ cat('test.js').exec('node'); // pipe to exec() call -//@ ``` -//@ -//@ Commands can send their output to another command in a pipe-like fashion. -//@ `sed`, `grep`, `cat`, `exec`, `to`, and `toEnd` can appear on the right-hand -//@ side of a pipe. Pipes can be chained. - -//@ -//@ ## Configuration -//@ - -module.exports.config = common.config; - -//@ -//@ ### config.silent -//@ -//@ Example: -//@ -//@ ```javascript -//@ var sh = require('shelljs'); -//@ var silentState = sh.config.silent; // save old silent state -//@ sh.config.silent = true; -//@ /* ... */ -//@ sh.config.silent = silentState; // restore old silent state -//@ ``` -//@ -//@ Suppresses all command output if `true`, except for `echo()` calls. -//@ Default is `false`. - -//@ -//@ ### config.fatal -//@ -//@ Example: -//@ -//@ ```javascript -//@ require('shelljs/global'); -//@ config.fatal = true; // or set('-e'); -//@ cp('this_file_does_not_exist', '/dev/null'); // throws Error here -//@ /* more commands... */ -//@ ``` -//@ -//@ If `true`, the script will throw a Javascript error when any shell.js -//@ command encounters an error. Default is `false`. This is analogous to -//@ Bash's `set -e`. 
- -//@ -//@ ### config.verbose -//@ -//@ Example: -//@ -//@ ```javascript -//@ config.verbose = true; // or set('-v'); -//@ cd('dir/'); -//@ rm('-rf', 'foo.txt', 'bar.txt'); -//@ exec('echo hello'); -//@ ``` -//@ -//@ Will print each command as follows: -//@ -//@ ``` -//@ cd dir/ -//@ rm -rf foo.txt bar.txt -//@ exec echo hello -//@ ``` - -//@ -//@ ### config.globOptions (deprecated) -//@ -//@ **Deprecated**: we recommend that you do not edit `config.globOptions`. -//@ Support for this configuration option may be changed or removed in a future -//@ ShellJS release. -//@ -//@ **Breaking change**: ShellJS v0.8.x uses `node-glob`. Starting with ShellJS -//@ v0.9.x, `config.globOptions` is compatible with `fast-glob`. -//@ -//@ Example: -//@ -//@ ```javascript -//@ config.globOptions = {nodir: true}; -//@ ``` -//@ -//@ `config.globOptions` changes how ShellJS expands glob (wildcard) -//@ expressions. See -//@ [fast-glob](https://github.com/mrmlnc/fast-glob?tab=readme-ov-file#options-3) -//@ for available options. Be aware that modifying `config.globOptions` **may -//@ break ShellJS functionality.** - -//@ -//@ ### config.reset() -//@ -//@ Example: -//@ -//@ ```javascript -//@ var shell = require('shelljs'); -//@ // Make changes to shell.config, and do stuff... -//@ /* ... */ -//@ shell.config.reset(); // reset to original state -//@ // Do more stuff, but with original settings -//@ /* ... */ -//@ ``` -//@ -//@ Reset `shell.config` to the defaults: -//@ -//@ ```javascript -//@ { -//@ fatal: false, -//@ globOptions: {}, -//@ maxdepth: 255, -//@ noglob: false, -//@ silent: false, -//@ verbose: false, -//@ } -//@ ``` diff --git a/node_modules/shelljs/src/cat.js b/node_modules/shelljs/src/cat.js deleted file mode 100644 index ca264a9..0000000 --- a/node_modules/shelljs/src/cat.js +++ /dev/null @@ -1,76 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('cat', _cat, { - canReceivePipe: true, - cmdOptions: { - 'n': 'number', - }, -}); - -//@ -//@ ### cat([options,] file [, file ...]) -//@ ### cat([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-n`: number all output lines -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var str = cat('file*.txt'); -//@ var str = cat('file1', 'file2'); -//@ var str = cat(['file1', 'file2']); // same as above -//@ ``` -//@ -//@ Returns a [ShellString](#shellstringstr) containing the given file, or a -//@ concatenated string containing the files if more than one file is given (a -//@ new line character is introduced between each file). -function _cat(options, files) { - var cat = common.readFromPipe(); - - if (!files && !cat) common.error('no paths given'); - - files = [].slice.call(arguments, 1); - - files.forEach(function (file) { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file); - } else if (common.statFollowLinks(file).isDirectory()) { - common.error(file + ': Is a directory'); - } - - cat += fs.readFileSync(file, 'utf8'); - }); - - if (options.number) { - cat = addNumbers(cat); - } - - return cat; -} -module.exports = _cat; - -function addNumbers(cat) { - var lines = cat.split('\n'); - var lastLine = lines.pop(); - - lines = lines.map(function (line, i) { - return numberedLine(i + 1, line); - }); - - if (lastLine.length) { - lastLine = numberedLine(lines.length + 1, lastLine); - } - lines.push(lastLine); - - return lines.join('\n'); -} - -function numberedLine(n, line) { - // GNU cat use six pad start number + tab. 
See http://lingrok.org/xref/coreutils/src/cat.c#57 - // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/padStart - var number = (' ' + n).slice(-6) + '\t'; - return number + line; -} diff --git a/node_modules/shelljs/src/cd.js b/node_modules/shelljs/src/cd.js deleted file mode 100644 index 1c6e73f..0000000 --- a/node_modules/shelljs/src/cd.js +++ /dev/null @@ -1,40 +0,0 @@ -var os = require('os'); -var common = require('./common'); - -common.register('cd', _cd, {}); - -//@ -//@ ### cd([dir]) -//@ -//@ Changes to directory `dir` for the duration of the script. Changes to home -//@ directory if no argument is supplied. Returns a -//@ [ShellString](#shellstringstr) to indicate success or failure. -function _cd(options, dir) { - if (!dir) dir = os.homedir(); - - if (dir === '-') { - if (!process.env.OLDPWD) { - common.error('could not find previous directory'); - } else { - dir = process.env.OLDPWD; - } - } - - try { - var curDir = process.cwd(); - process.chdir(dir); - process.env.OLDPWD = curDir; - } catch (e) { - // something went wrong, let's figure out the error - var err; - try { - common.statFollowLinks(dir); // if this succeeds, it must be some sort of file - err = 'not a directory: ' + dir; - } catch (e2) { - err = 'no such file or directory: ' + dir; - } - if (err) common.error(err); - } - return ''; -} -module.exports = _cd; diff --git a/node_modules/shelljs/src/chmod.js b/node_modules/shelljs/src/chmod.js deleted file mode 100644 index b930cc7..0000000 --- a/node_modules/shelljs/src/chmod.js +++ /dev/null @@ -1,222 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -var PERMS = (function (base) { - return { - OTHER_EXEC: base.EXEC, - OTHER_WRITE: base.WRITE, - OTHER_READ: base.READ, - - GROUP_EXEC: base.EXEC << 3, - GROUP_WRITE: base.WRITE << 3, - GROUP_READ: base.READ << 3, - - OWNER_EXEC: base.EXEC << 6, - OWNER_WRITE: base.WRITE << 6, - OWNER_READ: base.READ << 6, - - // Literal octal numbers are apparently not allowed in "strict" javascript. - STICKY: parseInt('01000', 8), - SETGID: parseInt('02000', 8), - SETUID: parseInt('04000', 8), - - TYPE_MASK: parseInt('0770000', 8), - }; -}({ - EXEC: 1, - WRITE: 2, - READ: 4, -})); - -common.register('chmod', _chmod, { -}); - -//@ -//@ ### chmod([options,] octal_mode || octal_string, file) -//@ ### chmod([options,] symbolic_mode, file) -//@ -//@ Available options: -//@ -//@ + `-v`: output a diagnostic for every file processed//@ -//@ + `-c`: like verbose, but report only when a change is made//@ -//@ + `-R`: change files and directories recursively//@ -//@ -//@ Examples: -//@ -//@ ```javascript -//@ chmod(755, '/Users/brandon'); -//@ chmod('755', '/Users/brandon'); // same as above -//@ chmod('u+x', '/Users/brandon'); -//@ chmod('-R', 'a-w', '/Users/brandon'); -//@ ``` -//@ -//@ Alters the permissions of a file or directory by either specifying the -//@ absolute permissions in octal form or expressing the changes in symbols. -//@ This command tries to mimic the POSIX behavior as much as possible. -//@ Notable exceptions: -//@ -//@ + In symbolic modes, `a-r` and `-r` are identical. No consideration is -//@ given to the `umask`. -//@ + There is no "quiet" option, since default behavior is to run silent. -//@ + Windows OS uses a very different permission model than POSIX. `chmod()` -//@ does its best on Windows, but there are limits to how file permissions can -//@ be set. 
Note that WSL (Windows subsystem for Linux) **does** follow POSIX, -//@ so cross-platform compatibility should not be a concern there. -//@ -//@ Returns a [ShellString](#shellstringstr) indicating success or failure. -function _chmod(options, mode, filePattern) { - if (!filePattern) { - if (options.length > 0 && options.charAt(0) === '-') { - // Special case where the specified file permissions started with - to subtract perms, which - // get picked up by the option parser as command flags. - // If we are down by one argument and options starts with -, shift everything over. - [].unshift.call(arguments, ''); - } else { - common.error('You must specify a file.'); - } - } - - options = common.parseOptions(options, { - 'R': 'recursive', - 'c': 'changes', - 'v': 'verbose', - }); - - filePattern = [].slice.call(arguments, 2); - - var files; - - // TODO: replace this with a call to common.expand() - if (options.recursive) { - files = []; - filePattern.forEach(function addFile(expandedFile) { - var stat = common.statNoFollowLinks(expandedFile); - - if (!stat.isSymbolicLink()) { - files.push(expandedFile); - - if (stat.isDirectory()) { // intentionally does not follow symlinks. - fs.readdirSync(expandedFile).forEach(function (child) { - addFile(expandedFile + '/' + child); - }); - } - } - }); - } else { - files = filePattern; - } - - files.forEach(function innerChmod(file) { - file = path.resolve(file); - if (!fs.existsSync(file)) { - common.error('File not found: ' + file); - } - - // When recursing, don't follow symlinks. - if (options.recursive && common.statNoFollowLinks(file).isSymbolicLink()) { - return; - } - - var stat = common.statFollowLinks(file); - var isDir = stat.isDirectory(); - var perms = stat.mode; - var type = perms & PERMS.TYPE_MASK; - - var newPerms = perms; - - if (Number.isNaN(parseInt(mode, 8))) { - // parse options - mode.split(',').forEach(function (symbolicMode) { - var pattern = /([ugoa]*)([=+-])([rwxXst]*)/i; - var matches = pattern.exec(symbolicMode); - - if (matches) { - var applyTo = matches[1]; - var operator = matches[2]; - var change = matches[3]; - - var changeOwner = applyTo.includes('u') || applyTo === 'a' || applyTo === ''; - var changeGroup = applyTo.includes('g') || applyTo === 'a' || applyTo === ''; - var changeOther = applyTo.includes('o') || applyTo === 'a' || applyTo === ''; - - var changeRead = change.includes('r'); - var changeWrite = change.includes('w'); - var changeExec = change.includes('x'); - var changeExecDir = change.includes('X'); - var changeSticky = change.includes('t'); - var changeSetuid = change.includes('s'); - - if (changeExecDir && isDir) { - changeExec = true; - } - - var mask = 0; - if (changeOwner) { - mask |= (changeRead ? PERMS.OWNER_READ : 0) + (changeWrite ? PERMS.OWNER_WRITE : 0) + (changeExec ? PERMS.OWNER_EXEC : 0) + (changeSetuid ? PERMS.SETUID : 0); - } - if (changeGroup) { - mask |= (changeRead ? PERMS.GROUP_READ : 0) + (changeWrite ? PERMS.GROUP_WRITE : 0) + (changeExec ? PERMS.GROUP_EXEC : 0) + (changeSetuid ? PERMS.SETGID : 0); - } - if (changeOther) { - mask |= (changeRead ? PERMS.OTHER_READ : 0) + (changeWrite ? PERMS.OTHER_WRITE : 0) + (changeExec ? PERMS.OTHER_EXEC : 0); - } - - // Sticky bit is special - it's not tied to user, group or other. 
- if (changeSticky) { - mask |= PERMS.STICKY; - } - - switch (operator) { - case '+': - newPerms |= mask; - break; - - case '-': - newPerms &= ~mask; - break; - - case '=': - newPerms = type + mask; - - // According to POSIX, when using = to explicitly set the - // permissions, setuid and setgid can never be cleared. - if (common.statFollowLinks(file).isDirectory()) { - newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; - } - break; - default: - common.error('Could not recognize operator: `' + operator + '`'); - } - - if (options.verbose) { - console.log(file + ' -> ' + newPerms.toString(8)); - } - - if (perms !== newPerms) { - if (!options.verbose && options.changes) { - console.log(file + ' -> ' + newPerms.toString(8)); - } - fs.chmodSync(file, newPerms); - perms = newPerms; // for the next round of changes! - } - } else { - common.error('Invalid symbolic mode change: ' + symbolicMode); - } - }); - } else { - // they gave us a full number - newPerms = type + parseInt(mode, 8); - - // POSIX rules are that setuid and setgid can only be added using numeric - // form, but not cleared. - if (common.statFollowLinks(file).isDirectory()) { - newPerms |= (PERMS.SETUID + PERMS.SETGID) & perms; - } - - fs.chmodSync(file, newPerms); - } - }); - return ''; -} -module.exports = _chmod; diff --git a/node_modules/shelljs/src/cmd.js b/node_modules/shelljs/src/cmd.js deleted file mode 100644 index a00d6c4..0000000 --- a/node_modules/shelljs/src/cmd.js +++ /dev/null @@ -1,138 +0,0 @@ -var execa = require('execa'); -var common = require('./common'); - -var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; -var COMMAND_NOT_FOUND_ERROR_CODE = 127; - -common.register('cmd', _cmd, { - cmdOptions: null, - globStart: 1, - canReceivePipe: true, - wrapOutput: true, -}); - -function isCommandNotFound(execaResult) { - if (process.platform === 'win32') { - var str = 'is not recognized as an internal or external command'; - return execaResult.exitCode && execaResult.stderr.includes(str); - } - return execaResult.failed && execaResult.code === 'ENOENT'; -} - -function isExecaInternalError(result) { - if (typeof result.stdout !== 'string') return true; - if (typeof result.stderr !== 'string') return true; - if (typeof result.exitCode !== 'number') return true; - if (result.exitCode === 0 && result.failed) return true; - // Otherwise assume this executed correctly. The command may still have exited - // with non-zero status, but that's not due to anything execa did. - return false; -} - -//@ -//@ ### cmd(arg1[, arg2, ...] [, options]) -//@ -//@ Available options: -//@ -//@ + `cwd: directoryPath`: change the current working directory only for this -//@ cmd() invocation. -//@ + `maxBuffer: num`: Raise or decrease the default buffer size for -//@ stdout/stderr. -//@ + `timeout`: Change the default timeout. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var version = cmd('node', '--version').stdout; -//@ cmd('git', 'commit', '-am', `Add suport for node ${version}`); -//@ console.log(cmd('echo', '1st arg', '2nd arg', '3rd arg').stdout) -//@ console.log(cmd('echo', 'this handles ;, |, &, etc. as literal characters').stdout) -//@ ``` -//@ -//@ Executes the given command synchronously. This is intended as an easier -//@ alternative for [exec()](#execcommand--options--callback), with better -//@ security around globbing, comamnd injection, and variable expansion. This is -//@ guaranteed to only run one external command, and won't give special -//@ treatment for any shell characters (ex. 
this treats `|` as a literal -//@ character, not as a shell pipeline). -//@ This returns a [ShellString](#shellstringstr). -//@ -//@ By default, this performs globbing on all platforms, but you can disable -//@ this with `set('-f')`. -//@ -//@ This **does not** support asynchronous mode. If you need asynchronous -//@ command execution, check out [execa](https://www.npmjs.com/package/execa) or -//@ the node builtin `child_process.execFile()` instead. -function _cmd(options, command, commandArgs, userOptions) { - if (!command) { - common.error('Must specify a non-empty string as a command'); - } - - // `options` will usually not have a value: it's added by our commandline flag - // parsing engine. - commandArgs = [].slice.call(arguments, 2); - - // `userOptions` may or may not be provided. We need to check the last - // argument. If it's an object, assume it's meant to be passed as - // userOptions (since ShellStrings are already flattened to strings). - if (commandArgs.length === 0) { - userOptions = {}; - } else { - var lastArg = commandArgs.pop(); - if (common.isObject(lastArg)) { - userOptions = lastArg; - } else { - userOptions = {}; - commandArgs.push(lastArg); - } - } - - var pipe = common.readFromPipe(); - - // Some of our defaults differ from execa's defaults. These can be overridden - // by the user. - var defaultOptions = { - maxBuffer: DEFAULT_MAXBUFFER_SIZE, - stripFinalNewline: false, // Preserve trailing newlines for consistency with unix. - reject: false, // Use ShellJS's error handling system. - }; - - // For other options, we forbid the user from overriding them (either for - // correctness or security). - var requiredOptions = { - input: pipe, - shell: false, - }; - - var execaOptions = - Object.assign(defaultOptions, userOptions, requiredOptions); - - var result = execa.sync(command, commandArgs, execaOptions); - var stdout; - var stderr; - var code; - if (isCommandNotFound(result)) { - // This can happen if `command` is not an executable binary, or possibly - // under other conditions. - stdout = ''; - stderr = "'" + command + "': command not found"; - code = COMMAND_NOT_FOUND_ERROR_CODE; - } else if (isExecaInternalError(result)) { - // Catch-all: execa tried to run `command` but it encountered some error - // (ex. maxBuffer, timeout). - stdout = result.stdout || ''; - stderr = result.stderr || - `'${command}' encountered an error during execution`; - code = result.exitCode !== undefined && result.exitCode > 0 ? result.exitCode : 1; - } else { - // Normal exit: execa was able to execute `command` and get a return value. - stdout = result.stdout.toString(); - stderr = result.stderr.toString(); - code = result.exitCode; - } - - // Pass `continue: true` so we can specify a value for stdout. 
- if (code) common.error(stderr, code, { silent: true, continue: true }); - return new common.ShellString(stdout, stderr, code); -} -module.exports = _cmd; diff --git a/node_modules/shelljs/src/common.js b/node_modules/shelljs/src/common.js deleted file mode 100644 index b9ffeda..0000000 --- a/node_modules/shelljs/src/common.js +++ /dev/null @@ -1,545 +0,0 @@ -// Ignore warning about 'new String()' and use of the Buffer constructor -/* eslint no-new-wrappers: "off", - no-buffer-constructor: "off" */ - -'use strict'; - -var os = require('os'); -var fs = require('fs'); -var glob = require('fast-glob'); - -var shell = {}; -exports.shell = shell; - -var shellMethods = Object.create(shell); - -exports.extend = Object.assign; - -// Check if we're running under electron -var isElectron = Boolean(process.versions.electron); - -// Module globals (assume no execPath by default) -var DEFAULT_CONFIG = { - fatal: false, - globOptions: {}, - maxdepth: 255, - noglob: false, - silent: false, - verbose: false, - execPath: null, - bufLength: 64 * 1024, // 64KB -}; - -var config = { - reset() { - Object.assign(this, DEFAULT_CONFIG); - if (!isElectron) { - this.execPath = process.execPath; - } - }, - resetForTesting() { - this.reset(); - this.silent = true; - }, -}; - -config.reset(); -exports.config = config; - -// Note: commands should generally consider these as read-only values. -var state = { - error: null, - errorCode: 0, - currentCmd: 'shell.js', -}; -exports.state = state; - -delete process.env.OLDPWD; // initially, there's no previous directory - -// Reliably test if something is any sort of javascript object -function isObject(a) { - return typeof a === 'object' && a !== null; -} -exports.isObject = isObject; - -function log() { - /* istanbul ignore next */ - if (!config.silent) { - console.error.apply(console, arguments); - } -} -exports.log = log; - -// Converts strings to be equivalent across all platforms. Primarily responsible -// for making sure we use '/' instead of '\' as path separators, but this may be -// expanded in the future if necessary -function convertErrorOutput(msg) { - if (typeof msg !== 'string') { - throw new TypeError('input must be a string'); - } - return msg.replace(/\\/g, '/'); -} -exports.convertErrorOutput = convertErrorOutput; - -// An exception class to help propagate command errors (e.g., non-zero exit -// status) up to the top-level. {@param value} should be a ShellString. -class CommandError extends Error { - constructor(value) { - super(value.toString()); - this.returnValue = value; - } -} -exports.CommandError = CommandError; // visible for testing - -// Shows error message. Throws if fatal is true (defaults to config.fatal, overridable with options.fatal) -function error(msg, _code, options) { - // Validate input - if (typeof msg !== 'string') throw new Error('msg must be a string'); - - var DEFAULT_OPTIONS = { - continue: false, - code: 1, - prefix: state.currentCmd + ': ', - silent: false, - fatal: config.fatal, - }; - - if (typeof _code === 'number' && isObject(options)) { - options.code = _code; - } else if (isObject(_code)) { // no 'code' - options = _code; - } else if (typeof _code === 'number') { // no 'options' - options = { code: _code }; - } else if (typeof _code !== 'number') { // only 'msg' - options = {}; - } - options = Object.assign({}, DEFAULT_OPTIONS, options); - - if (!state.errorCode) state.errorCode = options.code; - - var logEntry = convertErrorOutput(options.prefix + msg); - state.error = state.error ? 
state.error + '\n' : ''; - state.error += logEntry; - - // Throw an error, or log the entry - if (options.fatal) { - var err = new Error(logEntry); - err.code = options.code; - throw err; - } - if (msg.length > 0 && !options.silent) log(logEntry); - - if (!options.continue) { - throw new CommandError(new ShellString('', state.error, state.errorCode)); - } -} -exports.error = error; - -//@ -//@ ### ShellString(str) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var foo = new ShellString('hello world'); -//@ ``` -//@ -//@ This is a dedicated type returned by most ShellJS methods, which wraps a -//@ string (or array) value. This has all the string (or array) methods, but -//@ also exposes extra methods: [`.to()`](#shellstringprototypetofile), -//@ [`.toEnd()`](#shellstringprototypetoendfile), and all the pipe-able methods -//@ (ex. `.cat()`, `.grep()`, etc.). This can be easily converted into a string -//@ by calling `.toString()`. -//@ -//@ This type also exposes the corresponding command's stdout, stderr, and -//@ return status code via the `.stdout` (string), `.stderr` (string), and -//@ `.code` (number) properties respectively. -function ShellString(stdout, stderr, code) { - var that; - if (stdout instanceof Array) { - that = stdout; - that.stdout = stdout.join('\n'); - if (stdout.length > 0) that.stdout += '\n'; - } else { - that = new String(stdout); - that.stdout = stdout; - } - that.stderr = stderr; - that.code = code; - // A list of all commands that can appear on the right-hand side of a pipe - // (populated by calls to common.wrap()) - pipeMethods.forEach(function (cmd) { - that[cmd] = shellMethods[cmd].bind(that); - }); - return that; -} - -exports.ShellString = ShellString; - -// Returns {'alice': true, 'bob': false} when passed a string and dictionary as follows: -// parseOptions('-a', {'a':'alice', 'b':'bob'}); -// Returns {'reference': 'string-value', 'bob': false} when passed two dictionaries of the form: -// parseOptions({'-r': 'string-value'}, {'r':'reference', 'b':'bob'}); -// Throws an error when passed a string that does not start with '-': -// parseOptions('a', {'a':'alice'}); // throws -function parseOptions(opt, map, errorOptions) { - errorOptions = errorOptions || {}; - // Validate input - if (typeof opt !== 'string' && !isObject(opt)) { - throw new TypeError('options must be strings or key-value pairs'); - } else if (!isObject(map)) { - throw new TypeError('parseOptions() internal error: map must be an object'); - } else if (!isObject(errorOptions)) { - throw new TypeError( - 'parseOptions() internal error: errorOptions must be object', - ); - } - - if (opt === '--') { - // This means there are no options. - return {}; - } - - // All options are false by default - var options = {}; - Object.keys(map).forEach(function (letter) { - var optName = map[letter]; - if (optName[0] !== '!') { - options[optName] = false; - } - }); - - if (opt === '') return options; // defaults - - if (typeof opt === 'string') { - if (opt[0] !== '-') { - throw new Error("Options string must start with a '-'"); - } - - // e.g. 
chars = ['R', 'f'] - var chars = opt.slice(1).split(''); - - chars.forEach(function (c) { - if (c in map) { - var optionName = map[c]; - if (optionName[0] === '!') { - options[optionName.slice(1)] = false; - } else { - options[optionName] = true; - } - } else { - error('option not recognized: ' + c, errorOptions); - } - }); - } else { // opt is an Object - Object.keys(opt).forEach(function (key) { - if (key[0] === '-') { - // key is a string of the form '-r', '-d', etc. - var c = key[1]; - if (c in map) { - var optionName = map[c]; - options[optionName] = opt[key]; // assign the given value - } else { - error('option not recognized: ' + c, errorOptions); - } - } else if (key in options) { - // key is a "long option", so it should be the same - options[key] = opt[key]; - } else { - error('option not recognized: {' + key + ':...}', errorOptions); - } - }); - } - return options; -} -exports.parseOptions = parseOptions; - -function globOptions() { - // These options are just to make fast-glob be compatible with POSIX (bash) - // wildcard behavior. - var defaultGlobOptions = { - onlyFiles: false, - followSymbolicLinks: false, - }; - - var newGlobOptions = Object.assign({}, config.globOptions); - var optionRenames = { - // node-glob's 'nodir' is not quote the same as fast-glob's 'onlyFiles'. - // Compatibility for this is implemented at the call site. - mark: 'markDirectories', - matchBase: 'baseNameMatch', - }; - Object.keys(optionRenames).forEach(function (oldKey) { - var newKey = optionRenames[oldKey]; - if (oldKey in config.globOptions) { - newGlobOptions[newKey] = config.globOptions[oldKey]; - } - }); - var invertedOptionRenames = { - nobrace: 'braceExpansion', - noglobstar: 'globstar', - noext: 'extglob', - nocase: 'caseSensitiveMatch', - }; - Object.keys(invertedOptionRenames).forEach(function (oldKey) { - var newKey = invertedOptionRenames[oldKey]; - if (oldKey in config.globOptions) { - newGlobOptions[newKey] = !config.globOptions[oldKey]; - } - }); - return Object.assign({}, defaultGlobOptions, newGlobOptions); -} - -// Expands wildcards with matching (ie. existing) file names. -// For example: -// expand(['file*.js']) = ['file1.js', 'file2.js', ...] -// (if the files 'file1.js', 'file2.js', etc, exist in the current dir) -function expand(list) { - if (!Array.isArray(list)) { - throw new TypeError('must be an array'); - } - var expanded = []; - list.forEach(function (listEl) { - // Don't expand non-strings - if (typeof listEl !== 'string') { - expanded.push(listEl); - } else { - var ret; - var globOpts = globOptions(); - try { - ret = glob.sync(listEl, globOpts); - } catch (e) { - // if glob fails, interpret the string literally - ret = [listEl]; - } - // if nothing matched, interpret the string literally - ret = ret.length > 0 ? ret.sort() : [listEl]; - if (globOpts.nodir) { - ret = ret.filter(function (file) { - return !statNoFollowLinks(file).isDirectory(); - }); - } - expanded = expanded.concat(ret); - } - }); - return expanded; -} -exports.expand = expand; - -// Normalizes Buffer creation, using Buffer.alloc if possible. -// Also provides a good default buffer length for most use cases. -var buffer = typeof Buffer.alloc === 'function' ? - function (len) { - return Buffer.alloc(len || config.bufLength); - } : - function (len) { - return new Buffer(len || config.bufLength); - }; -exports.buffer = buffer; - -// Normalizes _unlinkSync() across platforms to match Unix behavior, i.e. 
-// file can be unlinked even if it's read-only, see https://github.com/joyent/node/issues/3006 -function unlinkSync(file) { - try { - fs.unlinkSync(file); - } catch (e) { - // Try to override file permission - /* istanbul ignore next */ - if (e.code === 'EPERM') { - fs.chmodSync(file, '0666'); - fs.unlinkSync(file); - } else { - throw e; - } - } -} -exports.unlinkSync = unlinkSync; - -// wrappers around common.statFollowLinks and common.statNoFollowLinks that clarify intent -// and improve readability -function statFollowLinks() { - return fs.statSync.apply(fs, arguments); -} -exports.statFollowLinks = statFollowLinks; - -function statNoFollowLinks() { - return fs.lstatSync.apply(fs, arguments); -} -exports.statNoFollowLinks = statNoFollowLinks; - -// e.g. 'shelljs_a5f185d0443ca...' -function randomFileName() { - function randomHash(count) { - if (count === 1) { - return parseInt(16 * Math.random(), 10).toString(16); - } - var hash = ''; - for (var i = 0; i < count; i++) { - hash += randomHash(1); - } - return hash; - } - - return 'shelljs_' + randomHash(20); -} -exports.randomFileName = randomFileName; - -// Common wrapper for all Unix-like commands that performs glob expansion, -// command-logging, and other nice things -function wrap(cmd, fn, options) { - options = options || {}; - return function () { - var retValue = null; - - state.currentCmd = cmd; - state.error = null; - state.errorCode = 0; - - try { - var args = [].slice.call(arguments, 0); - - // Log the command to stderr, if appropriate - if (config.verbose) { - console.error.apply(console, [cmd].concat(args)); - } - - // If this is coming from a pipe, let's set the pipedValue (otherwise, set - // it to the empty string) - state.pipedValue = (this && typeof this.stdout === 'string') ? this.stdout : ''; - - if (options.unix === false) { // this branch is for exec() - retValue = fn.apply(this, args); - } else { // and this branch is for everything else - if (isObject(args[0]) && args[0].constructor.name === 'Object') { - // a no-op, allowing the syntax `touch({'-r': file}, ...)` - } else if (args.length === 0 || typeof args[0] !== 'string' || args[0].length <= 1 || args[0][0] !== '-') { - args.unshift(''); // only add dummy option if '-option' not already present - } - - // flatten out arrays that are arguments, to make the syntax: - // `cp([file1, file2, file3], dest);` - // equivalent to: - // `cp(file1, file2, file3, dest);` - args = args.reduce(function (accum, cur) { - if (Array.isArray(cur)) { - return accum.concat(cur); - } - accum.push(cur); - return accum; - }, []); - - // Convert ShellStrings (basically just String objects) to regular strings - args = args.map(function (arg) { - if (isObject(arg) && arg.constructor.name === 'String') { - return arg.toString(); - } - return arg; - }); - - // Expand the '~' if appropriate - var homeDir = os.homedir(); - args = args.map(function (arg) { - if (typeof arg === 'string' && arg.slice(0, 2) === '~/' || arg === '~') { - return arg.replace(/^~/, homeDir); - } - return arg; - }); - - // Perform glob-expansion on all arguments after globStart, but preserve - // the arguments before it (like regexes for sed and grep) - if (!config.noglob && options.allowGlobbing === true) { - args = args.slice(0, options.globStart).concat(expand(args.slice(options.globStart))); - } - - try { - // parse options if options are provided - if (isObject(options.cmdOptions)) { - args[0] = parseOptions(args[0], options.cmdOptions); - } - - retValue = fn.apply(this, args); - } catch (e) { - /* istanbul 
ignore else */ - if (e instanceof CommandError) { - retValue = e.returnValue; - } else { - throw e; // this is probably a bug that should be thrown up the call stack - } - } - } - } catch (e) { - /* istanbul ignore next */ - if (!state.error) { - // If state.error hasn't been set it's an error thrown by Node, not us - probably a bug... - e.name = 'ShellJSInternalError'; - throw e; - } - if (config.fatal || options.handlesFatalDynamically) throw e; - } - - if (options.wrapOutput && - (typeof retValue === 'string' || Array.isArray(retValue))) { - retValue = new ShellString(retValue, state.error, state.errorCode); - } - - state.currentCmd = 'shell.js'; - return retValue; - }; -} // wrap -exports.wrap = wrap; - -// This returns all the input that is piped into the current command (or the -// empty string, if this isn't on the right-hand side of a pipe -function _readFromPipe() { - return state.pipedValue; -} -exports.readFromPipe = _readFromPipe; - -var DEFAULT_WRAP_OPTIONS = { - allowGlobbing: true, - canReceivePipe: false, - cmdOptions: null, - globStart: 1, - handlesFatalDynamically: false, - pipeOnly: false, - wrapOutput: true, - unix: true, -}; - -// This is populated during plugin registration -var pipeMethods = []; - -// Register a new ShellJS command -function _register(name, implementation, wrapOptions) { - wrapOptions = wrapOptions || {}; - - // Validate options - Object.keys(wrapOptions).forEach(function (option) { - if (!DEFAULT_WRAP_OPTIONS.hasOwnProperty(option)) { - throw new Error("Unknown option '" + option + "'"); - } - if (typeof wrapOptions[option] !== typeof DEFAULT_WRAP_OPTIONS[option]) { - throw new TypeError("Unsupported type '" + typeof wrapOptions[option] + - "' for option '" + option + "'"); - } - }); - - // If an option isn't specified, use the default - wrapOptions = Object.assign({}, DEFAULT_WRAP_OPTIONS, wrapOptions); - - if (shell.hasOwnProperty(name)) { - throw new Error('Command `' + name + '` already exists'); - } - - if (wrapOptions.pipeOnly) { - wrapOptions.canReceivePipe = true; - shellMethods[name] = wrap(name, implementation, wrapOptions); - } else { - shell[name] = wrap(name, implementation, wrapOptions); - } - - if (wrapOptions.canReceivePipe) { - pipeMethods.push(name); - } -} -exports.register = _register; diff --git a/node_modules/shelljs/src/cp.js b/node_modules/shelljs/src/cp.js deleted file mode 100644 index af4a0a1..0000000 --- a/node_modules/shelljs/src/cp.js +++ /dev/null @@ -1,314 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('cp', _cp, { - cmdOptions: { - 'f': '!no_force', - 'n': 'no_force', - 'u': 'update', - 'R': 'recursive', - 'r': 'recursive', - 'L': 'followsymlink', - 'P': 'noFollowsymlink', - 'p': 'preserve', - }, - wrapOutput: false, -}); - -// Buffered file copy, synchronous -// (Using readFileSync() + writeFileSync() could easily cause a memory overflow -// with large files) -function copyFileSync(srcFile, destFile, options) { - if (!fs.existsSync(srcFile)) { - common.error('copyFileSync: no such file or directory: ' + srcFile); - } - - var isWindows = process.platform === 'win32'; - - // Check the mtimes of the files if the '-u' flag is provided - try { - if (options.update && common.statFollowLinks(srcFile).mtime < fs.statSync(destFile).mtime) { - return; - } - } catch (e) { - // If we're here, destFile probably doesn't exist, so just do a normal copy - } - - if (common.statNoFollowLinks(srcFile).isSymbolicLink() && !options.followsymlink) { - try { - 
common.statNoFollowLinks(destFile); - common.unlinkSync(destFile); // re-link it - } catch (e) { - // it doesn't exist, so no work needs to be done - } - - var symlinkFull = fs.readlinkSync(srcFile); - fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); - } else { - var buf = common.buffer(); - var bufLength = buf.length; - var bytesRead = bufLength; - var pos = 0; - var fdr = null; - var fdw = null; - var srcStat = common.statFollowLinks(srcFile); - - try { - fdr = fs.openSync(srcFile, 'r'); - } catch (e) { - /* istanbul ignore next */ - common.error('copyFileSync: could not read src file (' + srcFile + ')'); - } - - try { - fdw = fs.openSync(destFile, 'w', srcStat.mode); - } catch (e) { - /* istanbul ignore next */ - common.error('copyFileSync: could not write to dest file (code=' + e.code + '):' + destFile); - } - - while (bytesRead === bufLength) { - bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); - fs.writeSync(fdw, buf, 0, bytesRead); - pos += bytesRead; - } - - if (options.preserve) { - fs.fchownSync(fdw, srcStat.uid, srcStat.gid); - // Note: utimesSync does not work (rounds to seconds), but futimesSync has - // millisecond precision. - fs.futimesSync(fdw, srcStat.atime, srcStat.mtime); - } - - fs.closeSync(fdr); - fs.closeSync(fdw); - } -} - -// Recursively copies 'sourceDir' into 'destDir' -// Adapted from https://github.com/ryanmcgrath/wrench-js -// -// Copyright (c) 2010 Ryan McGrath -// Copyright (c) 2012 Artur Adib -// -// Licensed under the MIT License -// http://www.opensource.org/licenses/mit-license.php -function cpdirSyncRecursive(sourceDir, destDir, currentDepth, opts) { - if (!opts) opts = {}; - - // Ensure there is not a run away recursive copy - if (currentDepth >= common.config.maxdepth) return; - currentDepth++; - - var isWindows = process.platform === 'win32'; - - // Create the directory where all our junk is moving to; read the mode/etc. of - // the source directory (we'll set this on the destDir at the end). - var checkDir = common.statFollowLinks(sourceDir); - try { - fs.mkdirSync(destDir); - } catch (e) { - // if the directory already exists, that's okay - if (e.code !== 'EEXIST') throw e; - } - - var files = fs.readdirSync(sourceDir); - - for (var i = 0; i < files.length; i++) { - var srcFile = sourceDir + '/' + files[i]; - var destFile = destDir + '/' + files[i]; - var srcFileStat = common.statNoFollowLinks(srcFile); - - var symlinkFull; - if (opts.followsymlink) { - if (cpcheckcycle(sourceDir, srcFile)) { - // Cycle link found. - console.error('Cycle link found.'); - symlinkFull = fs.readlinkSync(srcFile); - fs.symlinkSync(symlinkFull, destFile, isWindows ? 'junction' : null); - continue; - } - } - if (srcFileStat.isDirectory()) { - /* recursion this thing right on back. */ - cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); - } else if (srcFileStat.isSymbolicLink() && !opts.followsymlink) { - symlinkFull = fs.readlinkSync(srcFile); - try { - common.statNoFollowLinks(destFile); - common.unlinkSync(destFile); // re-link it - } catch (e) { - // it doesn't exist, so no work needs to be done - } - fs.symlinkSync(symlinkFull, destFile, isWindows ? 
'junction' : null); - } else if (srcFileStat.isSymbolicLink() && opts.followsymlink) { - srcFileStat = common.statFollowLinks(srcFile); - if (srcFileStat.isDirectory()) { - cpdirSyncRecursive(srcFile, destFile, currentDepth, opts); - } else { - copyFileSync(srcFile, destFile, opts); - } - } else if (fs.existsSync(destFile) && opts.no_force) { - common.log('skipping existing file: ' + files[i]); - } else { - copyFileSync(srcFile, destFile, opts); - } - } // for files - - // finally change the mode for the newly created directory (otherwise, we - // couldn't add files to a read-only directory). - // var checkDir = common.statFollowLinks(sourceDir); - if (opts.preserve) { - fs.utimesSync(destDir, checkDir.atime, checkDir.mtime); - } - fs.chmodSync(destDir, checkDir.mode); -} // cpdirSyncRecursive - -// Checks if cureent file was created recently -function checkRecentCreated(sources, index) { - var lookedSource = sources[index]; - return sources.slice(0, index).some(function (src) { - return path.basename(src) === path.basename(lookedSource); - }); -} - -function cpcheckcycle(sourceDir, srcFile) { - var srcFileStat = common.statNoFollowLinks(srcFile); - if (srcFileStat.isSymbolicLink()) { - // Do cycle check. For example: - // $ mkdir -p 1/2/3/4 - // $ cd 1/2/3/4 - // $ ln -s ../../3 link - // $ cd ../../../.. - // $ cp -RL 1 copy - var cyclecheck = common.statFollowLinks(srcFile); - if (cyclecheck.isDirectory()) { - var sourcerealpath = fs.realpathSync(sourceDir); - var symlinkrealpath = fs.realpathSync(srcFile); - var re = new RegExp(symlinkrealpath); - if (re.test(sourcerealpath)) { - return true; - } - } - } - return false; -} - -//@ -//@ ### cp([options,] source [, source ...], dest) -//@ ### cp([options,] source_array, dest) -//@ -//@ Available options: -//@ -//@ + `-f`: force (default behavior) -//@ + `-n`: no-clobber -//@ + `-u`: only copy if `source` is newer than `dest` -//@ + `-r`, `-R`: recursive -//@ + `-L`: follow symlinks -//@ + `-P`: don't follow symlinks -//@ + `-p`: preserve file mode, ownership, and timestamps -//@ -//@ Examples: -//@ -//@ ```javascript -//@ cp('file1', 'dir1'); -//@ cp('-R', 'path/to/dir/', '~/newCopy/'); -//@ cp('-Rf', '/tmp/*', '/usr/local/*', '/home/tmp'); -//@ cp('-Rf', ['/tmp/*', '/usr/local/*'], '/home/tmp'); // same as above -//@ ``` -//@ -//@ Copies files. Returns a [ShellString](#shellstringstr) indicating success -//@ or failure. 
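As a brief usage sketch of the `-n` and `-u` flags documented above (the published `shelljs` API is assumed and the file paths are hypothetical):

```javascript
var shell = require('shelljs');

// '-n' (no-clobber): an existing destination file is left untouched.
shell.cp('-n', 'defaults.conf', 'site.conf');

// '-u': copy only when the source is newer than the destination.
var result = shell.cp('-u', 'build/app.js', 'dist/app.js');
if (result.code !== 0) {
  console.log('cp failed: ' + result.stderr);
}
```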
-function _cp(options, sources, dest) { - // If we're missing -R, it actually implies -L (unless -P is explicit) - if (options.followsymlink) { - options.noFollowsymlink = false; - } - if (!options.recursive && !options.noFollowsymlink) { - options.followsymlink = true; - } - - // Get sources, dest - if (arguments.length < 3) { - common.error('missing and/or '); - } else { - sources = [].slice.call(arguments, 1, arguments.length - 1); - dest = arguments[arguments.length - 1]; - } - - var destExists = fs.existsSync(dest); - var destStat = destExists && common.statFollowLinks(dest); - - // Dest is not existing dir, but multiple sources given - if ((!destExists || !destStat.isDirectory()) && sources.length > 1) { - common.error('dest is not a directory (too many sources)'); - } - - // Dest is an existing file, but -n is given - if (destExists && destStat.isFile() && options.no_force) { - return new common.ShellString('', '', 0); - } - - sources.forEach(function (src, srcIndex) { - if (!fs.existsSync(src)) { - if (src === '') src = "''"; // if src was empty string, display empty string - common.error('no such file or directory: ' + src, { continue: true }); - return; // skip file - } - var srcStat = common.statFollowLinks(src); - if (!options.noFollowsymlink && srcStat.isDirectory()) { - if (!options.recursive) { - // Non-Recursive - common.error("omitting directory '" + src + "'", { continue: true }); - } else { - // Recursive - // 'cp /a/source dest' should create 'source' in 'dest' - var newDest = (destStat && destStat.isDirectory()) ? - path.join(dest, path.basename(src)) : - dest; - - try { - common.statFollowLinks(path.dirname(dest)); - cpdirSyncRecursive(src, newDest, 0, options); - } catch (e) { - /* istanbul ignore next */ - common.error("cannot create directory '" + dest + "': No such file or directory"); - } - } - } else { - // If here, src is a file - - // When copying to '/path/dir': - // thisDest = '/path/dir/file1' - var thisDest = dest; - if (destStat && destStat.isDirectory()) { - thisDest = path.normalize(dest + '/' + path.basename(src)); - } - - var thisDestExists = fs.existsSync(thisDest); - if (thisDestExists && checkRecentCreated(sources, srcIndex)) { - // cannot overwrite file created recently in current execution, but we want to continue copying other files - if (!options.no_force) { - common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); - } - return; - } - - if (thisDestExists && options.no_force) { - return; // skip file - } - - if (path.relative(src, thisDest) === '') { - // a file cannot be copied to itself, but we want to continue copying other files - common.error("'" + thisDest + "' and '" + src + "' are the same file", { continue: true }); - return; - } - - copyFileSync(src, thisDest, options); - } - }); // forEach(src) - - return new common.ShellString('', common.state.error, common.state.errorCode); -} -module.exports = _cp; diff --git a/node_modules/shelljs/src/dirs.js b/node_modules/shelljs/src/dirs.js deleted file mode 100644 index 9b7251d..0000000 --- a/node_modules/shelljs/src/dirs.js +++ /dev/null @@ -1,210 +0,0 @@ -var path = require('path'); -var common = require('./common'); -var _cd = require('./cd'); - -common.register('dirs', _dirs, { - wrapOutput: false, -}); -common.register('pushd', _pushd, { - wrapOutput: false, -}); -common.register('popd', _popd, { - wrapOutput: false, -}); - -// Pushd/popd/dirs internals -var _dirStack = []; - -function _isStackIndex(index) { - return 
(/^[-+]\d+$/).test(index); -} - -function _parseStackIndex(index) { - if (_isStackIndex(index)) { - if (Math.abs(index) < _dirStack.length + 1) { // +1 for pwd - return (/^-/).test(index) ? Number(index) - 1 : Number(index); - } - common.error(index + ': directory stack index out of range'); - } else { - common.error(index + ': invalid number'); - } -} - -function _actualDirStack() { - return [process.cwd()].concat(_dirStack); -} - -//@ -//@ ### pushd([options,] [dir | '-N' | '+N']) -//@ -//@ Available options: -//@ -//@ + `-n`: Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. -//@ + `-q`: Suppresses output to the console. -//@ -//@ Arguments: -//@ -//@ + `dir`: Sets the current working directory to the top of the stack, then executes the equivalent of `cd dir`. -//@ + `+N`: Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. -//@ + `-N`: Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ // process.cwd() === '/usr' -//@ pushd('/etc'); // Returns /etc /usr -//@ pushd('+1'); // Returns /usr /etc -//@ ``` -//@ -//@ Save the current directory on the top of the directory stack and then `cd` to `dir`. With no arguments, `pushd` exchanges the top two directories. Returns an array of paths in the stack. -function _pushd(options, dir) { - if (_isStackIndex(options)) { - dir = options; - options = ''; - } - - options = common.parseOptions(options, { - 'n': 'no-cd', - 'q': 'quiet', - }); - - var dirs = _actualDirStack(); - - if (dir === '+0') { - return dirs; // +0 is a noop - } else if (!dir) { - if (dirs.length > 1) { - dirs = dirs.splice(1, 1).concat(dirs); - } else { - return common.error('no other directory'); - } - } else if (_isStackIndex(dir)) { - var n = _parseStackIndex(dir); - dirs = dirs.slice(n).concat(dirs.slice(0, n)); - } else if (options['no-cd']) { - dirs.splice(1, 0, dir); - } else { - dirs.unshift(dir); - } - - if (options['no-cd']) { - dirs = dirs.slice(1); - } else { - dir = path.resolve(dirs.shift()); - _cd('', dir); - } - - _dirStack = dirs; - return _dirs(options.quiet ? '-q' : ''); -} -exports.pushd = _pushd; - -//@ -//@ -//@ ### popd([options,] ['-N' | '+N']) -//@ -//@ Available options: -//@ -//@ + `-n`: Suppress the normal directory change when removing directories from the stack, so that only the stack is manipulated. -//@ + `-q`: Suppresses output to the console. -//@ -//@ Arguments: -//@ -//@ + `+N`: Removes the Nth directory (counting from the left of the list printed by dirs), starting with zero. -//@ + `-N`: Removes the Nth directory (counting from the right of the list printed by dirs), starting with zero. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ echo(process.cwd()); // '/usr' -//@ pushd('/etc'); // '/etc /usr' -//@ echo(process.cwd()); // '/etc' -//@ popd(); // '/usr' -//@ echo(process.cwd()); // '/usr' -//@ ``` -//@ -//@ When no arguments are given, `popd` removes the top directory from the stack and performs a `cd` to the new top directory. The elements are numbered from 0, starting at the first directory listed with dirs (i.e., `popd` is equivalent to `popd +0`). Returns an array of paths in the stack. 
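To complement the examples above, a small sketch of manipulating the stack with the `-n` option (no directory change); the paths are illustrative and the published `shelljs` package is assumed:

```javascript
var shell = require('shelljs');

// Push '/tmp' onto the directory stack without cd'ing into it.
shell.pushd('-n', '/tmp');
console.log(shell.dirs());      // e.g. [ process.cwd(), '/tmp' ]

// Remove the '+1' entry (here '/tmp') from the stack, again with no cd.
shell.popd('-n', '+1');
```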
-function _popd(options, index) { - if (_isStackIndex(options)) { - index = options; - options = ''; - } - - options = common.parseOptions(options, { - 'n': 'no-cd', - 'q': 'quiet', - }); - - if (!_dirStack.length) { - return common.error('directory stack empty'); - } - - index = _parseStackIndex(index || '+0'); - - if (options['no-cd'] || index > 0 || _dirStack.length + index === 0) { - index = index > 0 ? index - 1 : index; - _dirStack.splice(index, 1); - } else { - var dir = path.resolve(_dirStack.shift()); - _cd('', dir); - } - - return _dirs(options.quiet ? '-q' : ''); -} -exports.popd = _popd; - -//@ -//@ -//@ ### dirs([options | '+N' | '-N']) -//@ -//@ Available options: -//@ -//@ + `-c`: Clears the directory stack by deleting all of the elements. -//@ + `-q`: Suppresses output to the console. -//@ -//@ Arguments: -//@ -//@ + `+N`: Displays the Nth directory (counting from the left of the list printed by dirs when invoked without options), starting with zero. -//@ + `-N`: Displays the Nth directory (counting from the right of the list printed by dirs when invoked without options), starting with zero. -//@ -//@ Display the list of currently remembered directories. Returns an array of paths in the stack, or a single path if `+N` or `-N` was specified. -//@ -//@ See also: `pushd`, `popd` -function _dirs(options, index) { - if (_isStackIndex(options)) { - index = options; - options = ''; - } - - options = common.parseOptions(options, { - 'c': 'clear', - 'q': 'quiet', - }); - - if (options.clear) { - _dirStack = []; - return _dirStack; - } - - var stack = _actualDirStack(); - - if (index) { - index = _parseStackIndex(index); - - if (index < 0) { - index = stack.length + index; - } - - if (!options.quiet) { - common.log(stack[index]); - } - return stack[index]; - } - - if (!options.quiet) { - common.log(stack.join(' ')); - } - - return stack; -} -exports.dirs = _dirs; diff --git a/node_modules/shelljs/src/echo.js b/node_modules/shelljs/src/echo.js deleted file mode 100644 index da37f43..0000000 --- a/node_modules/shelljs/src/echo.js +++ /dev/null @@ -1,62 +0,0 @@ -var format = require('util').format; - -var common = require('./common'); - -common.register('echo', _echo, { - allowGlobbing: false, -}); - -//@ -//@ ### echo([options,] string [, string ...]) -//@ -//@ Available options: -//@ -//@ + `-e`: interpret backslash escapes (default) -//@ + `-n`: remove trailing newline from output -//@ -//@ Examples: -//@ -//@ ```javascript -//@ echo('hello world'); -//@ var str = echo('hello world'); -//@ echo('-n', 'no newline at end'); -//@ ``` -//@ -//@ Prints `string` to stdout, and returns a [ShellString](#shellstringstr). -function _echo(opts) { - // allow strings starting with '-', see issue #20 - var messages = [].slice.call(arguments, opts ? 0 : 1); - var options = {}; - - // If the first argument starts with '-', parse it as options string. - // If parseOptions throws, it wasn't an options string. - try { - options = common.parseOptions(messages[0], { - 'e': 'escapes', - 'n': 'no_newline', - }, { - silent: true, - }); - - // Allow null to be echoed - if (messages[0]) { - messages.shift(); - } - } catch (_) { - // Clear out error if an error occurred - common.state.error = null; - } - - var output = format.apply(null, messages); - - // Add newline if -n is not passed. 
- if (!options.no_newline) { - output += '\n'; - } - - process.stdout.write(output); - - return output; -} - -module.exports = _echo; diff --git a/node_modules/shelljs/src/error.js b/node_modules/shelljs/src/error.js deleted file mode 100644 index b0ed59e..0000000 --- a/node_modules/shelljs/src/error.js +++ /dev/null @@ -1,15 +0,0 @@ -var common = require('./common'); - -//@ -//@ ### error() -//@ -//@ Tests if error occurred in the last command. Returns a truthy value if an -//@ error returned, or a falsy value otherwise. -//@ -//@ **Note**: do not rely on the -//@ return value to be an error message. If you need the last error message, use -//@ the `.stderr` attribute from the last command's return value instead. -function error() { - return common.state.error; -} -module.exports = error; diff --git a/node_modules/shelljs/src/errorCode.js b/node_modules/shelljs/src/errorCode.js deleted file mode 100644 index a1c7fd2..0000000 --- a/node_modules/shelljs/src/errorCode.js +++ /dev/null @@ -1,10 +0,0 @@ -var common = require('./common'); - -//@ -//@ ### errorCode() -//@ -//@ Returns the error code from the last command. -function errorCode() { - return common.state.errorCode; -} -module.exports = errorCode; diff --git a/node_modules/shelljs/src/exec-child.js b/node_modules/shelljs/src/exec-child.js deleted file mode 100644 index e8446f6..0000000 --- a/node_modules/shelljs/src/exec-child.js +++ /dev/null @@ -1,71 +0,0 @@ -var childProcess = require('child_process'); -var fs = require('fs'); - -function main() { - var paramFilePath = process.argv[2]; - - var serializedParams = fs.readFileSync(paramFilePath, 'utf8'); - var params = JSON.parse(serializedParams); - - var cmd = params.command; - var execOptions = params.execOptions; - var pipe = params.pipe; - var stdoutFile = params.stdoutFile; - var stderrFile = params.stderrFile; - - function isMaxBufferError(err) { - var maxBufferErrorPattern = /^.*\bmaxBuffer\b.*exceeded.*$/; - if (err instanceof Error && err.message && - err.message.match(maxBufferErrorPattern)) { - // < v10 - // Error: stdout maxBuffer exceeded - return true; - } else if (err instanceof RangeError && err.message && - err.message.match(maxBufferErrorPattern)) { - // >= v10 - // RangeError [ERR_CHILD_PROCESS_STDIO_MAXBUFFER]: stdout maxBuffer length - // exceeded - return true; - } - return false; - } - - var stdoutStream = fs.createWriteStream(stdoutFile); - var stderrStream = fs.createWriteStream(stderrFile); - - function appendError(message, code) { - stderrStream.write(message); - process.exitCode = code; - } - - var c = childProcess.exec(cmd, execOptions, function (err) { - if (!err) { - process.exitCode = 0; - } else if (isMaxBufferError(err)) { - appendError('maxBuffer exceeded', 1); - } else if (err.code === undefined && err.message) { - /* istanbul ignore next */ - appendError(err.message, 1); - } else if (err.code === undefined) { - /* istanbul ignore next */ - appendError('Unknown issue', 1); - } else { - process.exitCode = err.code; - } - }); - - c.stdout.pipe(stdoutStream); - c.stderr.pipe(stderrStream); - c.stdout.pipe(process.stdout); - c.stderr.pipe(process.stderr); - - if (pipe) { - c.stdin.end(pipe); - } -} - -// This file should only be executed. This module does not export anything. 
-/* istanbul ignore else */ -if (require.main === module) { - main(); -} diff --git a/node_modules/shelljs/src/exec.js b/node_modules/shelljs/src/exec.js deleted file mode 100644 index 3907769..0000000 --- a/node_modules/shelljs/src/exec.js +++ /dev/null @@ -1,255 +0,0 @@ -var path = require('path'); -var fs = require('fs'); -var child = require('child_process'); -var common = require('./common'); -var _tempDir = require('./tempdir').tempDir; -var _pwd = require('./pwd'); - -var DEFAULT_MAXBUFFER_SIZE = 20 * 1024 * 1024; -var DEFAULT_ERROR_CODE = 1; - -common.register('exec', _exec, { - unix: false, - canReceivePipe: true, - wrapOutput: false, - handlesFatalDynamically: true, -}); - -// We use this function to run `exec` synchronously while also providing realtime -// output. -function execSync(cmd, opts, pipe) { - if (!common.config.execPath) { - try { - common.error('Unable to find a path to the node binary. Please manually set config.execPath'); - } catch (e) { - if (opts.fatal) { - throw e; - } - - return; - } - } - - var tempDir = _tempDir(); - var paramsFile = path.join(tempDir, common.randomFileName()); - var stderrFile = path.join(tempDir, common.randomFileName()); - var stdoutFile = path.join(tempDir, common.randomFileName()); - - opts = common.extend({ - silent: common.config.silent, - fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary - cwd: _pwd().toString(), - env: process.env, - maxBuffer: DEFAULT_MAXBUFFER_SIZE, - encoding: 'utf8', - }, opts); - - if (fs.existsSync(paramsFile)) common.unlinkSync(paramsFile); - if (fs.existsSync(stderrFile)) common.unlinkSync(stderrFile); - if (fs.existsSync(stdoutFile)) common.unlinkSync(stdoutFile); - - opts.cwd = path.resolve(opts.cwd); - - var paramsToSerialize = { - command: cmd, - execOptions: opts, - pipe, - stdoutFile, - stderrFile, - }; - - // Create the files and ensure these are locked down (for read and write) to - // the current user. The main concerns here are: - // - // * If we execute a command which prints sensitive output, then - // stdoutFile/stderrFile must not be readable by other users. - // * paramsFile must not be readable by other users, or else they can read it - // to figure out the path for stdoutFile/stderrFile and create these first - // (locked down to their own access), which will crash exec() when it tries - // to write to the files. - function writeFileLockedDown(filePath, data) { - fs.writeFileSync(filePath, data, { - encoding: 'utf8', - mode: parseInt('600', 8), - }); - } - writeFileLockedDown(stdoutFile, ''); - writeFileLockedDown(stderrFile, ''); - writeFileLockedDown(paramsFile, JSON.stringify(paramsToSerialize)); - - var execArgs = [ - path.join(__dirname, 'exec-child.js'), - paramsFile, - ]; - - /* istanbul ignore else */ - if (opts.silent) { - opts.stdio = 'ignore'; - } else { - opts.stdio = [0, 1, 2]; - } - - var code = 0; - - // Welcome to the future - try { - // Bad things if we pass in a `shell` option to child_process.execFileSync, - // so we need to explicitly remove it here. - delete opts.shell; - - child.execFileSync(common.config.execPath, execArgs, opts); - } catch (e) { - // Commands with non-zero exit code raise an exception. - code = e.status || DEFAULT_ERROR_CODE; - } - - // fs.readFileSync uses buffer encoding by default, so call - // it without the encoding option if the encoding is 'buffer'. - // Also, if the exec timeout is too short for node to start up, - // the files will not be created, so these calls will throw. 
- var stdout = ''; - var stderr = ''; - if (opts.encoding === 'buffer') { - stdout = fs.readFileSync(stdoutFile); - stderr = fs.readFileSync(stderrFile); - } else { - stdout = fs.readFileSync(stdoutFile, opts.encoding); - stderr = fs.readFileSync(stderrFile, opts.encoding); - } - - // No biggie if we can't erase the files now -- they're in a temp dir anyway - // and we locked down permissions (see the note above). - try { common.unlinkSync(paramsFile); } catch (e) {} - try { common.unlinkSync(stderrFile); } catch (e) {} - try { common.unlinkSync(stdoutFile); } catch (e) {} - - if (code !== 0) { - // Note: `silent` should be unconditionally true to avoid double-printing - // the command's stderr, and to avoid printing any stderr when the user has - // set `shell.config.silent`. - common.error(stderr, code, { continue: true, silent: true, fatal: opts.fatal }); - } - var obj = common.ShellString(stdout, stderr, code); - return obj; -} // execSync() - -// Wrapper around exec() to enable echoing output to console in real time -function execAsync(cmd, opts, pipe, callback) { - opts = common.extend({ - silent: common.config.silent, - fatal: common.config.fatal, // TODO(nfischer): this and the line above are probably unnecessary - cwd: _pwd().toString(), - env: process.env, - maxBuffer: DEFAULT_MAXBUFFER_SIZE, - encoding: 'utf8', - }, opts); - - var c = child.exec(cmd, opts, function (err, stdout, stderr) { - if (callback) { - if (!err) { - callback(0, stdout, stderr); - } else if (err.code === undefined) { - // See issue #536 - /* istanbul ignore next */ - callback(1, stdout, stderr); - } else { - callback(err.code, stdout, stderr); - } - } - }); - - if (pipe) c.stdin.end(pipe); - - if (!opts.silent) { - c.stdout.pipe(process.stdout); - c.stderr.pipe(process.stderr); - } - - return c; -} - -//@ -//@ ### exec(command [, options] [, callback]) -//@ -//@ Available options: -//@ -//@ + `async`: Asynchronous execution. If a callback is provided, it will be set to -//@ `true`, regardless of the passed value (default: `false`). -//@ + `fatal`: Exit upon error (default: `false`). -//@ + `silent`: Do not echo program output to console (default: `false`). -//@ + `encoding`: Character encoding to use. Affects the values returned to stdout and stderr, and -//@ what is written to stdout and stderr when not in silent mode (default: `'utf8'`). -//@ + and any option available to Node.js's -//@ [`child_process.exec()`](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var version = exec('node --version', {silent:true}).stdout; -//@ -//@ var child = exec('some_long_running_process', {async:true}); -//@ child.stdout.on('data', function(data) { -//@ /* ... do something with data ... */ -//@ }); -//@ -//@ exec('some_long_running_process', function(code, stdout, stderr) { -//@ console.log('Exit code:', code); -//@ console.log('Program output:', stdout); -//@ console.log('Program stderr:', stderr); -//@ }); -//@ ``` -//@ -//@ Executes the given `command` _synchronously_, unless otherwise specified. -//@ When in synchronous mode, this returns a [ShellString](#shellstringstr). -//@ Otherwise, this returns the child process object, and the `callback` -//@ receives the arguments `(code, stdout, stderr)`. -//@ -//@ Not seeing the behavior you want? `exec()` runs everything through `sh` -//@ by default (or `cmd.exe` on Windows), which differs from `bash`. 
If you -//@ need bash-specific behavior, try out the `{shell: 'path/to/bash'}` option. -//@ -//@ **Security note:** as `shell.exec()` executes an arbitrary string in the -//@ system shell, it is **critical** to properly sanitize user input to avoid -//@ **command injection**. For more context, consult the [Security -//@ Guidelines](https://github.com/shelljs/shelljs/wiki/Security-guidelines). -function _exec(command, options, callback) { - options = options || {}; - - var pipe = common.readFromPipe(); - - // Callback is defined instead of options. - if (typeof options === 'function') { - callback = options; - options = { async: true }; - } - - // Callback is defined with options. - if (typeof options === 'object' && typeof callback === 'function') { - options.async = true; - } - - options = common.extend({ - silent: common.config.silent, - fatal: common.config.fatal, - async: false, - }, options); - - if (!command) { - try { - common.error('must specify command'); - } catch (e) { - if (options.fatal) { - throw e; - } - - return; - } - } - - if (options.async) { - return execAsync(command, options, pipe, callback); - } else { - return execSync(command, options, pipe); - } -} -module.exports = _exec; diff --git a/node_modules/shelljs/src/find.js b/node_modules/shelljs/src/find.js deleted file mode 100644 index 80db993..0000000 --- a/node_modules/shelljs/src/find.js +++ /dev/null @@ -1,66 +0,0 @@ -var path = require('path'); -var common = require('./common'); -var _ls = require('./ls'); - -common.register('find', _find, { - cmdOptions: { - 'L': 'link', - }, -}); - -//@ -//@ ### find(path [, path ...]) -//@ ### find(path_array) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ find('src', 'lib'); -//@ find(['src', 'lib']); // same as above -//@ find('.').filter(function(file) { return file.match(/\.js$/); }); -//@ ``` -//@ -//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all -//@ files (however deep) in the given paths. -//@ -//@ The main difference from `ls('-R', path)` is that the resulting file names -//@ include the base directories (e.g., `lib/resources/file1` instead of just `file1`). -function _find(options, paths) { - if (!paths) { - common.error('no path specified'); - } else if (typeof paths === 'string') { - paths = [].slice.call(arguments, 1); - } - - var list = []; - - function pushFile(file) { - if (process.platform === 'win32') { - file = file.replace(/\\/g, '/'); - } - list.push(file); - } - - // why not simply do `ls('-R', paths)`? 
because the output wouldn't give the base dirs - // to get the base dir in the output, we need instead `ls('-R', 'dir/*')` for every directory - - paths.forEach(function (file) { - var stat; - try { - stat = common.statFollowLinks(file); - } catch (e) { - common.error('no such file or directory: ' + file); - } - - pushFile(file); - - if (stat.isDirectory()) { - _ls({ recursive: true, all: true, link: options.link }, file).forEach(function (subfile) { - pushFile(path.join(file, subfile)); - }); - } - }); - - return list; -} -module.exports = _find; diff --git a/node_modules/shelljs/src/grep.js b/node_modules/shelljs/src/grep.js deleted file mode 100644 index cfc83e4..0000000 --- a/node_modules/shelljs/src/grep.js +++ /dev/null @@ -1,198 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('grep', _grep, { - globStart: 2, // don't glob-expand the regex - canReceivePipe: true, - cmdOptions: { - 'v': 'inverse', - 'l': 'nameOnly', - 'i': 'ignoreCase', - 'n': 'lineNumber', - 'B': 'beforeContext', - 'A': 'afterContext', - 'C': 'context', - }, -}); - -//@ -//@ ### grep([options,] regex_filter, file [, file ...]) -//@ ### grep([options,] regex_filter, file_array) -//@ -//@ Available options: -//@ -//@ + `-v`: Invert `regex_filter` (only print non-matching lines). -//@ + `-l`: Print only filenames of matching files. -//@ + `-i`: Ignore case. -//@ + `-n`: Print line numbers. -//@ + `-B `: Show `` lines before each result. -//@ + `-A `: Show `` lines after each result. -//@ + `-C `: Show `` lines before and after each result. -B and -A override this option. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ grep('-v', 'GLOBAL_VARIABLE', '*.js'); -//@ grep('GLOBAL_VARIABLE', '*.js'); -//@ grep('-B', 3, 'GLOBAL_VARIABLE', '*.js'); -//@ grep({ '-B': 3 }, 'GLOBAL_VARIABLE', '*.js'); -//@ grep({ '-B': 3, '-C': 2 }, 'GLOBAL_VARIABLE', '*.js'); -//@ ``` -//@ -//@ Reads input string from given files and returns a -//@ [ShellString](#shellstringstr) containing all lines of the @ file that match -//@ the given `regex_filter`. 
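Since `grep` is registered with `canReceivePipe: true`, it can also consume piped input, which the examples in the documentation block above do not show. A minimal sketch under the assumption that shelljs is installed as a regular dependency and that a hypothetical `app.log` file exists:

```javascript
// Minimal sketch: piping into grep() and using context options.
// Assumes shelljs is installed and that 'app.log' exists.
var shell = require('shelljs');

// Pipe form: cat() feeds grep through the internal pipe, so no file
// argument is needed on the grep() call itself.
var errors = shell.cat('app.log').grep('-i', 'error');
console.log(errors.toString());

// Context form: {'-C': 2} prints two lines before and after each match;
// per the implementation below, separate match groups are joined with a
// '--' separator line.
var withContext = shell.grep({ '-C': 2 }, 'error', 'app.log');
console.log(withContext.toString());
```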
-function _grep(options, regex, files) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no paths given', 2); - - var idx = 2; - var contextError = ': invalid context length argument'; - // If the option has been found but not read, copy value from arguments - if (options.beforeContext === true) { - idx = 3; - options.beforeContext = Number(arguments[1]); - if (options.beforeContext < 0) { - common.error(options.beforeContext + contextError, 2); - } - } - if (options.afterContext === true) { - idx = 3; - options.afterContext = Number(arguments[1]); - if (options.afterContext < 0) { - common.error(options.afterContext + contextError, 2); - } - } - if (options.context === true) { - idx = 3; - options.context = Number(arguments[1]); - if (options.context < 0) { - common.error(options.context + contextError, 2); - } - } - // If before or after not given but context is, update values - if (typeof options.context === 'number') { - if (options.beforeContext === false) { - options.beforeContext = options.context; - } - if (options.afterContext === false) { - options.afterContext = options.context; - } - } - regex = arguments[idx - 1]; - files = [].slice.call(arguments, idx); - - if (pipe) { - files.unshift('-'); - } - - var grep = []; - if (options.ignoreCase) { - regex = new RegExp(regex, 'i'); - } - files.forEach(function (file) { - if (!fs.existsSync(file) && file !== '-') { - common.error('no such file or directory: ' + file, 2, { continue: true }); - return; - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - if (options.nameOnly) { - if (contents.match(regex)) { - grep.push(file); - } - } else { - var lines = contents.split('\n'); - var matches = []; - - lines.forEach(function (line, index) { - var matched = line.match(regex); - if ((options.inverse && !matched) || (!options.inverse && matched)) { - var lineNumber = index + 1; - var result = {}; - if (matches.length > 0) { - // If the last result intersects, combine them - var last = matches[matches.length - 1]; - var minimumLineNumber = Math.max( - 1, - lineNumber - options.beforeContext - 1, - ); - if ( - last.hasOwnProperty('' + lineNumber) || - last.hasOwnProperty('' + minimumLineNumber) - ) { - result = last; - } - } - result[lineNumber] = { - line, - match: true, - }; - if (options.beforeContext > 0) { - // Store the lines with their line numbers to check for overlap - lines - .slice(Math.max(index - options.beforeContext, 0), index) - .forEach(function (v, i, a) { - var lineNum = '' + (index - a.length + i + 1); - if (!result.hasOwnProperty(lineNum)) { - result[lineNum] = { line: v, match: false }; - } - }); - } - if (options.afterContext > 0) { - // Store the lines with their line numbers to check for overlap - lines - .slice( - index + 1, - Math.min(index + options.afterContext + 1, lines.length - 1), - ) - .forEach(function (v, i) { - var lineNum = '' + (index + 1 + i + 1); - if (!result.hasOwnProperty(lineNum)) { - result[lineNum] = { line: v, match: false }; - } - }); - } - // Only add the result if it's new - if (!matches.includes(result)) { - matches.push(result); - } - } - }); - - // Loop through the matches and add them to the output - Array.prototype.push.apply( - grep, - matches.map(function (result) { - return Object.entries(result) - .map(function (entry) { - var lineNumber = entry[0]; - var line = entry[1].line; - var match = entry[1].match; - return options.lineNumber - ? lineNumber + (match ? 
':' : '-') + line - : line; - }) - .join('\n'); - }), - ); - } - }); - - if (grep.length === 0 && common.state.errorCode !== 2) { - // We didn't hit the error above, but pattern didn't match - common.error('', { silent: true }); - } - - var separator = '\n'; - if ( - typeof options.beforeContext === 'number' || - typeof options.afterContext === 'number' - ) { - separator = '\n--\n'; - } - return grep.join(separator) + '\n'; -} -module.exports = _grep; diff --git a/node_modules/shelljs/src/head.js b/node_modules/shelljs/src/head.js deleted file mode 100644 index f3f4f22..0000000 --- a/node_modules/shelljs/src/head.js +++ /dev/null @@ -1,107 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('head', _head, { - canReceivePipe: true, - cmdOptions: { - 'n': 'numLines', - }, -}); - -// Reads |numLines| lines or the entire file, whichever is less. -function readSomeLines(file, numLines) { - var buf = common.buffer(); - var bufLength = buf.length; - var bytesRead = bufLength; - var pos = 0; - - var fdr = fs.openSync(file, 'r'); - var numLinesRead = 0; - var ret = ''; - while (bytesRead === bufLength && numLinesRead < numLines) { - bytesRead = fs.readSync(fdr, buf, 0, bufLength, pos); - var bufStr = buf.toString('utf8', 0, bytesRead); - numLinesRead += bufStr.split('\n').length - 1; - ret += bufStr; - pos += bytesRead; - } - - fs.closeSync(fdr); - return ret; -} - -//@ -//@ ### head([{'-n': \},] file [, file ...]) -//@ ### head([{'-n': \},] file_array) -//@ -//@ Available options: -//@ -//@ + `-n `: Show the first `` lines of the files -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var str = head({'-n': 1}, 'file*.txt'); -//@ var str = head('file1', 'file2'); -//@ var str = head(['file1', 'file2']); // same as above -//@ ``` -//@ -//@ Read the start of a `file`. Returns a [ShellString](#shellstringstr). 
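One detail the examples above do not cover: when `-n` is negative, the implementation reads the whole file and then applies `lines.slice(0, n)`, so a negative count behaves like GNU `head -n -K` (everything except the last K lines). A hedged sketch, assuming shelljs is installed and a hypothetical `notes.txt` exists:

```javascript
// Minimal sketch: positive and negative line counts with head().
// Assumes shelljs is installed and 'notes.txt' exists.
var shell = require('shelljs');

// First three lines of the file.
var firstThree = shell.head({ '-n': 3 }, 'notes.txt');

// A negative count falls through to lines.slice(0, n) in the
// implementation above, i.e. everything except the last two lines.
var allButLastTwo = shell.head({ '-n': -2 }, 'notes.txt');

console.log(firstThree.toString());
console.log(allButLastTwo.toString());
```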
-function _head(options, files) { - var head = []; - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no paths given'); - - var idx = 1; - if (options.numLines === true) { - idx = 2; - options.numLines = Number(arguments[1]); - } else if (options.numLines === false) { - options.numLines = 10; - } - files = [].slice.call(arguments, idx); - - if (pipe) { - files.unshift('-'); - } - - var shouldAppendNewline = false; - files.forEach(function (file) { - if (file !== '-') { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file, { continue: true }); - return; - } else if (common.statFollowLinks(file).isDirectory()) { - common.error("error reading '" + file + "': Is a directory", { - continue: true, - }); - return; - } - } - - var contents; - if (file === '-') { - contents = pipe; - } else if (options.numLines < 0) { - contents = fs.readFileSync(file, 'utf8'); - } else { - contents = readSomeLines(file, options.numLines); - } - - var lines = contents.split('\n'); - var hasTrailingNewline = (lines[lines.length - 1] === ''); - if (hasTrailingNewline) { - lines.pop(); - } - shouldAppendNewline = (hasTrailingNewline || options.numLines < lines.length); - - head = head.concat(lines.slice(0, options.numLines)); - }); - - if (shouldAppendNewline) { - head.push(''); // to add a trailing newline once we join - } - return head.join('\n'); -} -module.exports = _head; diff --git a/node_modules/shelljs/src/ln.js b/node_modules/shelljs/src/ln.js deleted file mode 100644 index 1d3d0e7..0000000 --- a/node_modules/shelljs/src/ln.js +++ /dev/null @@ -1,75 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('ln', _ln, { - cmdOptions: { - 's': 'symlink', - 'f': 'force', - }, -}); - -//@ -//@ ### ln([options,] source, dest) -//@ -//@ Available options: -//@ -//@ + `-s`: symlink -//@ + `-f`: force -//@ -//@ Examples: -//@ -//@ ```javascript -//@ ln('file', 'newlink'); -//@ ln('-sf', 'file', 'existing'); -//@ ``` -//@ -//@ Links `source` to `dest`. Use `-f` to force the link, should `dest` already -//@ exist. Returns a [ShellString](#shellstringstr) indicating success or -//@ failure. -function _ln(options, source, dest) { - if (!source || !dest) { - common.error('Missing and/or '); - } - - source = String(source); - var sourcePath = path.normalize(source).replace(RegExp(path.sep + '$'), ''); - var isAbsolute = (path.resolve(source) === sourcePath); - dest = path.resolve(process.cwd(), String(dest)); - - if (fs.existsSync(dest)) { - if (!options.force) { - common.error('Destination file exists', { continue: true }); - } - - fs.unlinkSync(dest); - } - - if (options.symlink) { - var isWindows = process.platform === 'win32'; - var linkType = isWindows ? 'file' : null; - var resolvedSourcePath = isAbsolute ? sourcePath : path.resolve(process.cwd(), path.dirname(dest), source); - if (!fs.existsSync(resolvedSourcePath)) { - common.error('Source file does not exist', { continue: true }); - } else if (isWindows && common.statFollowLinks(resolvedSourcePath).isDirectory()) { - linkType = 'junction'; - } - - try { - fs.symlinkSync(linkType === 'junction' ? 
resolvedSourcePath : source, dest, linkType); - } catch (err) { - common.error(err.message); - } - } else { - if (!fs.existsSync(source)) { - common.error('Source file does not exist', { continue: true }); - } - try { - fs.linkSync(source, dest); - } catch (err) { - common.error(err.message); - } - } - return ''; -} -module.exports = _ln; diff --git a/node_modules/shelljs/src/ls.js b/node_modules/shelljs/src/ls.js deleted file mode 100644 index 7f32c6e..0000000 --- a/node_modules/shelljs/src/ls.js +++ /dev/null @@ -1,155 +0,0 @@ -var path = require('path'); -var fs = require('fs'); -var glob = require('fast-glob'); -var common = require('./common'); - -// glob patterns use the UNIX path seperator -var globPatternRecursive = '/**'; - -common.register('ls', _ls, { - cmdOptions: { - 'R': 'recursive', - 'A': 'all', - 'L': 'link', - 'a': 'all_deprecated', - 'd': 'directory', - 'l': 'long', - }, -}); - -//@ -//@ ### ls([options,] [path, ...]) -//@ ### ls([options,] path_array) -//@ -//@ Available options: -//@ -//@ + `-R`: recursive -//@ + `-A`: all files (include files beginning with `.`, except for `.` and `..`) -//@ + `-L`: follow symlinks -//@ + `-d`: list directories themselves, not their contents -//@ + `-l`: provides more details for each file. Specifically, each file is -//@ represented by a structured object with separate fields for file -//@ metadata (see -//@ [`fs.Stats`](https://nodejs.org/api/fs.html#fs_class_fs_stats)). The -//@ return value also overrides `.toString()` to resemble `ls -l`'s -//@ output format for human readability, but programmatic usage should -//@ depend on the stable object format rather than the `.toString()` -//@ representation. -//@ -//@ Examples: -//@ -//@ ```javascript -//@ ls('projs/*.js'); -//@ ls('projs/**/*.js'); // Find all js files recursively in projs -//@ ls('-R', '/users/me', '/tmp'); -//@ ls('-R', ['/users/me', '/tmp']); // same as above -//@ ls('-l', 'file.txt'); // { name: 'file.txt', mode: 33188, nlink: 1, ...} -//@ ``` -//@ -//@ Returns a [ShellString](#shellstringstr) (with array-like properties) of all -//@ the files in the given `path`, or files in the current directory if no -//@ `path` is provided. -function _ls(options, paths) { - if (options.all_deprecated) { - // We won't support the -a option as it's hard to image why it's useful - // (it includes '.' and '..' in addition to '.*' files) - // For backwards compatibility we'll dump a deprecated message and proceed as before - common.log('ls: Option -a is deprecated. Use -A instead'); - options.all = true; - } - - if (!paths) { - paths = ['.']; - } else { - paths = [].slice.call(arguments, 1); - } - - var list = []; - - function pushFile(abs, relName, stat) { - if (process.platform === 'win32') { - relName = relName.replace(/\\/g, '/'); - } - if (options.long) { - stat = stat || (options.link ? common.statFollowLinks(abs) : common.statNoFollowLinks(abs)); - list.push(addLsAttributes(relName, stat)); - } else { - // list.push(path.relative(rel || '.', file)); - list.push(relName); - } - } - - paths.forEach(function (p) { - var stat; - - try { - stat = options.link ? common.statFollowLinks(p) : common.statNoFollowLinks(p); - // follow links to directories by default - if (stat.isSymbolicLink()) { - /* istanbul ignore next */ - // workaround for https://github.com/shelljs/shelljs/issues/795 - // codecov seems to have a bug that miscalculate this block as uncovered. - // but according to nyc report this block does get covered. 
- try { - var _stat = common.statFollowLinks(p); - if (_stat.isDirectory()) { - stat = _stat; - } - } catch (_) {} // bad symlink, treat it like a file - } - } catch (e) { - common.error('no such file or directory: ' + p, 2, { continue: true }); - return; - } - - // If the stat succeeded - if (stat.isDirectory() && !options.directory) { - if (options.recursive) { - // use glob, because it's simple - glob.sync(p + globPatternRecursive, { - // These options are just to make fast-glob be compatible with POSIX - // (bash) wildcard behavior. - onlyFiles: false, - - // These options depend on the cmdOptions provided to ls. - dot: options.all, - followSymbolicLinks: options.link, - }).forEach(function (item) { - // Glob pattern returns the directory itself and needs to be filtered out. - if (path.relative(p, item)) { - pushFile(item, path.relative(p, item)); - } - }); - } else if (options.all) { - // use fs.readdirSync, because it's fast - fs.readdirSync(p).forEach(function (item) { - pushFile(path.join(p, item), item); - }); - } else { - // use fs.readdirSync and then filter out secret files - fs.readdirSync(p).forEach(function (item) { - if (item[0] !== '.') { - pushFile(path.join(p, item), item); - } - }); - } - } else { - pushFile(p, p, stat); - } - }); - - // Add methods, to make this more compatible with ShellStrings - return list; -} - -function addLsAttributes(pathName, stats) { - // Note: this object will contain more information than .toString() returns - stats.name = pathName; - stats.toString = function () { - // Return a string resembling unix's `ls -l` format - return [this.mode, this.nlink, this.uid, this.gid, this.size, this.mtime, this.name].join(' '); - }; - return stats; -} - -module.exports = _ls; diff --git a/node_modules/shelljs/src/mkdir.js b/node_modules/shelljs/src/mkdir.js deleted file mode 100644 index 021cad9..0000000 --- a/node_modules/shelljs/src/mkdir.js +++ /dev/null @@ -1,102 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('mkdir', _mkdir, { - cmdOptions: { - 'p': 'fullpath', - }, -}); - -// Recursively creates `dir` -function mkdirSyncRecursive(dir) { - var baseDir = path.dirname(dir); - - // Prevents some potential problems arising from malformed UNCs or - // insufficient permissions. - /* istanbul ignore next */ - if (baseDir === dir) { - common.error('dirname() failed: [' + dir + ']'); - } - - // Base dir does not exist, go recursive - if (!fs.existsSync(baseDir)) { - mkdirSyncRecursive(baseDir); - } - - try { - // Base dir created, can create dir - fs.mkdirSync(dir, parseInt('0777', 8)); - } catch (e) { - // swallow error if dir already exists - if (e.code !== 'EEXIST' || common.statNoFollowLinks(dir).isFile()) { throw e; } - } -} - -//@ -//@ ### mkdir([options,] dir [, dir ...]) -//@ ### mkdir([options,] dir_array) -//@ -//@ Available options: -//@ -//@ + `-p`: full path (and create intermediate directories, if necessary) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ mkdir('-p', '/tmp/a/b/c/d', '/tmp/e/f/g'); -//@ mkdir('-p', ['/tmp/a/b/c/d', '/tmp/e/f/g']); // same as above -//@ ``` -//@ -//@ Creates directories. Returns a [ShellString](#shellstringstr) indicating -//@ success or failure. 
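Like the other commands, `mkdir` reports failure through the returned ShellString rather than by throwing (unless `config.fatal` is set). A minimal sketch of checking that result, assuming shelljs is installed; the path is an arbitrary example:

```javascript
// Minimal sketch: checking the ShellString returned by mkdir().
// Assumes shelljs is installed; '/tmp/demo/a/b' is an arbitrary example path.
var shell = require('shelljs');

var result = shell.mkdir('-p', '/tmp/demo/a/b');
if (result.code !== 0) {
  // shell.error() returns the last error message (or null on success).
  console.error('mkdir failed:', shell.error());
} else {
  console.log('created /tmp/demo/a/b');
}
```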
-function _mkdir(options, dirs) { - if (!dirs) common.error('no paths given'); - - if (typeof dirs === 'string') { - dirs = [].slice.call(arguments, 1); - } - // if it's array leave it as it is - - dirs.forEach(function (dir) { - try { - var stat = common.statNoFollowLinks(dir); - if (!options.fullpath) { - common.error('path already exists: ' + dir, { continue: true }); - } else if (stat.isFile()) { - common.error('cannot create directory ' + dir + ': File exists', { continue: true }); - } - return; // skip dir - } catch (e) { - // do nothing - } - - // Base dir does not exist, and no -p option given - var baseDir = path.dirname(dir); - if (!fs.existsSync(baseDir) && !options.fullpath) { - common.error('no such file or directory: ' + baseDir, { continue: true }); - return; // skip dir - } - - try { - if (options.fullpath) { - mkdirSyncRecursive(path.resolve(dir)); - } else { - fs.mkdirSync(dir, parseInt('0777', 8)); - } - } catch (e) { - var reason; - if (e.code === 'EACCES') { - reason = 'Permission denied'; - } else if (e.code === 'ENOTDIR' || e.code === 'ENOENT') { - reason = 'Not a directory'; - } else { - /* istanbul ignore next */ - throw e; - } - common.error('cannot create directory ' + dir + ': ' + reason, { continue: true }); - } - }); - return ''; -} // man arraykdir -module.exports = _mkdir; diff --git a/node_modules/shelljs/src/mv.js b/node_modules/shelljs/src/mv.js deleted file mode 100644 index 6e89e2f..0000000 --- a/node_modules/shelljs/src/mv.js +++ /dev/null @@ -1,119 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); -var cp = require('./cp'); -var rm = require('./rm'); - -common.register('mv', _mv, { - cmdOptions: { - 'f': '!no_force', - 'n': 'no_force', - }, -}); - -// Checks if cureent file was created recently -function checkRecentCreated(sources, index) { - var lookedSource = sources[index]; - return sources.slice(0, index).some(function (src) { - return path.basename(src) === path.basename(lookedSource); - }); -} - -//@ -//@ ### mv([options ,] source [, source ...], dest') -//@ ### mv([options ,] source_array, dest') -//@ -//@ Available options: -//@ -//@ + `-f`: force (default behavior) -//@ + `-n`: no-clobber -//@ -//@ Examples: -//@ -//@ ```javascript -//@ mv('-n', 'file', 'dir/'); -//@ mv('file1', 'file2', 'dir/'); -//@ mv(['file1', 'file2'], 'dir/'); // same as above -//@ ``` -//@ -//@ Moves `source` file(s) to `dest`. Returns a [ShellString](#shellstringstr) -//@ indicating success or failure. 
-function _mv(options, sources, dest) { - // Get sources, dest - if (arguments.length < 3) { - common.error('missing and/or '); - } else if (arguments.length > 3) { - sources = [].slice.call(arguments, 1, arguments.length - 1); - dest = arguments[arguments.length - 1]; - } else if (typeof sources === 'string') { - sources = [sources]; - } else { - // TODO(nate): figure out if we actually need this line - common.error('invalid arguments'); - } - - var exists = fs.existsSync(dest); - var stats = exists && common.statFollowLinks(dest); - - // Dest is not existing dir, but multiple sources given - if ((!exists || !stats.isDirectory()) && sources.length > 1) { - common.error('dest is not a directory (too many sources)'); - } - - // Dest is an existing file, but no -f given - if (exists && stats.isFile() && options.no_force) { - common.error('dest file already exists: ' + dest); - } - - sources.forEach(function (src, srcIndex) { - if (!fs.existsSync(src)) { - common.error('no such file or directory: ' + src, { continue: true }); - return; // skip file - } - - // If here, src exists - - // When copying to '/path/dir': - // thisDest = '/path/dir/file1' - var thisDest = dest; - if (fs.existsSync(dest) && common.statFollowLinks(dest).isDirectory()) { - thisDest = path.normalize(dest + '/' + path.basename(src)); - } - - var thisDestExists = fs.existsSync(thisDest); - - if (thisDestExists && checkRecentCreated(sources, srcIndex)) { - // cannot overwrite file created recently in current execution, but we want to continue copying other files - if (!options.no_force) { - common.error("will not overwrite just-created '" + thisDest + "' with '" + src + "'", { continue: true }); - } - return; - } - - if (fs.existsSync(thisDest) && options.no_force) { - common.error('dest file already exists: ' + thisDest, { continue: true }); - return; // skip file - } - - if (path.resolve(src) === path.dirname(path.resolve(thisDest))) { - common.error('cannot move to self: ' + src, { continue: true }); - return; // skip file - } - - try { - fs.renameSync(src, thisDest); - } catch (e) { - /* istanbul ignore next */ - if (e.code === 'EXDEV') { - // If we're trying to `mv` to an external partition, we'll actually need - // to perform a copy and then clean up the original file. If either the - // copy or the rm fails with an exception, we should allow this - // exception to pass up to the top level. - cp({ recursive: true }, src, thisDest); - rm({ recursive: true, force: true }, src); - } - } - }); // forEach(src) - return ''; -} // mv -module.exports = _mv; diff --git a/node_modules/shelljs/src/popd.js b/node_modules/shelljs/src/popd.js deleted file mode 100644 index d9eac3f..0000000 --- a/node_modules/shelljs/src/popd.js +++ /dev/null @@ -1 +0,0 @@ -// see dirs.js diff --git a/node_modules/shelljs/src/pushd.js b/node_modules/shelljs/src/pushd.js deleted file mode 100644 index d9eac3f..0000000 --- a/node_modules/shelljs/src/pushd.js +++ /dev/null @@ -1 +0,0 @@ -// see dirs.js diff --git a/node_modules/shelljs/src/pwd.js b/node_modules/shelljs/src/pwd.js deleted file mode 100644 index 8527d8b..0000000 --- a/node_modules/shelljs/src/pwd.js +++ /dev/null @@ -1,16 +0,0 @@ -var path = require('path'); -var common = require('./common'); - -common.register('pwd', _pwd, { - allowGlobbing: false, -}); - -//@ -//@ ### pwd() -//@ -//@ Returns the current directory as a [ShellString](#shellstringstr). 
-function _pwd() { - var pwd = path.resolve(process.cwd()); - return pwd; -} -module.exports = _pwd; diff --git a/node_modules/shelljs/src/rm.js b/node_modules/shelljs/src/rm.js deleted file mode 100644 index 6bb5755..0000000 --- a/node_modules/shelljs/src/rm.js +++ /dev/null @@ -1,201 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('rm', _rm, { - cmdOptions: { - 'f': 'force', - 'r': 'recursive', - 'R': 'recursive', - }, -}); - -// Recursively removes 'dir' -// Adapted from https://github.com/ryanmcgrath/wrench-js -// -// Copyright (c) 2010 Ryan McGrath -// Copyright (c) 2012 Artur Adib -// -// Licensed under the MIT License -// http://www.opensource.org/licenses/mit-license.php -function rmdirSyncRecursive(dir, force, fromSymlink) { - var files; - - files = fs.readdirSync(dir); - - // Loop through and delete everything in the sub-tree after checking it - for (var i = 0; i < files.length; i++) { - var file = dir + '/' + files[i]; - var currFile = common.statNoFollowLinks(file); - - if (currFile.isDirectory()) { // Recursive function back to the beginning - rmdirSyncRecursive(file, force); - } else if (force || isWriteable(file)) { - // Assume it's a file - perhaps a try/catch belongs here? - try { - common.unlinkSync(file); - } catch (e) { - /* istanbul ignore next */ - common.error('could not remove file (code ' + e.code + '): ' + file, { - continue: true, - }); - } - } - } - - // if was directory was referenced through a symbolic link, - // the contents should be removed, but not the directory itself - if (fromSymlink) return; - - // Now that we know everything in the sub-tree has been deleted, we can delete the main directory. - // Huzzah for the shopkeep. - - var result; - try { - // Retry on windows, sometimes it takes a little time before all the files in the directory are gone - var start = Date.now(); - - // TODO: replace this with a finite loop - for (;;) { - try { - result = fs.rmdirSync(dir); - if (fs.existsSync(dir)) throw { code: 'EAGAIN' }; - break; - } catch (er) { - /* istanbul ignore next */ - // In addition to error codes, also check if the directory still exists and loop again if true - if (process.platform === 'win32' && (er.code === 'ENOTEMPTY' || er.code === 'EBUSY' || er.code === 'EPERM' || er.code === 'EAGAIN')) { - if (Date.now() - start > 1000) throw er; - } else if (er.code === 'ENOENT') { - // Directory did not exist, deletion was successful - break; - } else { - throw er; - } - } - } - } catch (e) { - common.error('could not remove directory (code ' + e.code + '): ' + dir, { continue: true }); - } - - return result; -} // rmdirSyncRecursive - -// Hack to determine if file has write permissions for current user -// Avoids having to check user, group, etc, but it's probably slow -function isWriteable(file) { - var writePermission = true; - try { - var __fd = fs.openSync(file, 'a'); - fs.closeSync(__fd); - } catch (e) { - writePermission = false; - } - - return writePermission; -} - -function handleFile(file, options) { - if (options.force || isWriteable(file)) { - // -f was passed, or file is writable, so it can be removed - common.unlinkSync(file); - } else { - common.error('permission denied: ' + file, { continue: true }); - } -} - -function handleDirectory(file, options) { - if (options.recursive) { - // -r was passed, so directory can be removed - rmdirSyncRecursive(file, options.force); - } else { - common.error('path is a directory', { continue: true }); - } -} - -function handleSymbolicLink(file, options) { - var 
stats; - try { - stats = common.statFollowLinks(file); - } catch (e) { - // symlink is broken, so remove the symlink itself - common.unlinkSync(file); - return; - } - - if (stats.isFile()) { - common.unlinkSync(file); - } else if (stats.isDirectory()) { - if (file[file.length - 1] === '/') { - // trailing separator, so remove the contents, not the link - if (options.recursive) { - // -r was passed, so directory can be removed - var fromSymlink = true; - rmdirSyncRecursive(file, options.force, fromSymlink); - } else { - common.error('path is a directory', { continue: true }); - } - } else { - // no trailing separator, so remove the link - common.unlinkSync(file); - } - } -} - -function handleFIFO(file) { - common.unlinkSync(file); -} - -//@ -//@ ### rm([options,] file [, file ...]) -//@ ### rm([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-f`: force -//@ + `-r, -R`: recursive -//@ -//@ Examples: -//@ -//@ ```javascript -//@ rm('-rf', '/tmp/*'); -//@ rm('some_file.txt', 'another_file.txt'); -//@ rm(['some_file.txt', 'another_file.txt']); // same as above -//@ ``` -//@ -//@ Removes files. Returns a [ShellString](#shellstringstr) indicating success -//@ or failure. -function _rm(options, files) { - if (!files) common.error('no paths given'); - - // Convert to array - files = [].slice.call(arguments, 1); - - files.forEach(function (file) { - var lstats; - try { - var filepath = (file[file.length - 1] === '/') - ? file.slice(0, -1) // remove the '/' so lstatSync can detect symlinks - : file; - lstats = common.statNoFollowLinks(filepath); // test for existence - } catch (e) { - // Path does not exist, no force flag given - if (!options.force) { - common.error('no such file or directory: ' + file, { continue: true }); - } - return; // skip file - } - - // If here, path exists - if (lstats.isFile()) { - handleFile(file, options); - } else if (lstats.isDirectory()) { - handleDirectory(file, options); - } else if (lstats.isSymbolicLink()) { - handleSymbolicLink(file, options); - } else if (lstats.isFIFO()) { - handleFIFO(file); - } - }); // forEach(file) - return ''; -} // rm -module.exports = _rm; diff --git a/node_modules/shelljs/src/sed.js b/node_modules/shelljs/src/sed.js deleted file mode 100644 index 6936523..0000000 --- a/node_modules/shelljs/src/sed.js +++ /dev/null @@ -1,95 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('sed', _sed, { - globStart: 3, // don't glob-expand regexes - canReceivePipe: true, - cmdOptions: { - 'i': 'inplace', - }, -}); - -//@ -//@ ### sed([options,] search_regex, replacement, file [, file ...]) -//@ ### sed([options,] search_regex, replacement, file_array) -//@ -//@ Available options: -//@ -//@ + `-i`: Replace contents of `file` in-place. _Note that no backups will be created!_ -//@ -//@ Examples: -//@ -//@ ```javascript -//@ sed('-i', 'PROGRAM_VERSION', 'v0.1.3', 'source.js'); -//@ ``` -//@ -//@ Reads an input string from `file`s, line by line, and performs a JavaScript `replace()` on -//@ each of the lines from the input string using the given `search_regex` and `replacement` string or -//@ function. Returns the new [ShellString](#shellstringstr) after replacement. -//@ -//@ Note: -//@ -//@ Like unix `sed`, ShellJS `sed` supports capture groups. 
Capture groups are specified -//@ using the `$n` syntax: -//@ -//@ ```javascript -//@ sed(/(\w+)\s(\w+)/, '$2, $1', 'file.txt'); -//@ ``` -//@ -//@ Also, like unix `sed`, ShellJS `sed` runs replacements on each line from the input file -//@ (split by '\n') separately, so `search_regex`es that span more than one line (or include '\n') -//@ will not match anything and nothing will be replaced. -function _sed(options, regex, replacement, files) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (typeof replacement !== 'string' && typeof replacement !== 'function') { - if (typeof replacement === 'number') { - replacement = replacement.toString(); // fallback - } else { - common.error('invalid replacement string'); - } - } - - // Convert all search strings to RegExp - if (typeof regex === 'string') { - regex = RegExp(regex); - } - - if (!files && !pipe) { - common.error('no files given'); - } - - files = [].slice.call(arguments, 3); - - if (pipe) { - files.unshift('-'); - } - - var sed = []; - files.forEach(function (file) { - if (!fs.existsSync(file) && file !== '-') { - common.error('no such file or directory: ' + file, 2, { continue: true }); - return; - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - var lines = contents.split('\n'); - var result = lines.map(function (line) { - return line.replace(regex, replacement); - }).join('\n'); - - sed.push(result); - - if (options.inplace) { - fs.writeFileSync(file, result, 'utf8'); - } - }); - - if (options.inplace) { - return ''; - } else { - return sed.join('\n'); - } -} -module.exports = _sed; diff --git a/node_modules/shelljs/src/set.js b/node_modules/shelljs/src/set.js deleted file mode 100644 index 6f37bc9..0000000 --- a/node_modules/shelljs/src/set.js +++ /dev/null @@ -1,55 +0,0 @@ -var common = require('./common'); - -common.register('set', _set, { - allowGlobbing: false, - wrapOutput: false, -}); - -//@ -//@ ### set(options) -//@ -//@ Available options: -//@ -//@ + `+/-e`: exit upon error (`config.fatal`) -//@ + `+/-v`: verbose: show all commands (`config.verbose`) -//@ + `+/-f`: disable filename expansion (globbing) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ set('-e'); // exit upon first error -//@ set('+e'); // this undoes a "set('-e')" -//@ ``` -//@ -//@ Sets global configuration variables. 
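A sketch of what toggling `-e` does in practice: with `config.fatal` enabled, failing commands throw instead of merely recording an error (this is the behavior of recent shelljs releases; the directory below is intentionally nonexistent):

```javascript
// Minimal sketch: toggling fatal mode with set().
// Assumes shelljs is installed; 'no/such/dir' intentionally does not exist.
var shell = require('shelljs');

shell.set('-e');             // equivalent to config.fatal = true
try {
  shell.cd('no/such/dir');   // now throws instead of just setting an error code
} catch (e) {
  console.error('fatal mode caught:', e.message);
}
shell.set('+e');             // back to the default, non-throwing behavior
```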
-function _set(options) { - if (!options) { - var args = [].slice.call(arguments, 0); - if (args.length < 2) common.error('must provide an argument'); - options = args[1]; - } - var negate = (options[0] === '+'); - if (negate) { - options = '-' + options.slice(1); // parseOptions needs a '-' prefix - } - options = common.parseOptions(options, { - 'e': 'fatal', - 'v': 'verbose', - 'f': 'noglob', - }); - - if (negate) { - Object.keys(options).forEach(function (key) { - options[key] = !options[key]; - }); - } - - Object.keys(options).forEach(function (key) { - // Only change the global config if `negate` is false and the option is true - // or if `negate` is true and the option is false (aka negate !== option) - if (negate !== options[key]) { - common.config[key] = options[key]; - } - }); -} -module.exports = _set; diff --git a/node_modules/shelljs/src/sort.js b/node_modules/shelljs/src/sort.js deleted file mode 100644 index 66b042c..0000000 --- a/node_modules/shelljs/src/sort.js +++ /dev/null @@ -1,98 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('sort', _sort, { - canReceivePipe: true, - cmdOptions: { - 'r': 'reverse', - 'n': 'numerical', - }, -}); - -// parse out the number prefix of a line -function parseNumber(str) { - var match = str.match(/^\s*(\d*)\s*(.*)$/); - return { num: Number(match[1]), value: match[2] }; -} - -// compare two strings case-insensitively, but examine case for strings that are -// case-insensitive equivalent -function unixCmp(a, b) { - var aLower = a.toLowerCase(); - var bLower = b.toLowerCase(); - return (aLower === bLower ? - -1 * a.localeCompare(b) : // unix sort treats case opposite how javascript does - aLower.localeCompare(bLower)); -} - -// compare two strings in the fashion that unix sort's -n option works -function numericalCmp(a, b) { - var objA = parseNumber(a); - var objB = parseNumber(b); - if (objA.hasOwnProperty('num') && objB.hasOwnProperty('num')) { - return ((objA.num !== objB.num) ? - (objA.num - objB.num) : - unixCmp(objA.value, objB.value)); - } else { - return unixCmp(objA.value, objB.value); - } -} - -//@ -//@ ### sort([options,] file [, file ...]) -//@ ### sort([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-r`: Reverse the results -//@ + `-n`: Compare according to numerical value -//@ -//@ Examples: -//@ -//@ ```javascript -//@ sort('foo.txt', 'bar.txt'); -//@ sort('-r', 'foo.txt'); -//@ ``` -//@ -//@ Return the contents of the `file`s, sorted line-by-line as a -//@ [ShellString](#shellstringstr). Sorting multiple files mixes their content -//@ (just as unix `sort` does). -function _sort(options, files) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no files given'); - - files = [].slice.call(arguments, 1); - - if (pipe) { - files.unshift('-'); - } - - var lines = files.reduce(function (accum, file) { - if (file !== '-') { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file, { continue: true }); - return accum; - } else if (common.statFollowLinks(file).isDirectory()) { - common.error('read failed: ' + file + ': Is a directory', { - continue: true, - }); - return accum; - } - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - return accum.concat(contents.trimRight().split('\n')); - }, []); - - var sorted = lines.sort(options.numerical ? 
numericalCmp : unixCmp); - - if (options.reverse) { - sorted = sorted.reverse(); - } - - return sorted.join('\n') + '\n'; -} - -module.exports = _sort; diff --git a/node_modules/shelljs/src/tail.js b/node_modules/shelljs/src/tail.js deleted file mode 100644 index eee75c5..0000000 --- a/node_modules/shelljs/src/tail.js +++ /dev/null @@ -1,90 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('tail', _tail, { - canReceivePipe: true, - cmdOptions: { - 'n': 'numLines', - }, -}); - -//@ -//@ ### tail([{'-n': \},] file [, file ...]) -//@ ### tail([{'-n': \},] file_array) -//@ -//@ Available options: -//@ -//@ + `-n `: Show the last `` lines of `file`s -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var str = tail({'-n': 1}, 'file*.txt'); -//@ var str = tail('file1', 'file2'); -//@ var str = tail(['file1', 'file2']); // same as above -//@ ``` -//@ -//@ Read the end of a `file`. Returns a [ShellString](#shellstringstr). -function _tail(options, files) { - var tail = []; - var pipe = common.readFromPipe(); - - if (!files && !pipe) common.error('no paths given'); - - var idx = 1; - var plusOption = false; - if (options.numLines === true) { - idx = 2; - if (arguments[1][0] === '+') { - plusOption = true; - } - options.numLines = Number(arguments[1]); - } else if (options.numLines === false) { - options.numLines = 10; - } - // arguments[0] is a json object - if (arguments[0].numLines[0] === '+') { - plusOption = true; - } - options.numLines = -1 * Math.abs(options.numLines); - files = [].slice.call(arguments, idx); - - if (pipe) { - files.unshift('-'); - } - - var shouldAppendNewline = false; - files.forEach(function (file) { - if (file !== '-') { - if (!fs.existsSync(file)) { - common.error('no such file or directory: ' + file, { continue: true }); - return; - } else if (common.statFollowLinks(file).isDirectory()) { - common.error("error reading '" + file + "': Is a directory", { - continue: true, - }); - return; - } - } - - var contents = file === '-' ? pipe : fs.readFileSync(file, 'utf8'); - - var lines = contents.split('\n'); - if (lines[lines.length - 1] === '') { - lines.pop(); - shouldAppendNewline = true; - } else { - shouldAppendNewline = false; - } - - tail = tail.concat(plusOption ? lines.slice(-options.numLines - 1) : lines.slice(options.numLines)); - }); - - if (shouldAppendNewline) { - tail.push(''); // to add a trailing newline once we join - } - - return tail.join('\n'); -} - -module.exports = _tail; diff --git a/node_modules/shelljs/src/tempdir.js b/node_modules/shelljs/src/tempdir.js deleted file mode 100644 index b6f7796..0000000 --- a/node_modules/shelljs/src/tempdir.js +++ /dev/null @@ -1,75 +0,0 @@ -var os = require('os'); -var fs = require('fs'); -var common = require('./common'); - -common.register('tempdir', _tempDir, { - allowGlobbing: false, - wrapOutput: false, -}); - -// Returns false if 'dir' is not a writeable directory, 'dir' otherwise -function writeableDir(dir) { - if (!dir || !fs.existsSync(dir)) return false; - - if (!common.statFollowLinks(dir).isDirectory()) return false; - - var testFile = dir + '/' + common.randomFileName(); - try { - fs.writeFileSync(testFile, ' '); - common.unlinkSync(testFile); - return dir; - } catch (e) { - /* istanbul ignore next */ - return false; - } -} - -// Variable to cache the tempdir value for successive lookups. 
-var cachedTempDir; - -//@ -//@ ### tempdir() -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var tmp = tempdir(); // "/tmp" for most *nix platforms -//@ ``` -//@ -//@ Searches and returns string containing a writeable, platform-dependent temporary directory. -//@ Follows Python's [tempfile algorithm](http://docs.python.org/library/tempfile.html#tempfile.tempdir). -function _tempDir() { - if (cachedTempDir) return cachedTempDir; - - cachedTempDir = writeableDir(os.tmpdir()) || - writeableDir(process.env.TMPDIR) || - writeableDir(process.env.TEMP) || - writeableDir(process.env.TMP) || - writeableDir(process.env.Wimp$ScrapDir) || // RiscOS - writeableDir('C:\\TEMP') || // Windows - writeableDir('C:\\TMP') || // Windows - writeableDir('\\TEMP') || // Windows - writeableDir('\\TMP') || // Windows - writeableDir('/tmp') || - writeableDir('/var/tmp') || - writeableDir('/usr/tmp') || - writeableDir('.'); // last resort - - return cachedTempDir; -} - -// Indicates if the tempdir value is currently cached. This is exposed for tests -// only. The return value should only be tested for truthiness. -function isCached() { - return cachedTempDir; -} - -// Clears the cached tempDir value, if one is cached. This is exposed for tests -// only. -function clearCache() { - cachedTempDir = undefined; -} - -module.exports.tempDir = _tempDir; -module.exports.isCached = isCached; -module.exports.clearCache = clearCache; diff --git a/node_modules/shelljs/src/test.js b/node_modules/shelljs/src/test.js deleted file mode 100644 index 7e76908..0000000 --- a/node_modules/shelljs/src/test.js +++ /dev/null @@ -1,86 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('test', _test, { - cmdOptions: { - 'b': 'block', - 'c': 'character', - 'd': 'directory', - 'e': 'exists', - 'f': 'file', - 'L': 'link', - 'p': 'pipe', - 'S': 'socket', - }, - wrapOutput: false, - allowGlobbing: false, -}); - - -//@ -//@ ### test(expression) -//@ -//@ Available expression primaries: -//@ -//@ + `'-b', 'path'`: true if path is a block device -//@ + `'-c', 'path'`: true if path is a character device -//@ + `'-d', 'path'`: true if path is a directory -//@ + `'-e', 'path'`: true if path exists -//@ + `'-f', 'path'`: true if path is a regular file -//@ + `'-L', 'path'`: true if path is a symbolic link -//@ + `'-p', 'path'`: true if path is a pipe (FIFO) -//@ + `'-S', 'path'`: true if path is a socket -//@ -//@ Examples: -//@ -//@ ```javascript -//@ if (test('-d', path)) { /* do something with dir */ }; -//@ if (!test('-f', path)) continue; // skip if it's not a regular file -//@ ``` -//@ -//@ Evaluates `expression` using the available primaries and returns -//@ corresponding boolean value. 
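Worth noting from the implementation below: `-L` uses `statNoFollowLinks` (an `lstat`), while the other primaries follow links. A minimal sketch, assuming shelljs is installed and that `current.log` is a hypothetical symlink to a regular file:

```javascript
// Minimal sketch: distinguishing a symlink from its target with test().
// Assumes shelljs is installed; 'current.log' is a hypothetical symlink
// pointing at a regular file.
var shell = require('shelljs');

if (shell.test('-L', 'current.log')) {
  // -L inspects the link itself (lstat), so this is true for the symlink...
  console.log('current.log is a symbolic link');
}
if (shell.test('-f', 'current.log')) {
  // ...while -f follows the link, so this is true as long as the target
  // is a regular file.
  console.log('current.log resolves to a regular file');
}
```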
-function _test(options, path) { - if (!path) common.error('no path given'); - - var canInterpret = false; - Object.keys(options).forEach(function (key) { - if (options[key] === true) { - canInterpret = true; - } - }); - - if (!canInterpret) common.error('could not interpret expression'); - - if (options.link) { - try { - return common.statNoFollowLinks(path).isSymbolicLink(); - } catch (e) { - return false; - } - } - - if (!fs.existsSync(path)) return false; - - if (options.exists) return true; - - var stats = common.statFollowLinks(path); - - if (options.block) return stats.isBlockDevice(); - - if (options.character) return stats.isCharacterDevice(); - - if (options.directory) return stats.isDirectory(); - - if (options.file) return stats.isFile(); - - /* istanbul ignore next */ - if (options.pipe) return stats.isFIFO(); - - /* istanbul ignore next */ - if (options.socket) return stats.isSocket(); - - /* istanbul ignore next */ - return false; // fallback -} // test -module.exports = _test; diff --git a/node_modules/shelljs/src/to.js b/node_modules/shelljs/src/to.js deleted file mode 100644 index e4b064f..0000000 --- a/node_modules/shelljs/src/to.js +++ /dev/null @@ -1,38 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('to', _to, { - pipeOnly: true, - wrapOutput: false, -}); - -//@ -//@ ### ShellString.prototype.to(file) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ cat('input.txt').to('output.txt'); -//@ ``` -//@ -//@ Analogous to the redirection operator `>` in Unix, but works with -//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). _Like Unix -//@ redirections, `to()` will overwrite any existing file!_ Returns the same -//@ [ShellString](#shellstringstr) this operated on, to support chaining. -function _to(options, file) { - if (!file) common.error('wrong arguments'); - - if (!fs.existsSync(path.dirname(file))) { - common.error('no such file or directory: ' + path.dirname(file)); - } - - try { - fs.writeFileSync(file, this.stdout || this.toString(), 'utf8'); - return this; - } catch (e) { - /* istanbul ignore next */ - common.error('could not write to file (code ' + e.code + '): ' + file, { continue: true }); - } -} -module.exports = _to; diff --git a/node_modules/shelljs/src/toEnd.js b/node_modules/shelljs/src/toEnd.js deleted file mode 100644 index dc30e62..0000000 --- a/node_modules/shelljs/src/toEnd.js +++ /dev/null @@ -1,37 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('toEnd', _toEnd, { - pipeOnly: true, - wrapOutput: false, -}); - -//@ -//@ ### ShellString.prototype.toEnd(file) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ cat('input.txt').toEnd('output.txt'); -//@ ``` -//@ -//@ Analogous to the redirect-and-append operator `>>` in Unix, but works with -//@ `ShellStrings` (such as those returned by `cat`, `grep`, etc.). Returns the -//@ same [ShellString](#shellstringstr) this operated on, to support chaining. 
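Because `toEnd` lives on `ShellString.prototype`, it also works on ShellStrings constructed directly, not only on command output. A minimal sketch, assuming shelljs is installed; the file names are arbitrary examples:

```javascript
// Minimal sketch: appending to a file with toEnd().
// Assumes shelljs is installed; 'build.log' and 'summary.txt' are
// arbitrary example files (summary.txt must exist for the second call).
var shell = require('shelljs');

// toEnd() is a ShellString.prototype method, so any ShellString can be
// appended, not just the output of cat()/grep()/etc.
new shell.ShellString('build started: ' + new Date().toISOString() + '\n')
  .toEnd('build.log');

// Chaining still works, since toEnd() returns the same ShellString.
shell.cat('summary.txt').toEnd('build.log');
```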
-function _toEnd(options, file) { - if (!file) common.error('wrong arguments'); - - if (!fs.existsSync(path.dirname(file))) { - common.error('no such file or directory: ' + path.dirname(file)); - } - - try { - fs.appendFileSync(file, this.stdout || this.toString(), 'utf8'); - return this; - } catch (e) { - /* istanbul ignore next */ - common.error('could not append to file (code ' + e.code + '): ' + file, { continue: true }); - } -} -module.exports = _toEnd; diff --git a/node_modules/shelljs/src/touch.js b/node_modules/shelljs/src/touch.js deleted file mode 100644 index a268586..0000000 --- a/node_modules/shelljs/src/touch.js +++ /dev/null @@ -1,117 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -common.register('touch', _touch, { - cmdOptions: { - 'a': 'atime_only', - 'c': 'no_create', - 'd': 'date', - 'm': 'mtime_only', - 'r': 'reference', - }, -}); - -//@ -//@ ### touch([options,] file [, file ...]) -//@ ### touch([options,] file_array) -//@ -//@ Available options: -//@ -//@ + `-a`: Change only the access time -//@ + `-c`: Do not create any files -//@ + `-m`: Change only the modification time -//@ + `{'-d': someDate}`, `{date: someDate}`: Use a `Date` instance (ex. `someDate`) -//@ instead of current time -//@ + `{'-r': file}`, `{reference: file}`: Use `file`'s times instead of current -//@ time -//@ -//@ Examples: -//@ -//@ ```javascript -//@ touch('source.js'); -//@ touch('-c', 'path/to/file.js'); -//@ touch({ '-r': 'referenceFile.txt' }, 'path/to/file.js'); -//@ touch({ '-d': new Date('December 17, 1995 03:24:00'), '-m': true }, 'path/to/file.js'); -//@ touch({ date: new Date('December 17, 1995 03:24:00') }, 'path/to/file.js'); -//@ ``` -//@ -//@ Update the access and modification times of each file to the current time. -//@ A file argument that does not exist is created empty, unless `-c` is supplied. -//@ This is a partial implementation of -//@ [`touch(1)`](http://linux.die.net/man/1/touch). Returns a -//@ [ShellString](#shellstringstr) indicating success or failure. -function _touch(opts, files) { - if (!files) { - common.error('no files given'); - } else if (typeof files === 'string') { - files = [].slice.call(arguments, 1); - } else { - common.error('file arg should be a string file path or an Array of string file paths'); - } - - files.forEach(function (f) { - touchFile(opts, f); - }); - return ''; -} - -function touchFile(opts, file) { - var stat = tryStatFile(file); - - if (stat && stat.isDirectory()) { - // don't error just exit - return; - } - - // if the file doesn't already exist and the user has specified --no-create then - // this script is finished - if (!stat && opts.no_create) { - return; - } - - // open the file and then close it. 
this will create it if it doesn't exist but will - // not truncate the file - fs.closeSync(fs.openSync(file, 'a')); - - // - // Set timestamps - // - - // setup some defaults - var now = new Date(); - var mtime = opts.date || now; - var atime = opts.date || now; - - // use reference file - if (opts.reference) { - var refStat = tryStatFile(opts.reference); - if (!refStat) { - common.error('failed to get attributess of ' + opts.reference); - } - mtime = refStat.mtime; - atime = refStat.atime; - } else if (opts.date) { - mtime = opts.date; - atime = opts.date; - } - - if (opts.atime_only && opts.mtime_only) { - // keep the new values of mtime and atime like GNU - } else if (opts.atime_only) { - mtime = stat.mtime; - } else if (opts.mtime_only) { - atime = stat.atime; - } - - fs.utimesSync(file, atime, mtime); -} - -module.exports = _touch; - -function tryStatFile(filePath) { - try { - return common.statFollowLinks(filePath); - } catch (e) { - return null; - } -} diff --git a/node_modules/shelljs/src/uniq.js b/node_modules/shelljs/src/uniq.js deleted file mode 100644 index 5802706..0000000 --- a/node_modules/shelljs/src/uniq.js +++ /dev/null @@ -1,93 +0,0 @@ -var fs = require('fs'); -var common = require('./common'); - -// add c spaces to the left of str -function lpad(c, str) { - var res = '' + str; - if (res.length < c) { - res = Array((c - res.length) + 1).join(' ') + res; - } - return res; -} - -common.register('uniq', _uniq, { - canReceivePipe: true, - cmdOptions: { - 'i': 'ignoreCase', - 'c': 'count', - 'd': 'duplicates', - }, -}); - -//@ -//@ ### uniq([options,] [input, [output]]) -//@ -//@ Available options: -//@ -//@ + `-i`: Ignore case while comparing -//@ + `-c`: Prefix lines by the number of occurrences -//@ + `-d`: Only print duplicate lines, one for each group of identical lines -//@ -//@ Examples: -//@ -//@ ```javascript -//@ uniq('foo.txt'); -//@ uniq('-i', 'foo.txt'); -//@ uniq('-cd', 'foo.txt', 'bar.txt'); -//@ ``` -//@ -//@ Filter adjacent matching lines from `input`. Returns a -//@ [ShellString](#shellstringstr). -function _uniq(options, input, output) { - // Check if this is coming from a pipe - var pipe = common.readFromPipe(); - - if (!pipe) { - if (!input) common.error('no input given'); - - if (!fs.existsSync(input)) { - common.error(input + ': No such file or directory'); - } else if (common.statFollowLinks(input).isDirectory()) { - common.error("error reading '" + input + "'"); - } - } - if (output && fs.existsSync(output) && common.statFollowLinks(output).isDirectory()) { - common.error(output + ': Is a directory'); - } - - var lines = (input ? fs.readFileSync(input, 'utf8') : pipe) - .trimRight() - .split('\n'); - - var compare = function (a, b) { - return options.ignoreCase ? - a.toLocaleLowerCase().localeCompare(b.toLocaleLowerCase()) : - a.localeCompare(b); - }; - var uniqed = lines.reduceRight(function (res, e) { - // Perform uniq -c on the input - if (res.length === 0) { - return [{ count: 1, ln: e }]; - } else if (compare(res[0].ln, e) === 0) { - return [{ count: res[0].count + 1, ln: e }].concat(res.slice(1)); - } else { - return [{ count: 1, ln: e }].concat(res); - } - }, []).filter(function (obj) { - // Do we want only duplicated objects? - return options.duplicates ? obj.count > 1 : true; - }).map(function (obj) { - // Are we tracking the counts of each line? - return (options.count ? 
(lpad(7, obj.count) + ' ') : '') + obj.ln; - }).join('\n') + '\n'; - - if (output) { - (new common.ShellString(uniqed)).to(output); - // if uniq writes to output, nothing is passed to the next command in the pipeline (if any) - return ''; - } else { - return uniqed; - } -} - -module.exports = _uniq; diff --git a/node_modules/shelljs/src/which.js b/node_modules/shelljs/src/which.js deleted file mode 100644 index 8ac7b77..0000000 --- a/node_modules/shelljs/src/which.js +++ /dev/null @@ -1,119 +0,0 @@ -var fs = require('fs'); -var path = require('path'); -var common = require('./common'); - -common.register('which', _which, { - allowGlobbing: false, - cmdOptions: { - 'a': 'all', - }, -}); - -// XP's system default value for `PATHEXT` system variable, just in case it's not -// set on Windows. -var XP_DEFAULT_PATHEXT = '.com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh'; - -// For earlier versions of NodeJS that doesn't have a list of constants (< v6) -var FILE_EXECUTABLE_MODE = 1; - -function isWindowsPlatform() { - return process.platform === 'win32'; -} - -// Cross-platform method for splitting environment `PATH` variables -function splitPath(p) { - return p ? p.split(path.delimiter) : []; -} - -// Tests are running all cases for this func but it stays uncovered by codecov due to unknown reason -/* istanbul ignore next */ -function isExecutable(pathName) { - try { - // TODO(node-support): replace with fs.constants.X_OK once remove support for node < v6 - fs.accessSync(pathName, FILE_EXECUTABLE_MODE); - } catch (err) { - return false; - } - return true; -} - -function checkPath(pathName) { - return fs.existsSync(pathName) && !common.statFollowLinks(pathName).isDirectory() - && (isWindowsPlatform() || isExecutable(pathName)); -} - -//@ -//@ ### which(command) -//@ -//@ Examples: -//@ -//@ ```javascript -//@ var nodeExec = which('node'); -//@ ``` -//@ -//@ Searches for `command` in the system's `PATH`. On Windows, this uses the -//@ `PATHEXT` variable to append the extension if it's not already executable. -//@ Returns a [ShellString](#shellstringstr) containing the absolute path to -//@ `command`. -function _which(options, cmd) { - if (!cmd) common.error('must specify command'); - - var isWindows = isWindowsPlatform(); - var pathArray = splitPath(process.env.PATH); - - var queryMatches = []; - - // No relative/absolute paths provided? - if (!cmd.includes('/')) { - // Assume that there are no extensions to append to queries (this is the - // case for unix) - var pathExtArray = ['']; - if (isWindows) { - // In case the PATHEXT variable is somehow not set (e.g. - // child_process.spawn with an empty environment), use the XP default. 
- var pathExtEnv = process.env.PATHEXT || XP_DEFAULT_PATHEXT; - pathExtArray = splitPath(pathExtEnv.toUpperCase()); - } - - // Search for command in PATH - for (var k = 0; k < pathArray.length; k++) { - // already found it - if (queryMatches.length > 0 && !options.all) break; - - var attempt = path.resolve(pathArray[k], cmd); - - if (isWindows) { - attempt = attempt.toUpperCase(); - } - - var match = attempt.match(/\.[^<>:"/|?*.]+$/); - if (match && pathExtArray.includes(match[0])) { // this is Windows-only - // The user typed a query with the file extension, like - // `which('node.exe')` - if (checkPath(attempt)) { - queryMatches.push(attempt); - break; - } - } else { // All-platforms - // Cycle through the PATHEXT array, and check each extension - // Note: the array is always [''] on Unix - for (var i = 0; i < pathExtArray.length; i++) { - var ext = pathExtArray[i]; - var newAttempt = attempt + ext; - if (checkPath(newAttempt)) { - queryMatches.push(newAttempt); - break; - } - } - } - } - } else if (checkPath(cmd)) { // a valid absolute or relative path - queryMatches.push(path.resolve(cmd)); - } - - if (queryMatches.length > 0) { - return options.all ? queryMatches : queryMatches[0]; - } - return options.all ? [] : null; -} -module.exports = _which; diff --git a/node_modules/signal-exit/LICENSE.txt b/node_modules/signal-exit/LICENSE.txt deleted file mode 100644 index eead04a..0000000 --- a/node_modules/signal-exit/LICENSE.txt +++ /dev/null @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) 2015, Contributors - -Permission to use, copy, modify, and/or distribute this software -for any purpose with or without fee is hereby granted, provided -that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE -LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES -OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, -ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/signal-exit/README.md b/node_modules/signal-exit/README.md deleted file mode 100644 index f9c7c00..0000000 --- a/node_modules/signal-exit/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# signal-exit - -[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) -[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) -[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) -[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) - -When you want to fire an event no matter how a process exits: - -* reaching the end of execution. -* explicitly having `process.exit(code)` called. -* having `process.kill(pid, sig)` called. -* receiving a fatal signal from outside the process - -Use `signal-exit`. - -```js -var onExit = require('signal-exit') - -onExit(function (code, signal) { - console.log('process exited!') -}) -``` - -## API - -`var remove = onExit(function (code, signal) {}, options)` - -The return value of the function is a function that will remove the -handler. 
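A short sketch of using that return value to detach the handler, assuming the signal-exit 3.x API removed by this patch:

```js
// Minimal sketch: detaching a signal-exit handler once it is no longer needed.
// Assumes signal-exit v3 (the version deleted here), whose default export is
// the onExit function.
var onExit = require('signal-exit')

var remove = onExit(function (code, signal) {
  console.log('process exited with', code, signal)
})

// Later, if the handler should no longer fire, call the returned function.
remove()
```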
- -Note that the function *only* fires for signals if the signal would -cause the process to exit. That is, there are no other listeners, and -it is a fatal signal. - -## Options - -* `alwaysLast`: Run this handler after any other signal or exit - handlers. This causes `process.emit` to be monkeypatched. diff --git a/node_modules/signal-exit/index.js b/node_modules/signal-exit/index.js deleted file mode 100644 index 93703f3..0000000 --- a/node_modules/signal-exit/index.js +++ /dev/null @@ -1,202 +0,0 @@ -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. -// grab a reference to node's real process object right away -var process = global.process - -const processOk = function (process) { - return process && - typeof process === 'object' && - typeof process.removeListener === 'function' && - typeof process.emit === 'function' && - typeof process.reallyExit === 'function' && - typeof process.listeners === 'function' && - typeof process.kill === 'function' && - typeof process.pid === 'number' && - typeof process.on === 'function' -} - -// some kind of non-node environment, just no-op -/* istanbul ignore if */ -if (!processOk(process)) { - module.exports = function () { - return function () {} - } -} else { - var assert = require('assert') - var signals = require('./signals.js') - var isWin = /^win/i.test(process.platform) - - var EE = require('events') - /* istanbul ignore if */ - if (typeof EE !== 'function') { - EE = EE.EventEmitter - } - - var emitter - if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ - } else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} - } - - // Because this emitter is a global, we have to check to see if a - // previous version of this library failed to enable infinite listeners. - // I know what you're about to say. But literally everything about - // signal-exit is a compromise with evil. Get used to it. - if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true - } - - module.exports = function (cb, opts) { - /* istanbul ignore if */ - if (!processOk(global.process)) { - return function () {} - } - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') - - if (loaded === false) { - load() - } - - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } - - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() - } - } - emitter.on(ev, cb) - - return remove - } - - var unload = function unload () { - if (!loaded || !processOk(global.process)) { - return - } - loaded = false - - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 - } - module.exports.unload = unload - - var emit = function emit (event, code, signal) { - /* istanbul ignore if */ - if (emitter.emitted[event]) { - return - } - emitter.emitted[event] = true - emitter.emit(event, code, signal) - } - - // { : , ... 
} - var sigListeners = {} - signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - /* istanbul ignore if */ - if (!processOk(global.process)) { - return - } - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. - var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' - } - /* istanbul ignore next */ - process.kill(process.pid, sig) - } - } - }) - - module.exports.signals = function () { - return signals - } - - var loaded = false - - var load = function load () { - if (loaded || !processOk(global.process)) { - return - } - loaded = true - - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. - emitter.count += 1 - - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false - } - }) - - process.emit = processEmit - process.reallyExit = processReallyExit - } - module.exports.load = load - - var originalProcessReallyExit = process.reallyExit - var processReallyExit = function processReallyExit (code) { - /* istanbul ignore if */ - if (!processOk(global.process)) { - return - } - process.exitCode = code || /* istanbul ignore next */ 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) - } - - var originalProcessEmit = process.emit - var processEmit = function processEmit (ev, arg) { - if (ev === 'exit' && processOk(global.process)) { - /* istanbul ignore else */ - if (arg !== undefined) { - process.exitCode = arg - } - var ret = originalProcessEmit.apply(this, arguments) - /* istanbul ignore next */ - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - return ret - } else { - return originalProcessEmit.apply(this, arguments) - } - } -} diff --git a/node_modules/signal-exit/package.json b/node_modules/signal-exit/package.json deleted file mode 100644 index e1a0031..0000000 --- a/node_modules/signal-exit/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "signal-exit", - "version": "3.0.7", - "description": "when you want to fire an event no matter how a process exits.", - "main": "index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "files": [ - "index.js", - "signals.js" - ], - "repository": { - "type": "git", - "url": "https://github.com/tapjs/signal-exit.git" - }, - "keywords": [ - "signal", - "exit" - ], - "author": "Ben Coe ", - "license": "ISC", - "bugs": { - "url": "https://github.com/tapjs/signal-exit/issues" - }, - "homepage": "https://github.com/tapjs/signal-exit", - "devDependencies": { - "chai": "^3.5.0", - "coveralls": "^3.1.1", - "nyc": "^15.1.0", - "standard-version": "^9.3.1", - "tap": "^15.1.1" - } -} diff --git 
a/node_modules/signal-exit/signals.js b/node_modules/signal-exit/signals.js deleted file mode 100644 index 3bd67a8..0000000 --- a/node_modules/signal-exit/signals.js +++ /dev/null @@ -1,53 +0,0 @@ -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] - -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) -} - -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} diff --git a/node_modules/strip-final-newline/index.js b/node_modules/strip-final-newline/index.js deleted file mode 100644 index 78fc0c5..0000000 --- a/node_modules/strip-final-newline/index.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -module.exports = input => { - const LF = typeof input === 'string' ? '\n' : '\n'.charCodeAt(); - const CR = typeof input === 'string' ? '\r' : '\r'.charCodeAt(); - - if (input[input.length - 1] === LF) { - input = input.slice(0, input.length - 1); - } - - if (input[input.length - 1] === CR) { - input = input.slice(0, input.length - 1); - } - - return input; -}; diff --git a/node_modules/strip-final-newline/license b/node_modules/strip-final-newline/license deleted file mode 100644 index e7af2f7..0000000 --- a/node_modules/strip-final-newline/license +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (sindresorhus.com) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
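As a quick illustration of the strip-final-newline implementation shown above (a sketch, not part of the removed package): the code strips at most one trailing LF and then one trailing CR, so a Windows-style `\r\n` ending is removed in full, and Buffers behave the same as strings.

```js
const stripFinalNewline = require('strip-final-newline');

// One trailing "\r\n" is removed in full: the LF check runs first, then the CR check.
stripFinalNewline('foo\r\n');
//=> 'foo'

// Buffers are compared by char code, so the behaviour matches the string case.
stripFinalNewline(Buffer.from('foo\r\n')).toString();
//=> 'foo'
```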
diff --git a/node_modules/strip-final-newline/package.json b/node_modules/strip-final-newline/package.json deleted file mode 100644 index d9f2a6c..0000000 --- a/node_modules/strip-final-newline/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "strip-final-newline", - "version": "2.0.0", - "description": "Strip the final newline character from a string/buffer", - "license": "MIT", - "repository": "sindresorhus/strip-final-newline", - "author": { - "name": "Sindre Sorhus", - "email": "sindresorhus@gmail.com", - "url": "sindresorhus.com" - }, - "engines": { - "node": ">=6" - }, - "scripts": { - "test": "xo && ava" - }, - "files": [ - "index.js" - ], - "keywords": [ - "strip", - "trim", - "remove", - "delete", - "final", - "last", - "end", - "file", - "newline", - "linebreak", - "character", - "string", - "buffer" - ], - "devDependencies": { - "ava": "^0.25.0", - "xo": "^0.23.0" - } -} diff --git a/node_modules/strip-final-newline/readme.md b/node_modules/strip-final-newline/readme.md deleted file mode 100644 index 32dfd50..0000000 --- a/node_modules/strip-final-newline/readme.md +++ /dev/null @@ -1,30 +0,0 @@ -# strip-final-newline [![Build Status](https://travis-ci.com/sindresorhus/strip-final-newline.svg?branch=master)](https://travis-ci.com/sindresorhus/strip-final-newline) - -> Strip the final [newline character](https://en.wikipedia.org/wiki/Newline) from a string/buffer - -Can be useful when parsing the output of, for example, `ChildProcess#execFile`, as [binaries usually output a newline at the end](https://stackoverflow.com/questions/729692/why-should-text-files-end-with-a-newline). Normally, you would use `stdout.trim()`, but that would also remove newlines at the start and whitespace. - - -## Install - -``` -$ npm install strip-final-newline -``` - - -## Usage - -```js -const stripFinalNewline = require('strip-final-newline'); - -stripFinalNewline('foo\nbar\n\n'); -//=> 'foo\nbar\n' - -stripFinalNewline(Buffer.from('foo\nbar\n\n')).toString(); -//=> 'foo\nbar\n' -``` - - -## License - -MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/to-regex-range/LICENSE b/node_modules/to-regex-range/LICENSE deleted file mode 100644 index 7cccaf9..0000000 --- a/node_modules/to-regex-range/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015-present, Jon Schlinkert. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/node_modules/to-regex-range/README.md b/node_modules/to-regex-range/README.md deleted file mode 100644 index 38887da..0000000 --- a/node_modules/to-regex-range/README.md +++ /dev/null @@ -1,305 +0,0 @@ -# to-regex-range [![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=W8YFZ425KND68) [![NPM version](https://img.shields.io/npm/v/to-regex-range.svg?style=flat)](https://www.npmjs.com/package/to-regex-range) [![NPM monthly downloads](https://img.shields.io/npm/dm/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![NPM total downloads](https://img.shields.io/npm/dt/to-regex-range.svg?style=flat)](https://npmjs.org/package/to-regex-range) [![Linux Build Status](https://img.shields.io/travis/micromatch/to-regex-range.svg?style=flat&label=Travis)](https://travis-ci.org/micromatch/to-regex-range) - -> Pass two numbers, get a regex-compatible source string for matching ranges. Validated against more than 2.78 million test assertions. - -Please consider following this project's author, [Jon Schlinkert](https://github.com/jonschlinkert), and consider starring the project to show your :heart: and support. - -## Install - -Install with [npm](https://www.npmjs.com/): - -```sh -$ npm install --save to-regex-range -``` - -
-What does this do? - -
- -This library generates the `source` string to be passed to `new RegExp()` for matching a range of numbers. - -**Example** - -```js -const toRegexRange = require('to-regex-range'); -const regex = new RegExp(toRegexRange('15', '95')); -``` - -A string is returned so that you can do whatever you need with it before passing it to `new RegExp()` (like adding `^` or `$` boundaries, defining flags, or combining it with another string). - -
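For instance (an illustrative sketch, not from the original README; the `id-` prefix pattern and the sample values are made up for the example), the returned source can be anchored, given flags, or embedded in a larger pattern before compiling:

```js
const toRegexRange = require('to-regex-range');

const source = toRegexRange('15', '95');

// Anchor the source before compiling it, so only whole numbers in range match.
const exact = new RegExp(`^(?:${source})$`);
console.log(exact.test('42'));  //=> true
console.log(exact.test('142')); //=> false

// Or combine it with another pattern and add flags.
const ids = new RegExp(`id-(?:${source})\\b`, 'g');
console.log('id-20 id-99'.match(ids)); //=> [ 'id-20' ]
```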
- -
- -
-Why use this library? - -
- -### Convenience - -Creating regular expressions for matching numbers gets deceptively complicated pretty fast. - -For example, let's say you need a validation regex for matching part of a user-id, postal code, social security number, tax id, etc: - -* regex for matching `1` => `/1/` (easy enough) -* regex for matching `1` through `5` => `/[1-5]/` (not bad...) -* regex for matching `1` or `5` => `/(1|5)/` (still easy...) -* regex for matching `1` through `50` => `/([1-9]|[1-4][0-9]|50)/` (uh-oh...) -* regex for matching `1` through `55` => `/([1-9]|[1-4][0-9]|5[0-5])/` (no prob, I can do this...) -* regex for matching `1` through `555` => `/([1-9]|[1-9][0-9]|[1-4][0-9]{2}|5[0-4][0-9]|55[0-5])/` (maybe not...) -* regex for matching `0001` through `5555` => `/(0{3}[1-9]|0{2}[1-9][0-9]|0[1-9][0-9]{2}|[1-4][0-9]{3}|5[0-4][0-9]{2}|55[0-4][0-9]|555[0-5])/` (okay, I get the point!) - -The numbers are contrived, but they're also really basic. In the real world you might need to generate a regex on-the-fly for validation. - -**Learn more** - -If you're interested in learning more about [character classes](http://www.regular-expressions.info/charclass.html) and other regex features, I personally have always found [regular-expressions.info](http://www.regular-expressions.info/charclass.html) to be pretty useful. - -### Heavily tested - -As of April 07, 2019, this library runs [>1m test assertions](./test/test.js) against generated regex-ranges to provide brute-force verification that results are correct. - -Tests run in ~280ms on my MacBook Pro, 2.5 GHz Intel Core i7. - -### Optimized - -Generated regular expressions are optimized: - -* duplicate sequences and character classes are reduced using quantifiers -* smart enough to use `?` conditionals when number(s) or range(s) can be positive or negative -* uses fragment caching to avoid processing the same exact string more than once - -
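To tie the hand-written patterns in the Convenience list above back to the library (an illustrative sketch, not part of the original README), the `1` through `555` case can be generated and tested directly:

```js
const toRegexRange = require('to-regex-range');

// Generates the same alternation as the hand-written 1..555 pattern above.
const range = new RegExp(`^(?:${toRegexRange(1, 555)})$`);

console.log(range.test('1'));   //=> true
console.log(range.test('349')); //=> true
console.log(range.test('555')); //=> true
console.log(range.test('556')); //=> false
console.log(range.test('0'));   //=> false
```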
- -
- -## Usage - -Add this library to your javascript application with the following line of code - -```js -const toRegexRange = require('to-regex-range'); -``` - -The main export is a function that takes two integers: the `min` value and `max` value (formatted as strings or numbers). - -```js -const source = toRegexRange('15', '95'); -//=> 1[5-9]|[2-8][0-9]|9[0-5] - -const regex = new RegExp(`^${source}$`); -console.log(regex.test('14')); //=> false -console.log(regex.test('50')); //=> true -console.log(regex.test('94')); //=> true -console.log(regex.test('96')); //=> false -``` - -## Options - -### options.capture - -**Type**: `boolean` - -**Deafault**: `undefined` - -Wrap the returned value in parentheses when there is more than one regex condition. Useful when you're dynamically generating ranges. - -```js -console.log(toRegexRange('-10', '10')); -//=> -[1-9]|-?10|[0-9] - -console.log(toRegexRange('-10', '10', { capture: true })); -//=> (-[1-9]|-?10|[0-9]) -``` - -### options.shorthand - -**Type**: `boolean` - -**Deafault**: `undefined` - -Use the regex shorthand for `[0-9]`: - -```js -console.log(toRegexRange('0', '999999')); -//=> [0-9]|[1-9][0-9]{1,5} - -console.log(toRegexRange('0', '999999', { shorthand: true })); -//=> \d|[1-9]\d{1,5} -``` - -### options.relaxZeros - -**Type**: `boolean` - -**Default**: `true` - -This option relaxes matching for leading zeros when when ranges are zero-padded. - -```js -const source = toRegexRange('-0010', '0010'); -const regex = new RegExp(`^${source}$`); -console.log(regex.test('-10')); //=> true -console.log(regex.test('-010')); //=> true -console.log(regex.test('-0010')); //=> true -console.log(regex.test('10')); //=> true -console.log(regex.test('010')); //=> true -console.log(regex.test('0010')); //=> true -``` - -When `relaxZeros` is false, matching is strict: - -```js -const source = toRegexRange('-0010', '0010', { relaxZeros: false }); -const regex = new RegExp(`^${source}$`); -console.log(regex.test('-10')); //=> false -console.log(regex.test('-010')); //=> false -console.log(regex.test('-0010')); //=> true -console.log(regex.test('10')); //=> false -console.log(regex.test('010')); //=> false -console.log(regex.test('0010')); //=> true -``` - -## Examples - -| **Range** | **Result** | **Compile time** | -| --- | --- | --- | -| `toRegexRange(-10, 10)` | `-[1-9]\|-?10\|[0-9]` | _132μs_ | -| `toRegexRange(-100, -10)` | `-1[0-9]\|-[2-9][0-9]\|-100` | _50μs_ | -| `toRegexRange(-100, 100)` | `-[1-9]\|-?[1-9][0-9]\|-?100\|[0-9]` | _42μs_ | -| `toRegexRange(001, 100)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|100` | _109μs_ | -| `toRegexRange(001, 555)` | `0{0,2}[1-9]\|0?[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _51μs_ | -| `toRegexRange(0010, 1000)` | `0{0,2}1[0-9]\|0{0,2}[2-9][0-9]\|0?[1-9][0-9]{2}\|1000` | _31μs_ | -| `toRegexRange(1, 50)` | `[1-9]\|[1-4][0-9]\|50` | _24μs_ | -| `toRegexRange(1, 55)` | `[1-9]\|[1-4][0-9]\|5[0-5]` | _23μs_ | -| `toRegexRange(1, 555)` | `[1-9]\|[1-9][0-9]\|[1-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _30μs_ | -| `toRegexRange(1, 5555)` | `[1-9]\|[1-9][0-9]{1,2}\|[1-4][0-9]{3}\|5[0-4][0-9]{2}\|55[0-4][0-9]\|555[0-5]` | _43μs_ | -| `toRegexRange(111, 555)` | `11[1-9]\|1[2-9][0-9]\|[2-4][0-9]{2}\|5[0-4][0-9]\|55[0-5]` | _38μs_ | -| `toRegexRange(29, 51)` | `29\|[34][0-9]\|5[01]` | _24μs_ | -| `toRegexRange(31, 877)` | `3[1-9]\|[4-9][0-9]\|[1-7][0-9]{2}\|8[0-6][0-9]\|87[0-7]` | _32μs_ | -| `toRegexRange(5, 5)` | `5` | _8μs_ | -| `toRegexRange(5, 6)` | `5\|6` | _11μs_ | -| `toRegexRange(1, 2)` | `1\|2` | _6μs_ | -| 
`toRegexRange(1, 5)` | `[1-5]` | _15μs_ | -| `toRegexRange(1, 10)` | `[1-9]\|10` | _22μs_ | -| `toRegexRange(1, 100)` | `[1-9]\|[1-9][0-9]\|100` | _25μs_ | -| `toRegexRange(1, 1000)` | `[1-9]\|[1-9][0-9]{1,2}\|1000` | _31μs_ | -| `toRegexRange(1, 10000)` | `[1-9]\|[1-9][0-9]{1,3}\|10000` | _34μs_ | -| `toRegexRange(1, 100000)` | `[1-9]\|[1-9][0-9]{1,4}\|100000` | _36μs_ | -| `toRegexRange(1, 1000000)` | `[1-9]\|[1-9][0-9]{1,5}\|1000000` | _42μs_ | -| `toRegexRange(1, 10000000)` | `[1-9]\|[1-9][0-9]{1,6}\|10000000` | _42μs_ | - -## Heads up! - -**Order of arguments** - -When the `min` is larger than the `max`, values will be flipped to create a valid range: - -```js -toRegexRange('51', '29'); -``` - -Is effectively flipped to: - -```js -toRegexRange('29', '51'); -//=> 29|[3-4][0-9]|5[0-1] -``` - -**Steps / increments** - -This library does not support steps (increments). A PR to add support would be welcome. - -## History - -### v2.0.0 - 2017-04-21 - -**New features** - -Adds support for zero-padding! - -### v1.0.0 - -**Optimizations** - -Repeating ranges are now grouped using quantifiers. Processing time is roughly the same, but the generated regex is much smaller, which should result in faster matching. - -## Attribution - -Inspired by the python library [range-regex](https://github.com/dimka665/range-regex). - -## About - -
-Contributing - -Pull requests and stars are always welcome. For bugs and feature requests, [please create an issue](../../issues/new). - -
- -
-Running Tests - -Running and reviewing unit tests is a great way to get familiarized with a library and its API. You can install dependencies and run tests with the following command: - -```sh -$ npm install && npm test -``` - -
- -
-Building docs - -_(This project's readme.md is generated by [verb](https://github.com/verbose/verb-generate-readme), please don't edit the readme directly. Any changes to the readme must be made in the [.verb.md](.verb.md) readme template.)_ - -To generate the readme, run the following command: - -```sh -$ npm install -g verbose/verb#dev verb-generate-readme && verb -``` - -
- -### Related projects - -You might also be interested in these projects: - -* [expand-range](https://www.npmjs.com/package/expand-range): Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used… [more](https://github.com/jonschlinkert/expand-range) | [homepage](https://github.com/jonschlinkert/expand-range "Fast, bash-like range expansion. Expand a range of numbers or letters, uppercase or lowercase. Used by micromatch.") -* [fill-range](https://www.npmjs.com/package/fill-range): Fill in a range of numbers or letters, optionally passing an increment or `step` to… [more](https://github.com/jonschlinkert/fill-range) | [homepage](https://github.com/jonschlinkert/fill-range "Fill in a range of numbers or letters, optionally passing an increment or `step` to use, or create a regex-compatible range with `options.toRegex`") -* [micromatch](https://www.npmjs.com/package/micromatch): Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch. | [homepage](https://github.com/micromatch/micromatch "Glob matching for javascript/node.js. A drop-in replacement and faster alternative to minimatch and multimatch.") -* [repeat-element](https://www.npmjs.com/package/repeat-element): Create an array by repeating the given value n times. | [homepage](https://github.com/jonschlinkert/repeat-element "Create an array by repeating the given value n times.") -* [repeat-string](https://www.npmjs.com/package/repeat-string): Repeat the given string n times. Fastest implementation for repeating a string. | [homepage](https://github.com/jonschlinkert/repeat-string "Repeat the given string n times. Fastest implementation for repeating a string.") - -### Contributors - -| **Commits** | **Contributor** | -| --- | --- | -| 63 | [jonschlinkert](https://github.com/jonschlinkert) | -| 3 | [doowb](https://github.com/doowb) | -| 2 | [realityking](https://github.com/realityking) | - -### Author - -**Jon Schlinkert** - -* [GitHub Profile](https://github.com/jonschlinkert) -* [Twitter Profile](https://twitter.com/jonschlinkert) -* [LinkedIn Profile](https://linkedin.com/in/jonschlinkert) - -Please consider supporting me on Patreon, or [start your own Patreon page](https://patreon.com/invite/bxpbvm)! - - - - - -### License - -Copyright © 2019, [Jon Schlinkert](https://github.com/jonschlinkert). -Released under the [MIT License](LICENSE). - -*** - -_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.8.0, on April 07, 2019._ \ No newline at end of file diff --git a/node_modules/to-regex-range/index.js b/node_modules/to-regex-range/index.js deleted file mode 100644 index 77fbace..0000000 --- a/node_modules/to-regex-range/index.js +++ /dev/null @@ -1,288 +0,0 @@ -/*! - * to-regex-range - * - * Copyright (c) 2015-present, Jon Schlinkert. - * Released under the MIT License. 
- */ - -'use strict'; - -const isNumber = require('is-number'); - -const toRegexRange = (min, max, options) => { - if (isNumber(min) === false) { - throw new TypeError('toRegexRange: expected the first argument to be a number'); - } - - if (max === void 0 || min === max) { - return String(min); - } - - if (isNumber(max) === false) { - throw new TypeError('toRegexRange: expected the second argument to be a number.'); - } - - let opts = { relaxZeros: true, ...options }; - if (typeof opts.strictZeros === 'boolean') { - opts.relaxZeros = opts.strictZeros === false; - } - - let relax = String(opts.relaxZeros); - let shorthand = String(opts.shorthand); - let capture = String(opts.capture); - let wrap = String(opts.wrap); - let cacheKey = min + ':' + max + '=' + relax + shorthand + capture + wrap; - - if (toRegexRange.cache.hasOwnProperty(cacheKey)) { - return toRegexRange.cache[cacheKey].result; - } - - let a = Math.min(min, max); - let b = Math.max(min, max); - - if (Math.abs(a - b) === 1) { - let result = min + '|' + max; - if (opts.capture) { - return `(${result})`; - } - if (opts.wrap === false) { - return result; - } - return `(?:${result})`; - } - - let isPadded = hasPadding(min) || hasPadding(max); - let state = { min, max, a, b }; - let positives = []; - let negatives = []; - - if (isPadded) { - state.isPadded = isPadded; - state.maxLen = String(state.max).length; - } - - if (a < 0) { - let newMin = b < 0 ? Math.abs(b) : 1; - negatives = splitToPatterns(newMin, Math.abs(a), state, opts); - a = state.a = 0; - } - - if (b >= 0) { - positives = splitToPatterns(a, b, state, opts); - } - - state.negatives = negatives; - state.positives = positives; - state.result = collatePatterns(negatives, positives, opts); - - if (opts.capture === true) { - state.result = `(${state.result})`; - } else if (opts.wrap !== false && (positives.length + negatives.length) > 1) { - state.result = `(?:${state.result})`; - } - - toRegexRange.cache[cacheKey] = state; - return state.result; -}; - -function collatePatterns(neg, pos, options) { - let onlyNegative = filterPatterns(neg, pos, '-', false, options) || []; - let onlyPositive = filterPatterns(pos, neg, '', false, options) || []; - let intersected = filterPatterns(neg, pos, '-?', true, options) || []; - let subpatterns = onlyNegative.concat(intersected).concat(onlyPositive); - return subpatterns.join('|'); -} - -function splitToRanges(min, max) { - let nines = 1; - let zeros = 1; - - let stop = countNines(min, nines); - let stops = new Set([max]); - - while (min <= stop && stop <= max) { - stops.add(stop); - nines += 1; - stop = countNines(min, nines); - } - - stop = countZeros(max + 1, zeros) - 1; - - while (min < stop && stop <= max) { - stops.add(stop); - zeros += 1; - stop = countZeros(max + 1, zeros) - 1; - } - - stops = [...stops]; - stops.sort(compare); - return stops; -} - -/** - * Convert a range to a regex pattern - * @param {Number} `start` - * @param {Number} `stop` - * @return {String} - */ - -function rangeToPattern(start, stop, options) { - if (start === stop) { - return { pattern: start, count: [], digits: 0 }; - } - - let zipped = zip(start, stop); - let digits = zipped.length; - let pattern = ''; - let count = 0; - - for (let i = 0; i < digits; i++) { - let [startDigit, stopDigit] = zipped[i]; - - if (startDigit === stopDigit) { - pattern += startDigit; - - } else if (startDigit !== '0' || stopDigit !== '9') { - pattern += toCharacterClass(startDigit, stopDigit, options); - - } else { - count++; - } - } - - if (count) { - pattern += 
options.shorthand === true ? '\\d' : '[0-9]'; - } - - return { pattern, count: [count], digits }; -} - -function splitToPatterns(min, max, tok, options) { - let ranges = splitToRanges(min, max); - let tokens = []; - let start = min; - let prev; - - for (let i = 0; i < ranges.length; i++) { - let max = ranges[i]; - let obj = rangeToPattern(String(start), String(max), options); - let zeros = ''; - - if (!tok.isPadded && prev && prev.pattern === obj.pattern) { - if (prev.count.length > 1) { - prev.count.pop(); - } - - prev.count.push(obj.count[0]); - prev.string = prev.pattern + toQuantifier(prev.count); - start = max + 1; - continue; - } - - if (tok.isPadded) { - zeros = padZeros(max, tok, options); - } - - obj.string = zeros + obj.pattern + toQuantifier(obj.count); - tokens.push(obj); - start = max + 1; - prev = obj; - } - - return tokens; -} - -function filterPatterns(arr, comparison, prefix, intersection, options) { - let result = []; - - for (let ele of arr) { - let { string } = ele; - - // only push if _both_ are negative... - if (!intersection && !contains(comparison, 'string', string)) { - result.push(prefix + string); - } - - // or _both_ are positive - if (intersection && contains(comparison, 'string', string)) { - result.push(prefix + string); - } - } - return result; -} - -/** - * Zip strings - */ - -function zip(a, b) { - let arr = []; - for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]); - return arr; -} - -function compare(a, b) { - return a > b ? 1 : b > a ? -1 : 0; -} - -function contains(arr, key, val) { - return arr.some(ele => ele[key] === val); -} - -function countNines(min, len) { - return Number(String(min).slice(0, -len) + '9'.repeat(len)); -} - -function countZeros(integer, zeros) { - return integer - (integer % Math.pow(10, zeros)); -} - -function toQuantifier(digits) { - let [start = 0, stop = ''] = digits; - if (stop || start > 1) { - return `{${start + (stop ? ',' + stop : '')}}`; - } - return ''; -} - -function toCharacterClass(a, b, options) { - return `[${a}${(b - a === 1) ? '' : '-'}${b}]`; -} - -function hasPadding(str) { - return /^-?(0+)\d/.test(str); -} - -function padZeros(value, tok, options) { - if (!tok.isPadded) { - return value; - } - - let diff = Math.abs(tok.maxLen - String(value).length); - let relax = options.relaxZeros !== false; - - switch (diff) { - case 0: - return ''; - case 1: - return relax ? '0?' : '0'; - case 2: - return relax ? '0{0,2}' : '00'; - default: { - return relax ? `0{0,${diff}}` : `0{${diff}}`; - } - } -} - -/** - * Cache - */ - -toRegexRange.cache = {}; -toRegexRange.clearCache = () => (toRegexRange.cache = {}); - -/** - * Expose `toRegexRange` - */ - -module.exports = toRegexRange; diff --git a/node_modules/to-regex-range/package.json b/node_modules/to-regex-range/package.json deleted file mode 100644 index 4ef194f..0000000 --- a/node_modules/to-regex-range/package.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "name": "to-regex-range", - "description": "Pass two numbers, get a regex-compatible source string for matching ranges. 
Validated against more than 2.78 million test assertions.", - "version": "5.0.1", - "homepage": "https://github.com/micromatch/to-regex-range", - "author": "Jon Schlinkert (https://github.com/jonschlinkert)", - "contributors": [ - "Jon Schlinkert (http://twitter.com/jonschlinkert)", - "Rouven Weßling (www.rouvenwessling.de)" - ], - "repository": "micromatch/to-regex-range", - "bugs": { - "url": "https://github.com/micromatch/to-regex-range/issues" - }, - "license": "MIT", - "files": [ - "index.js" - ], - "main": "index.js", - "engines": { - "node": ">=8.0" - }, - "scripts": { - "test": "mocha" - }, - "dependencies": { - "is-number": "^7.0.0" - }, - "devDependencies": { - "fill-range": "^6.0.0", - "gulp-format-md": "^2.0.0", - "mocha": "^6.0.2", - "text-table": "^0.2.0", - "time-diff": "^0.3.1" - }, - "keywords": [ - "bash", - "date", - "expand", - "expansion", - "expression", - "glob", - "match", - "match date", - "match number", - "match numbers", - "match year", - "matches", - "matching", - "number", - "numbers", - "numerical", - "range", - "ranges", - "regex", - "regexp", - "regular", - "regular expression", - "sequence" - ], - "verb": { - "layout": "default", - "toc": false, - "tasks": [ - "readme" - ], - "plugins": [ - "gulp-format-md" - ], - "lint": { - "reflinks": true - }, - "helpers": { - "examples": { - "displayName": "examples" - } - }, - "related": { - "list": [ - "expand-range", - "fill-range", - "micromatch", - "repeat-element", - "repeat-string" - ] - } - } -} diff --git a/node_modules/which/CHANGELOG.md b/node_modules/which/CHANGELOG.md deleted file mode 100644 index 7fb1f20..0000000 --- a/node_modules/which/CHANGELOG.md +++ /dev/null @@ -1,166 +0,0 @@ -# Changes - - -## 2.0.2 - -* Rename bin to `node-which` - -## 2.0.1 - -* generate changelog and publish on version bump -* enforce 100% test coverage -* Promise interface - -## 2.0.0 - -* Parallel tests, modern JavaScript, and drop support for node < 8 - -## 1.3.1 - -* update deps -* update travis - -## v1.3.0 - -* Add nothrow option to which.sync -* update tap - -## v1.2.14 - -* appveyor: drop node 5 and 0.x -* travis-ci: add node 6, drop 0.x - -## v1.2.13 - -* test: Pass missing option to pass on windows -* update tap -* update isexe to 2.0.0 -* neveragain.tech pledge request - -## v1.2.12 - -* Removed unused require - -## v1.2.11 - -* Prevent changelog script from being included in package - -## v1.2.10 - -* Use env.PATH only, not env.Path - -## v1.2.9 - -* fix for paths starting with ../ -* Remove unused `is-absolute` module - -## v1.2.8 - -* bullet items in changelog that contain (but don't start with) # - -## v1.2.7 - -* strip 'update changelog' changelog entries out of changelog - -## v1.2.6 - -* make the changelog bulleted - -## v1.2.5 - -* make a changelog, and keep it up to date -* don't include tests in package -* Properly handle relative-path executables -* appveyor -* Attach error code to Not Found error -* Make tests pass on Windows - -## v1.2.4 - -* Fix typo - -## v1.2.3 - -* update isexe, fix regression in pathExt handling - -## v1.2.2 - -* update deps, use isexe module, test windows - -## v1.2.1 - -* Sometimes windows PATH entries are quoted -* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode. 
-* doc cli - -## v1.2.0 - -* Add support for opt.all and -as cli flags -* test the bin -* update travis -* Allow checking for multiple programs in bin/which -* tap 2 - -## v1.1.2 - -* travis -* Refactored and fixed undefined error on Windows -* Support strict mode - -## v1.1.1 - -* test +g exes against secondary groups, if available -* Use windows exe semantics on cygwin & msys -* cwd should be first in path on win32, not last -* Handle lower-case 'env.Path' on Windows -* Update docs -* use single-quotes - -## v1.1.0 - -* Add tests, depend on is-absolute - -## v1.0.9 - -* which.js: root is allowed to execute files owned by anyone - -## v1.0.8 - -* don't use graceful-fs - -## v1.0.7 - -* add license to package.json - -## v1.0.6 - -* isc license - -## 1.0.5 - -* Awful typo - -## 1.0.4 - -* Test for path absoluteness properly -* win: Allow '' as a pathext if cmd has a . in it - -## 1.0.3 - -* Remove references to execPath -* Make `which.sync()` work on Windows by honoring the PATHEXT variable. -* Make `isExe()` always return true on Windows. -* MIT - -## 1.0.2 - -* Only files can be exes - -## 1.0.1 - -* Respect the PATHEXT env for win32 support -* should 0755 the bin -* binary -* guts -* package -* 1st diff --git a/node_modules/which/LICENSE b/node_modules/which/LICENSE deleted file mode 100644 index 19129e3..0000000 --- a/node_modules/which/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/which/README.md b/node_modules/which/README.md deleted file mode 100644 index cd83350..0000000 --- a/node_modules/which/README.md +++ /dev/null @@ -1,54 +0,0 @@ -# which - -Like the unix `which` utility. - -Finds the first instance of a specified executable in the PATH -environment variable. Does not cache the results, so `hash -r` is not -needed when the PATH changes. - -## USAGE - -```javascript -var which = require('which') - -// async usage -which('node', function (er, resolvedPath) { - // er is returned if no "node" is found on the PATH - // if it is found, then the absolute path to the exec is returned -}) - -// or promise -which('node').then(resolvedPath => { ... }).catch(er => { ... not found ... }) - -// sync usage -// throws if not found -var resolved = which.sync('node') - -// if nothrow option is used, returns null if not found -resolved = which.sync('node', {nothrow: true}) - -// Pass options to override the PATH and PATHEXT environment vars. -which('node', { path: someOtherPath }, function (er, resolved) { - if (er) - throw er - console.log('found at %j', resolved) -}) -``` - -## CLI USAGE - -Same as the BSD `which(1)` binary. - -``` -usage: which [-as] program ... -``` - -## OPTIONS - -You may pass an options object as the second argument. - -- `path`: Use instead of the `PATH` environment variable. 
-- `pathExt`: Use instead of the `PATHEXT` environment variable. -- `all`: Return all matches, instead of just the first one. Note that - this means the function returns an array of strings instead of a - single string. diff --git a/node_modules/which/bin/node-which b/node_modules/which/bin/node-which deleted file mode 100755 index 7cee372..0000000 --- a/node_modules/which/bin/node-which +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env node -var which = require("../") -if (process.argv.length < 3) - usage() - -function usage () { - console.error('usage: which [-as] program ...') - process.exit(1) -} - -var all = false -var silent = false -var dashdash = false -var args = process.argv.slice(2).filter(function (arg) { - if (dashdash || !/^-/.test(arg)) - return true - - if (arg === '--') { - dashdash = true - return false - } - - var flags = arg.substr(1).split('') - for (var f = 0; f < flags.length; f++) { - var flag = flags[f] - switch (flag) { - case 's': - silent = true - break - case 'a': - all = true - break - default: - console.error('which: illegal option -- ' + flag) - usage() - } - } - return false -}) - -process.exit(args.reduce(function (pv, current) { - try { - var f = which.sync(current, { all: all }) - if (all) - f = f.join('\n') - if (!silent) - console.log(f) - return pv; - } catch (e) { - return 1; - } -}, 0)) diff --git a/node_modules/which/package.json b/node_modules/which/package.json deleted file mode 100644 index 97ad7fb..0000000 --- a/node_modules/which/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "author": "Isaac Z. Schlueter (http://blog.izs.me)", - "name": "which", - "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.", - "version": "2.0.2", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-which.git" - }, - "main": "which.js", - "bin": { - "node-which": "./bin/node-which" - }, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "devDependencies": { - "mkdirp": "^0.5.0", - "rimraf": "^2.6.2", - "tap": "^14.6.9" - }, - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublish": "npm run changelog", - "prechangelog": "bash gen-changelog.sh", - "changelog": "git add CHANGELOG.md", - "postchangelog": "git commit -m 'update changelog - '${npm_package_version}", - "postpublish": "git push origin --follow-tags" - }, - "files": [ - "which.js", - "bin/node-which" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/which/which.js b/node_modules/which/which.js deleted file mode 100644 index 82afffd..0000000 --- a/node_modules/which/which.js +++ /dev/null @@ -1,125 +0,0 @@ -const isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' - -const path = require('path') -const COLON = isWindows ? ';' : ':' -const isexe = require('isexe') - -const getNotFoundError = (cmd) => - Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) - -const getPathInfo = (cmd, opt) => { - const colon = opt.colon || COLON - - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? [''] - : ( - [ - // windows always checks the cwd first - ...(isWindows ? [process.cwd()] : []), - ...(opt.path || process.env.PATH || - /* istanbul ignore next: very unusual */ '').split(colon), - ] - ) - const pathExtExe = isWindows - ? 
opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' - : '' - const pathExt = isWindows ? pathExtExe.split(colon) : [''] - - if (isWindows) { - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } - - return { - pathEnv, - pathExt, - pathExtExe, - } -} - -const which = (cmd, opt, cb) => { - if (typeof opt === 'function') { - cb = opt - opt = {} - } - if (!opt) - opt = {} - - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) - const found = [] - - const step = i => new Promise((resolve, reject) => { - if (i === pathEnv.length) - return opt.all && found.length ? resolve(found) - : reject(getNotFoundError(cmd)) - - const ppRaw = pathEnv[i] - const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw - - const pCmd = path.join(pathPart, cmd) - const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd - : pCmd - - resolve(subStep(p, i, 0)) - }) - - const subStep = (p, i, ii) => new Promise((resolve, reject) => { - if (ii === pathExt.length) - return resolve(step(i + 1)) - const ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return resolve(p + ext) - } - return resolve(subStep(p, i, ii + 1)) - }) - }) - - return cb ? step(0).then(res => cb(null, res), cb) : step(0) -} - -const whichSync = (cmd, opt) => { - opt = opt || {} - - const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) - const found = [] - - for (let i = 0; i < pathEnv.length; i ++) { - const ppRaw = pathEnv[i] - const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw - - const pCmd = path.join(pathPart, cmd) - const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd - : pCmd - - for (let j = 0; j < pathExt.length; j ++) { - const cur = p + pathExt[j] - try { - const is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur - } - } catch (ex) {} - } - } - - if (opt.all && found.length) - return found - - if (opt.nothrow) - return null - - throw getNotFoundError(cmd) -} - -module.exports = which -which.sync = whichSync
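For reference, a short usage sketch of the `which.js` implementation above (illustrative only; it restates the promise, callback, and sync interfaces documented in the package's README, and the command name is just an example):

```js
const which = require('which')

// promise interface
which('node')
  .then(resolved => console.log(resolved))
  .catch(er => console.error(er.code)) //=> 'ENOENT' when nothing on the PATH matches

// callback interface
which('node', function (er, resolved) {
  if (!er) console.log(resolved)
})

// sync interface: { nothrow: true } returns null instead of throwing,
// and { all: true } returns every match on the PATH as an array
const first = which.sync('node', { nothrow: true })
const every = which.sync('node', { all: true, nothrow: true })
console.log(first, every)
```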